From 5d89d3c47adf0c8f27d3701e7faeb35975da2dac Mon Sep 17 00:00:00 2001
From: Hendrik Muhs
Date: Tue, 24 Jul 2018 11:40:24 +0200
Subject: [PATCH 01/49] feature index prototype

---
 .../ml-feature-index-builder/build.gradle     | 23 ++++++++
 .../FeatureIndexBuilder.java                  | 50 ++++++++++++++++
 .../action/FeatureIndexBuildAction.java       | 59 +++++++++++++++++++
 .../action/RestFeatureIndexBuildAction.java   | 34 +++++++++++
 4 files changed, 166 insertions(+)
 create mode 100644 x-pack/plugin/ml-feature-index-builder/build.gradle
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java

diff --git a/x-pack/plugin/ml-feature-index-builder/build.gradle b/x-pack/plugin/ml-feature-index-builder/build.gradle
new file mode 100644
index 0000000000000..ec6aaf4be7fd0
--- /dev/null
+++ b/x-pack/plugin/ml-feature-index-builder/build.gradle
@@ -0,0 +1,23 @@
+import org.elasticsearch.gradle.BuildPlugin
+
+evaluationDependsOn(xpackModule('core'))
+
+apply plugin: 'elasticsearch.esplugin'
+
+esplugin {
+    name 'ml-feature-index-builder'
+    description 'A plugin to build feature indexes'
+    classname 'org.elasticsearch.xpack.ml.featureIndexBuilder.FeatureIndexBuilder'
+    extendedPlugins = ['x-pack-core']
+}
+
+dependencies {
+    compileOnly "org.elasticsearch:elasticsearch:${version}"
+
+    compileOnly "org.elasticsearch.plugin:x-pack-core:${version}"
+    testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
+}
+
+
+
+integTest.enabled = false
diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java
new file mode 100644
index 0000000000000..dc3576fc2ef09
--- /dev/null
+++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ml.featureindexbuilder;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.plugins.ActionPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestHandler;
+import org.elasticsearch.xpack.ml.featureindexbuilder.action.FeatureIndexBuildAction;
+import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestFeatureIndexBuildAction;
+
+import java.util.List;
+import java.util.function.Supplier;
+
+import static java.util.Collections.singletonList;
+
+public class FeatureIndexBuilder extends Plugin implements ActionPlugin {
+
+    public static final String NAME = "feature_index_builder";
+    protected final boolean enabled;
+
+    public FeatureIndexBuilder(Settings settings) {
+        this.enabled = true;
+    }
+
+    @Override
+    public List<RestHandler> getRestHandlers(final Settings settings, final RestController restController,
+            final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter,
+            final IndexNameExpressionResolver indexNameExpressionResolver, final Supplier<DiscoveryNodes> nodesInCluster) {
+
+        return singletonList(new RestFeatureIndexBuildAction(settings, restController));
+    }
+
+    @Override
+    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
+        return singletonList(new ActionHandler<>(FeatureIndexBuildAction.INSTANCE, TransportFeatureIndexBuildAction.class));
+    }
+}
diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java
new file mode 100644
index 0000000000000..2f6052ee5b13a
--- /dev/null
+++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ml.featureindexbuilder.action;
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+
+//public class FeatureIndexBuildAction extends Action {
+ public class FeatureIndexBuildAction extends Action {
+
+    protected FeatureIndexBuildAction(String name) {
+        super(name);
+    }
+
+    public static final FeatureIndexBuildAction INSTANCE = new FeatureIndexBuildAction("");
+
+    public static class Request extends ActionRequest implements ToXContentObject {
+        public Request() {
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            return null;
+        }
+
+    }
+
+    public static class Response extends ActionResponse implements ToXContentObject {
+        public Response() {
+            super();
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            return null;
+        }
+
+    }
+
+    @Override
+    public Response newResponse() {
+        return null;
+    }
+}
diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java
new file mode 100644
index 0000000000000..2a5746afe0b59
--- /dev/null
+++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action;
+
+import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+
+import java.io.IOException;
+
+public class RestFeatureIndexBuildAction extends BaseRestHandler {
+
+    public RestFeatureIndexBuildAction(Settings settings, RestController controller) {
+        super(settings);
+        controller.registerHandler(RestRequest.Method.POST, "/_xpack/ml/feature_index/_build", this);
+    }
+
+    @Override
+    public String getName() {
+        return "ml_feature_index_builder_build_action";
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+        return null;
+    }
+
+}
From 9c59ae17e2cdc34aaf66f7e0f29f0e0464e88438 Mon Sep 17 00:00:00 2001
From: Hendrik Muhs
Date: Thu, 2 Aug 2018 16:00:48 +0200
Subject: [PATCH 02/49] 1st version of a feature index builder

---
 .../ml-feature-index-builder/build.gradle     |   2 +-
 .../FeatureIndexBuilder.java                  | 109 +++++++++-
 .../FeatureIndexBuilderFeatureSet.java        |  60 ++++++
 .../FeatureIndexBuilderFeatureSetUsage.java   |  23 +++
 .../action/FeatureIndexBuildAction.java       |  59 ------
 .../PutFeatureIndexBuilderJobAction.java      | 120 +++++++++++
 .../StartFeatureIndexBuilderJobAction.java    | 159 ++++++++++++++
 ...nsportPutFeatureIndexBuilderJobAction.java |  94 +++++++++
 ...portStartFeatureIndexBuilderJobAction.java | 116 +++++++++++
 .../job/FeatureIndexBuilderIndexer.java       | 194 ++++++++++++++++++
 .../job/FeatureIndexBuilderJob.java           |  95 +++++++++
 .../job/FeatureIndexBuilderJobConfig.java     | 147 +++++++++++++
 .../job/FeatureIndexBuilderJobStatus.java     |  73 +++++++
 .../job/FeatureIndexBuilderJobTask.java       | 102 +++++++++
 .../action/RestFeatureIndexBuildAction.java   |  34 ---
 .../RestPutFeatureIndexBuilderJobAction.java  |  45 ++++
 ...RestStartFeatureIndexBuilderJobAction.java |  41 ++++
 17 files changed, 1372 insertions(+), 101 deletions(-)
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java
 delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java
 create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java
 create mode 100644
x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java diff --git a/x-pack/plugin/ml-feature-index-builder/build.gradle b/x-pack/plugin/ml-feature-index-builder/build.gradle index ec6aaf4be7fd0..f92709b528643 100644 --- a/x-pack/plugin/ml-feature-index-builder/build.gradle +++ b/x-pack/plugin/ml-feature-index-builder/build.gradle @@ -7,7 +7,7 @@ apply plugin: 'elasticsearch.esplugin' esplugin { name 'ml-feature-index-builder' description 'A plugin to build feature indexes' - classname 'org.elasticsearch.xpack.ml.featureIndexBuilder.FeatureIndexBuilder' + classname 'org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index dc3576fc2ef09..197a2f9f29350 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -8,43 +8,138 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.FeatureIndexBuildAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestFeatureIndexBuildAction; +import org.elasticsearch.threadpool.ExecutorBuilder; +import org.elasticsearch.threadpool.FixedExecutorBuilder; +import 
org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.rollup.RollupField; +import org.elasticsearch.xpack.core.rollup.job.RollupJob; +import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; +import java.time.Clock; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Set; import java.util.function.Supplier; -import static java.util.Collections.singletonList; +import static java.util.Collections.emptyList; -public class FeatureIndexBuilder extends Plugin implements ActionPlugin { +public class FeatureIndexBuilder extends Plugin implements ActionPlugin, PersistentTaskPlugin { public static final String NAME = "feature_index_builder"; - protected final boolean enabled; + public static final String BASE_PATH = "/_xpack/feature_index_builder/"; + public static final String TASK_THREAD_POOL_NAME = "feature_index_builder_indexing"; + + // list of headers that will be stored when a job is created + public static final Set HEADER_FILTERS = new HashSet<>( + Arrays.asList("es-security-runas-user", "_xpack_security_authentication")); + + private final boolean enabled; + private final Settings settings; public FeatureIndexBuilder(Settings settings) { + this.settings = settings; + + // todo: XPackSettings.FEATURE_INDEX_BUILDER_ENABLED.get(settings); this.enabled = true; } + @Override + public Collection createGuiceModules() { + List modules = new ArrayList<>(); + + modules.add(b -> XPackPlugin.bindFeatureSet(b, FeatureIndexBuilderFeatureSet.class)); + return modules; + } + @Override public List getRestHandlers(final Settings settings, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter, final IndexNameExpressionResolver indexNameExpressionResolver, final Supplier nodesInCluster) { - return singletonList(new RestFeatureIndexBuildAction(settings, restController)); + if (!enabled) { + return emptyList(); + } + + return Arrays.asList( + new RestPutFeatureIndexBuilderJobAction(settings, restController), + new RestStartFeatureIndexBuilderJobAction(settings, restController) + ); } @Override public List> getActions() { - return singletonList(new ActionHandler<>(FeatureIndexBuildAction.INSTANCE, TransportFeatureIndexBuildAction.class)); + if (!enabled) { + return emptyList(); + } + + return Arrays.asList( + new ActionHandler<>(PutFeatureIndexBuilderJobAction.INSTANCE, TransportPutFeatureIndexBuilderJobAction.class), + new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class) + 
); + } + + @Override + public List> getExecutorBuilders(Settings settings) { + if (false == enabled) { + return emptyList(); + } + + FixedExecutorBuilder indexing = new FixedExecutorBuilder(settings, TASK_THREAD_POOL_NAME, 4, 4, + "xpack.feature_index_builder.task_thread_pool"); + + return Collections.singletonList(indexing); + } + + @Override + public List> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, + Client client) { + if (enabled == false) { + return emptyList(); + } + + SchedulerEngine schedulerEngine = new SchedulerEngine(Clock.systemUTC()); + return Collections.singletonList(new FeatureIndexBuilderJobTask.FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, + schedulerEngine, threadPool)); + } + @Override + public List getNamedXContent() { + if (enabled == false) { + return emptyList(); + } + return Collections.singletonList( + new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField("xpack/feature_index_builder/job"), + FeatureIndexBuilderJob::fromXContent) + ); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java new file mode 100644 index 0000000000000..40971cce1b9cb --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; +import java.util.Map; + +public class FeatureIndexBuilderFeatureSet implements XPackFeatureSet { + + private final boolean enabled; + private final XPackLicenseState licenseState; + + @Inject + public FeatureIndexBuilderFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) { + this.enabled = true; // XPackSettings.FEATURE_INDEX_BUILDER_ENABLED.get(settings); + this.licenseState = licenseState; + } + + @Override + public String name() { + return XPackField.MACHINE_LEARNING; // todo: own implementation?? + } + + @Override + public String description() { + return "Time series feature index creation"; + } + + @Override + public boolean available() { + return licenseState != null && licenseState.isMachineLearningAllowed(); // todo: part of ML? + } + + @Override + public boolean enabled() { + return enabled; + } + + @Override + public Map nativeCodeInfo() { + return null; + } + + @Override + public void usage(ActionListener listener) { + // TODO expose the currently running rollup tasks on this node? 
Unclear the best + // way to do that + listener.onResponse(new FeatureIndexBuilderFeatureSetUsage(available(), enabled())); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java new file mode 100644 index 0000000000000..53f762f1b5798 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; + +import java.io.IOException; + +public class FeatureIndexBuilderFeatureSetUsage extends Usage { + public FeatureIndexBuilderFeatureSetUsage(StreamInput input) throws IOException { + super(input); + } + + public FeatureIndexBuilderFeatureSetUsage(boolean available, boolean enabled) { + super(XPackField.MACHINE_LEARNING, available, enabled); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java deleted file mode 100644 index 2f6052ee5b13a..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/FeatureIndexBuildAction.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.action; - -import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; - -//public class FeatureIndexBuildAction extends Action { - public class FeatureIndexBuildAction extends Action { - - protected FeatureIndexBuildAction(String name) { - super(name); - } - - public static final FeatureIndexBuildAction INSTANCE = new FeatureIndexBuildAction(""); - - public static class Request extends ActionRequest implements ToXContentObject { - public Request() { - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - - } - - public static class Response extends ActionResponse implements ToXContentObject { - public Response() { - super(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - - } - - @Override - public Response newResponse() { - return null; - } -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..22e3b2a6ba2c7 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; + +import java.io.IOException; +import java.util.Objects; + +public class PutFeatureIndexBuilderJobAction extends Action { + + public static final PutFeatureIndexBuilderJobAction INSTANCE = new PutFeatureIndexBuilderJobAction(); + public static final String NAME = "cluster:admin/xpack/feature_index_builder/put"; + + private PutFeatureIndexBuilderJobAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends AcknowledgedRequest implements ToXContentObject { + + private FeatureIndexBuilderJobConfig config; + + public Request(FeatureIndexBuilderJobConfig config) { + this.setConfig(config); + } + + public Request() { + + } + + public static Request parseRequest(String id, XContentParser parser) { + FeatureIndexBuilderJobConfig.Builder config = FeatureIndexBuilderJobConfig.Builder.fromXContent(id, parser); + return new Request(config.build()); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return this.config.toXContent(builder, params); + } + + public FeatureIndexBuilderJobConfig getConfig() { + return config; + } + + public void setConfig(FeatureIndexBuilderJobConfig config) { + this.config = config; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + this.config = new FeatureIndexBuilderJobConfig(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + this.config.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash(config); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(config, other.config); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, PutFeatureIndexBuilderJobAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends AcknowledgedResponse { + public Response() { + super(); + } + + public Response(boolean acknowledged) { + super(acknowledged); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java new file mode 100644 index 
0000000000000..61312538fff99 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java @@ -0,0 +1,159 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.rollup.RollupField; +import java.io.IOException; +import java.util.Collections; +import java.util.Objects; + +public class StartFeatureIndexBuilderJobAction extends Action { + + public static final StartFeatureIndexBuilderJobAction INSTANCE = new StartFeatureIndexBuilderJobAction(); + public static final String NAME = "cluster:admin/xpack/feature_index_builder/start"; + + private StartFeatureIndexBuilderJobAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContent { + private String id; + + public Request(String id) { + this.id = ExceptionsHelper.requireNonNull(id, RollupField.ID.getPreferredName()); + } + + public Request() { + } + + public String getId() { + return id; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(RollupField.ID.getPreferredName(), id); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(id, other.id); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, StartFeatureIndexBuilderJobAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + private boolean started; + + public Response() { + super(Collections.emptyList(), Collections.emptyList()); + } + + public Response(StreamInput in) throws IOException { + super(Collections.emptyList(), Collections.emptyList()); + readFrom(in); + } + + public Response(boolean 
started) { + super(Collections.emptyList(), Collections.emptyList()); + this.started = started; + } + + public boolean isStarted() { + return started; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + started = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(started); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("started", started); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Response response = (Response) o; + return started == response.started; + } + + @Override + public int hashCode() { + return Objects.hash(started); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..6c99571bb8a7e --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Response; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; + +public class TransportPutFeatureIndexBuilderJobAction + extends TransportMasterNodeAction { + private final XPackLicenseState licenseState; + private final PersistentTasksService persistentTasksService; + private final Client client; + + @Inject + public TransportPutFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool 
threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, + XPackLicenseState licenseState, PersistentTasksService persistentTasksService, Client client) { + super(settings, PutFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, PutFeatureIndexBuilderJobAction.Request::new); + this.licenseState = licenseState; + this.persistentTasksService = persistentTasksService; + this.client = client; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected PutFeatureIndexBuilderJobAction.Response newResponse() { + return new PutFeatureIndexBuilderJobAction.Response(); + } + + @Override + protected void masterOperation(Request request, ClusterState clusterState, ActionListener listener) throws Exception { + + if (!licenseState.isMachineLearningAllowed()) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); + return; + } + + XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); + + FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); + + startPersistentTask(job, listener, persistentTasksService); + + } + + private static FeatureIndexBuilderJob createFeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config, ThreadPool threadPool) { + return new FeatureIndexBuilderJob(config); + } + + static void startPersistentTask(FeatureIndexBuilderJob job, ActionListener listener, + PersistentTasksService persistentTasksService) { + + persistentTasksService.sendStartRequest(job.getConfig().getId(), FeatureIndexBuilderJob.NAME, job, + ActionListener.wrap(persistentTask -> { + listener.onResponse(new PutFeatureIndexBuilderJobAction.Response(true)); + }, e -> { + listener.onFailure(e); + })); + } + + @Override + protected ClusterBlockException checkBlock(PutFeatureIndexBuilderJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..40662b4c39628 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; + +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +public class TransportStartFeatureIndexBuilderJobAction extends + TransportTasksAction { + + private final XPackLicenseState licenseState; + +@Inject +public TransportStartFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, + ActionFilters actionFilters, ClusterService clusterService, XPackLicenseState licenseState) { +super(settings, StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, + StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); +this.licenseState = licenseState; +} + + @Override + protected void processTasks(StartFeatureIndexBuilderJobAction.Request request, Consumer operation) { + FeatureIndexBuilderJobTask matchingTask = null; + + // todo: re-factor, see rollup TransportTaskHelper + for (Task task : taskManager.getTasks().values()) { + if (task instanceof FeatureIndexBuilderJobTask && ((FeatureIndexBuilderJobTask)task).getConfig().getId().equals(request.getId())) { + if (matchingTask != null) { + throw new IllegalArgumentException("Found more than one matching task for feature index builder job [" + request.getId() + "] when " + + "there should only be one."); + } + matchingTask = (FeatureIndexBuilderJobTask) task; + } + } + + if (matchingTask != null) { + operation.accept(matchingTask); + } + } + + @Override + protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request request, + ActionListener listener) { + + if (!licenseState.isMachineLearningAllowed()) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); + return; + } + + super.doExecute(task, request, listener); + } + + @Override + protected void taskOperation(StartFeatureIndexBuilderJobAction.Request request, FeatureIndexBuilderJobTask jobTask, + ActionListener listener) { + if (jobTask.getConfig().getId().equals(request.getId())) { + jobTask.start(listener); + } else { + listener.onFailure(new RuntimeException("ID of FeatureIndexBuilder task [" + jobTask.getConfig().getId() + + "] does not match request's ID [" + request.getId() + "]")); + } + } + + @Override + protected StartFeatureIndexBuilderJobAction.Response newResponse(StartFeatureIndexBuilderJobAction.Request request, + List tasks, List taskOperationFailures, + List failedNodeExceptions) { + + if (taskOperationFailures.isEmpty() == false) { + throw 
org.elasticsearch.ExceptionsHelper.convertToElastic(taskOperationFailures.get(0).getCause()); + } else if (failedNodeExceptions.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); + } + + // Either the job doesn't exist (the user didn't create it yet) or was deleted + // after the StartAPI executed. + // In either case, let the user know + if (tasks.size() == 0) { + throw new ResourceNotFoundException("Task for FeatureIndexBuilder Job [" + request.getId() + "] not found"); + } + + assert tasks.size() == 1; + + boolean allStarted = tasks.stream().allMatch(StartFeatureIndexBuilderJobAction.Response::isStarted); + return new StartFeatureIndexBuilderJobAction.Response(allStarted); + } + + @Override + protected StartFeatureIndexBuilderJobAction.Response readTaskResponse(StreamInput in) throws IOException { + return new StartFeatureIndexBuilderJobAction.Response(in); + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java new file mode 100644 index 0000000000000..e5f33c03fb8b2 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.apache.log4j.Logger; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.IndicesAdminClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; +import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static 
org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; + +public class FeatureIndexBuilderIndexer { + private static final String PIVOT_INDEX = "pivot-reviews"; + private static final String SOURCE_INDEX = "anonreviews"; + + private static final Logger logger = Logger.getLogger(FeatureIndexBuilderIndexer.class.getName()); + private FeatureIndexBuilderJob job; + private Client client; + + public FeatureIndexBuilderIndexer(FeatureIndexBuilderJob job, Client client) { + + this.job = job; + this.client = client; + logger.info("delete pivot-reviews"); + + } + + public synchronized void start() { + deleteIndex(client); + + createIndex(client); + + int runs = 0; + + Map after = null; + logger.info("start feature indexing"); + SearchResponse response; + + try { + response = runQuery(client, after); + + CompositeAggregation compositeAggregation = response.getAggregations().get("feature"); + after = compositeAggregation.afterKey(); + + while (after != null) { + indexBuckets(compositeAggregation); + + ++runs; + response = runQuery(client, after); + + compositeAggregation = response.getAggregations().get("feature"); + after = compositeAggregation.afterKey(); + + //after = null; + } + + indexBuckets(compositeAggregation); + } catch (InterruptedException | ExecutionException e) { + logger.error("Failed to build feature index", e); + } + + logger.info("Finished feature indexing"); + } + + private void indexBuckets(CompositeAggregation compositeAggregation) { + BulkRequest bulkIndexRequest = new BulkRequest(); + try { + for (Bucket b : compositeAggregation.getBuckets()) { + + InternalAvg avgAgg = b.getAggregations().get("avg_rating"); + + XContentBuilder builder; + builder = jsonBuilder(); + builder.startObject(); + builder.field("reviewerId", b.getKey().get("reviewerId")); + builder.field("avg_rating", avgAgg.getValue()); + builder.endObject(); + bulkIndexRequest.add(new IndexRequest(PIVOT_INDEX, DOC_TYPE).source(builder)); + + } + client.bulk(bulkIndexRequest); + } catch (IOException e) { + logger.error("Failed to index", e); + } + } + + /* + * Hardcoded demo case for pivoting + */ + + private static void deleteIndex(Client client) { + DeleteIndexRequest deleteIndex = new DeleteIndexRequest(PIVOT_INDEX); + + IndicesAdminClient adminClient = client.admin().indices(); + try { + adminClient.delete(deleteIndex).actionGet(); + } catch (IndexNotFoundException e) { + } + } + + private static void createIndex(Client client) { + + CreateIndexRequest request = new CreateIndexRequest(PIVOT_INDEX); + request.settings(Settings.builder() // <1> + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + ); + request.mapping(DOC_TYPE, // <1> + "{\n" + + " \"" + DOC_TYPE + "\": {\n" + + " \"properties\": {\n" + + " \"reviewerId\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"avg_rating\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}", // <2> + XContentType.JSON); + IndicesAdminClient adminClient = client.admin().indices(); + adminClient.create(request).actionGet(); + } + + private static SearchRequest buildFeatureQuery(Map after) { + QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + SearchRequest searchRequest = new SearchRequest(SOURCE_INDEX); + + List> sources = new ArrayList<>(); + sources.add(new TermsValuesSourceBuilder("reviewerId").field("reviewerId")); + + CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder("feature", sources); + compositeAggregation.size(1000); + + if (after != null) { + 
compositeAggregation.aggregateAfter(after); + } + + compositeAggregation.subAggregation(AggregationBuilders.avg("avg_rating").field("rating")); + compositeAggregation.subAggregation(AggregationBuilders.cardinality("dc_vendors").field("vendorId")); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.aggregation(compositeAggregation); + sourceBuilder.size(0); + sourceBuilder.query(queryBuilder); + searchRequest.source(sourceBuilder); + + return searchRequest; + } + + private static SearchResponse runQuery(Client client, Map after) throws InterruptedException, ExecutionException { + + SearchRequest request = buildFeatureQuery(after); + SearchResponse response = client.search(request).get(); + + return response; + } + + private static void indexResult() { + + + + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java new file mode 100644 index 0000000000000..4dd037d0d8d92 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; +import java.io.IOException; +import java.util.Objects; + +public class FeatureIndexBuilderJob implements XPackPlugin.XPackPersistentTaskParams { + + public static final String NAME = "xpack/feature_index_builder/job"; + + private FeatureIndexBuilderJobConfig config; + + private static final ParseField CONFIG = new ParseField("config"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER + = new ConstructingObjectParser<>(NAME, a -> new FeatureIndexBuilderJob((FeatureIndexBuilderJobConfig) a[0])); + + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> FeatureIndexBuilderJobConfig.PARSER.apply(p,c).build(), CONFIG); + } + + + public FeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config) { + this.config = Objects.requireNonNull(config); + } + + public FeatureIndexBuilderJob(StreamInput in) throws IOException { + this.config = new FeatureIndexBuilderJobConfig(in); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_0_0_alpha1; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + config.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CONFIG.getPreferredName(), config); + builder.endObject(); + return builder; + } + + public FeatureIndexBuilderJobConfig 
getConfig() { + return config; + } + + public static FeatureIndexBuilderJob fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + FeatureIndexBuilderJob that = (FeatureIndexBuilderJob) other; + + return Objects.equals(this.config, that.config); + } + + @Override + public int hashCode() { + return Objects.hash(config); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java new file mode 100644 index 0000000000000..19e10044e5c21 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * This class holds the configuration details of a feature index builder job + */ +public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentObject { + private static final String NAME = "xpack/feature_index_builder/jobconfig"; + + public static final ParseField ID = new ParseField("id"); + + private String id; + + public static final ObjectParser PARSER = new ObjectParser<>(NAME, false, + FeatureIndexBuilderJobConfig.Builder::new); + + static { + PARSER.declareString(FeatureIndexBuilderJobConfig.Builder::setId, ID); + } + + FeatureIndexBuilderJobConfig(String id) { + this.id = id; + } + + public FeatureIndexBuilderJobConfig(StreamInput in) throws IOException { + id = in.readString(); + } + + public FeatureIndexBuilderJobConfig() { + } + + public String getId() { + return id; + } + + public String getCron() { + return "*"; + } + + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (id != null) { + // to be replace by constant + builder.field("id", id); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + FeatureIndexBuilderJobConfig that = (FeatureIndexBuilderJobConfig) other; + + return 
Objects.equals(this.id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + public static class Builder implements Writeable, ToXContentObject { + private String id; + + public Builder() {} + + public Builder(FeatureIndexBuilderJobConfig job) { + this.setId(job.getId()); + } + + public static FeatureIndexBuilderJobConfig.Builder fromXContent(String id, XContentParser parser) { + FeatureIndexBuilderJobConfig.Builder config = FeatureIndexBuilderJobConfig.PARSER.apply(parser, null); + if (id != null) { + config.setId(id); + } + return config; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (id != null) { + builder.field(ID.getPreferredName(), id); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public FeatureIndexBuilderJobConfig build() { + return new FeatureIndexBuilderJobConfig(id); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java new file mode 100644 index 0000000000000..85debce5339e0 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; + +public class FeatureIndexBuilderJobStatus implements Map { + + @Override + public void clear() { + } + + @Override + public boolean containsKey(Object arg0) { + return false; + } + + @Override + public boolean containsValue(Object arg0) { + return false; + } + + @Override + public Set> entrySet() { + return null; + } + + @Override + public String get(Object arg0) { + return null; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set keySet() { + return null; + } + + @Override + public String put(String arg0, String arg1) { + return null; + } + + @Override + public void putAll(Map arg0) { + } + + @Override + public String remove(Object arg0) { + return null; + } + + @Override + public int size() { + return 0; + } + + @Override + public Collection values() { + return null; + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java new file mode 100644 index 0000000000000..ca546529414d5 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.apache.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; +import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; +import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction.Response; + +import java.util.Map; + +public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { + + private static final Logger logger = Logger.getLogger(FeatureIndexBuilderJobTask.class.getName()); + private final FeatureIndexBuilderIndexer indexer; + + static final String SCHEDULE_NAME = "xpack/feature_index_builder/job" + "/schedule"; + + public static class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { + private final Client client; + private final SchedulerEngine schedulerEngine; + private final ThreadPool threadPool; + + public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { + super(settings, "xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); + this.client = client; + this.schedulerEngine = schedulerEngine; + this.threadPool = threadPool; + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureIndexBuilderJob params, PersistentTaskState state) { + FeatureIndexBuilderJobTask buildTask = (FeatureIndexBuilderJobTask) task; + SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(SCHEDULE_NAME + "_" + params.getConfig().getId(), + next()); + + // Note that while the task is added to the scheduler here, the internal state will prevent + // it from doing any work until the task is "started" via the StartJob api + schedulerEngine.register(buildTask); + schedulerEngine.add(schedulerJob); + + logger.info("FeatureIndexBuilder job [" + params.getConfig().getId() + "] created."); + } + + static SchedulerEngine.Schedule next() { + return (startTime, now) -> { + return now + 1000; // to be fixed, hardcode something + }; + } + + @Override + protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, + PersistentTasksCustomMetaData.PersistentTask persistentTask, + Map headers) { + return new FeatureIndexBuilderJobTask(id, type, action, parentTaskId, persistentTask.getParams(), + (FeatureIndexBuilderJobStatus) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + } + } + + private final FeatureIndexBuilderJob 
job; + + public FeatureIndexBuilderJobTask(long id, String type, String action, TaskId parentTask, FeatureIndexBuilderJob job, + FeatureIndexBuilderJobStatus state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, + Map headers) { + super(id, type, action, "" + "_" + job.getConfig().getId(), parentTask, headers); + this.job = job; + logger.info("construct job task"); + // todo: simplistic implementation for now + this.indexer = new FeatureIndexBuilderIndexer(job, client); + } + + public FeatureIndexBuilderJobConfig getConfig() { + return job.getConfig(); + } + + public synchronized void start(ActionListener listener) { + indexer.start(); + listener.onResponse(new StartFeatureIndexBuilderJobAction.Response(true)); + } + + @Override + public void triggered(Event event) { + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java deleted file mode 100644 index 2a5746afe0b59..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestFeatureIndexBuildAction.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; - -import java.io.IOException; - -public class RestFeatureIndexBuildAction extends BaseRestHandler { - - public RestFeatureIndexBuildAction(Settings settings, RestController controller) { - super(settings); - controller.registerHandler(RestRequest.Method.POST, "/_xpack/ml/feature_index/_build", this); - } - - @Override - public String getName() { - return "ml_feature_index_builder_build_action"; - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - return null; - } - -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..8f08fbd2b5b05 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; + +import java.io.IOException; + +public class RestPutFeatureIndexBuilderJobAction extends BaseRestHandler { + public static final ParseField ID = new ParseField("id"); + + public RestPutFeatureIndexBuilderJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.PUT, FeatureIndexBuilder.BASE_PATH + "job/{id}/", this); + } + + @Override + public String getName() { + return "feature_index_builder_put_job_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(ID.getPreferredName()); + XContentParser parser = restRequest.contentParser(); + + PutFeatureIndexBuilderJobAction.Request request = PutFeatureIndexBuilderJobAction.Request.parseRequest(id, parser); + + return channel -> client.execute(PutFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..6d0e8e6f3c22e --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.rollup.RollupField; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; + +import java.io.IOException; + +public class RestStartFeatureIndexBuilderJobAction extends BaseRestHandler { + + public RestStartFeatureIndexBuilderJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH + "job/{id}/_start", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(RollupField.ID.getPreferredName()); + StartFeatureIndexBuilderJobAction.Request request = new StartFeatureIndexBuilderJobAction.Request(id); + + return channel -> client.execute(StartFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "ml_feature_index_builder_start_job_action"; + } + +} From d78d131d8757b1b4cce18253bc8886b32c281cb8 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 3 Aug 2018 13:55:59 +0200 Subject: [PATCH 03/49] do not reuse ML feature causing a conflict --- .../src/main/java/org/elasticsearch/xpack/core/XPackField.java | 2 ++ .../ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java | 2 +- .../featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java | 2 +- .../action/TransportPutFeatureIndexBuilderJobAction.java | 2 +- .../action/TransportStartFeatureIndexBuilderJobAction.java | 2 +- 5 files changed, 6 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 70eb047c8edef..04f11202a38f7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -31,6 +31,8 @@ public final class XPackField { public static final String SQL = "sql"; /** Name constant for the rollup feature. */ public static final String ROLLUP = "rollup"; + /** Name constant for the feature index builder feature. 
*/ + public static final String FIB = "fib"; private XPackField() {} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java index 40971cce1b9cb..9f6ca4daca8b5 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java @@ -28,7 +28,7 @@ public FeatureIndexBuilderFeatureSet(Settings settings, @Nullable XPackLicenseSt @Override public String name() { - return XPackField.MACHINE_LEARNING; // todo: own implementation?? + return XPackField.FIB; } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java index 53f762f1b5798..232fb7c54c42a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java @@ -18,6 +18,6 @@ public FeatureIndexBuilderFeatureSetUsage(StreamInput input) throws IOException } public FeatureIndexBuilderFeatureSetUsage(boolean available, boolean enabled) { - super(XPackField.MACHINE_LEARNING, available, enabled); + super(XPackField.FIB, available, enabled); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 6c99571bb8a7e..e4e17987d4049 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -60,7 +60,7 @@ protected PutFeatureIndexBuilderJobAction.Response newResponse() { protected void masterOperation(Request request, ClusterState clusterState, ActionListener listener) throws Exception { if (!licenseState.isMachineLearningAllowed()) { - listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); + listener.onFailure(LicenseUtils.newComplianceException(XPackField.FIB)); return; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java index 40662b4c39628..b6aacc98daf1a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ 
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -66,7 +66,7 @@ protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request re ActionListener listener) { if (!licenseState.isMachineLearningAllowed()) { - listener.onFailure(LicenseUtils.newComplianceException(XPackField.MACHINE_LEARNING)); + listener.onFailure(LicenseUtils.newComplianceException(XPackField.FIB)); return; } From 1a00847dc93ca1a03b58d0fb0257731f1590725a Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 10 Aug 2018 15:38:14 +0200 Subject: [PATCH 04/49] temporary merge PR 32743 (#32776) This is a temporary (squashed) commit of #32743 till ed4feb6, to be reverted and replaced by the final version of this PR --- .../docs/en/rest-api/rollup/get-job.asciidoc | 6 +- .../IndexerJobStats.java} | 68 ++-- .../job => indexing}/IndexerState.java | 2 +- .../xpack/core/indexing/Iteration.java | 62 +++ .../xpack/core/indexing/IterativeIndexer.java | 384 ++++++++++++++++++ .../rollup/action/GetRollupJobsAction.java | 14 +- .../core/rollup/job/RollupJobStatus.java | 1 + .../core/indexing/IndexerJobStatsTests.java | 34 ++ .../IndexerStateEnumTests.java | 2 +- .../core/indexing/IterativeIndexerTests.java | 133 ++++++ .../job/JobWrapperSerializingTests.java | 4 +- .../core/rollup/job/RollupJobStatsTests.java | 35 -- .../core/rollup/job/RollupJobStatusTests.java | 1 + .../xpack/rollup/job/IndexerUtils.java | 4 +- .../xpack/rollup/job/RollupIndexer.java | 346 ++-------------- .../xpack/rollup/job/RollupJobTask.java | 6 +- .../xpack/rollup/job/IndexerUtilsTests.java | 20 +- .../job/RollupIndexerIndexingTests.java | 2 +- .../rollup/job/RollupIndexerStateTests.java | 12 +- .../xpack/rollup/job/RollupJobTaskTests.java | 2 +- .../rest-api-spec/test/rollup/delete_job.yml | 6 +- .../rest-api-spec/test/rollup/get_jobs.yml | 6 +- .../rest-api-spec/test/rollup/put_job.yml | 2 +- .../elasticsearch/multi_node/RollupIT.java | 2 +- 24 files changed, 729 insertions(+), 425 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/{rollup/job/RollupJobStats.java => indexing/IndexerJobStats.java} (62%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/{rollup/job => indexing}/IndexerState.java (97%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/{rollup/job => indexing}/IndexerStateEnumTests.java (98%) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java diff --git a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc b/x-pack/docs/en/rest-api/rollup/get-job.asciidoc index 96053dbfea64f..1a90557106bfc 100644 --- a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/get-job.asciidoc @@ -99,7 +99,7 @@ Which will yield the following response: "stats" : { "pages_processed" : 0, "documents_processed" : 0, - "rollups_indexed" : 0, + "documents_indexed" : 0, "trigger_count" : 0 } } @@ -219,7 +219,7 @@ Which will yield the following response: 
"stats" : { "pages_processed" : 0, "documents_processed" : 0, - "rollups_indexed" : 0, + "documents_indexed" : 0, "trigger_count" : 0 } }, @@ -268,7 +268,7 @@ Which will yield the following response: "stats" : { "pages_processed" : 0, "documents_processed" : 0, - "rollups_indexed" : 0, + "documents_indexed" : 0, "trigger_count" : 0 } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java similarity index 62% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java index 06cfb520af552..f64ed5804535d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.rollup.job; +package org.elasticsearch.xpack.core.indexing; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -13,7 +13,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; - import java.io.IOException; import java.util.Objects; @@ -24,45 +23,46 @@ * and are only for external monitoring/reference. Statistics are not persisted with the job, so if the * allocated task is shutdown/restarted on a different node all the stats will reset. 
*/ -public class RollupJobStats implements ToXContentObject, Writeable { +public class IndexerJobStats implements ToXContentObject, Writeable { public static final ParseField NAME = new ParseField("job_stats"); private static ParseField NUM_PAGES = new ParseField("pages_processed"); - private static ParseField NUM_DOCUMENTS = new ParseField("documents_processed"); - private static ParseField NUM_ROLLUPS = new ParseField("rollups_indexed"); + private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); + // BWC for RollupJobStats + private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed").withDeprecation("rollups_indexed"); private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); private long numPages = 0; - private long numDocuments = 0; - private long numRollups = 0; + private long numInputDocuments = 0; + private long numOuputDocuments = 0; private long numInvocations = 0; - public static final ConstructingObjectParser PARSER = + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(), - args -> new RollupJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + args -> new IndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); - PARSER.declareLong(constructorArg(), NUM_DOCUMENTS); - PARSER.declareLong(constructorArg(), NUM_ROLLUPS); + PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); } - public RollupJobStats() { + public IndexerJobStats() { } - public RollupJobStats(long numPages, long numDocuments, long numRollups, long numInvocations) { + public IndexerJobStats(long numPages, long numDocuments, long numOuputDocuments, long numInvocations) { this.numPages = numPages; - this.numDocuments = numDocuments; - this.numRollups = numRollups; + this.numInputDocuments = numDocuments; + this.numOuputDocuments = numOuputDocuments; this.numInvocations = numInvocations; } - public RollupJobStats(StreamInput in) throws IOException { + public IndexerJobStats(StreamInput in) throws IOException { this.numPages = in.readVLong(); - this.numDocuments = in.readVLong(); - this.numRollups = in.readVLong(); + this.numInputDocuments = in.readVLong(); + this.numOuputDocuments = in.readVLong(); this.numInvocations = in.readVLong(); } @@ -71,15 +71,15 @@ public long getNumPages() { } public long getNumDocuments() { - return numDocuments; + return numInputDocuments; } public long getNumInvocations() { return numInvocations; } - public long getNumRollups() { - return numRollups; + public long getOutputDocuments() { + return numOuputDocuments; } public void incrementNumPages(long n) { @@ -89,7 +89,7 @@ public void incrementNumPages(long n) { public void incrementNumDocuments(long n) { assert(n >= 0); - numDocuments += n; + numInputDocuments += n; } public void incrementNumInvocations(long n) { @@ -97,20 +97,20 @@ public void incrementNumInvocations(long n) { numInvocations += n; } - public void incrementNumRollups(long n) { + public void incrementNumOutputDocuments(long n) { assert(n >= 0); - numRollups += n; + numOuputDocuments += n; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(numPages); - out.writeVLong(numDocuments); - out.writeVLong(numRollups); + out.writeVLong(numInputDocuments); + out.writeVLong(numOuputDocuments); 
out.writeVLong(numInvocations); } - public static RollupJobStats fromXContent(XContentParser parser) { + public static IndexerJobStats fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { @@ -122,8 +122,8 @@ public static RollupJobStats fromXContent(XContentParser parser) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(NUM_PAGES.getPreferredName(), numPages); - builder.field(NUM_DOCUMENTS.getPreferredName(), numDocuments); - builder.field(NUM_ROLLUPS.getPreferredName(), numRollups); + builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); + builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); builder.endObject(); return builder; @@ -139,18 +139,16 @@ public boolean equals(Object other) { return false; } - RollupJobStats that = (RollupJobStats) other; + IndexerJobStats that = (IndexerJobStats) other; return Objects.equals(this.numPages, that.numPages) - && Objects.equals(this.numDocuments, that.numDocuments) - && Objects.equals(this.numRollups, that.numRollups) + && Objects.equals(this.numInputDocuments, that.numInputDocuments) + && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) && Objects.equals(this.numInvocations, that.numInvocations); } @Override public int hashCode() { - return Objects.hash(numPages, numDocuments, numRollups, numInvocations); + return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations); } - } - diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java similarity index 97% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java index 6e211c1df9e3e..1b6b9a943cba2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.rollup.job; +package org.elasticsearch.xpack.core.indexing; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java new file mode 100644 index 0000000000000..5568ecd5ff806 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.indexing; + +import org.elasticsearch.action.index.IndexRequest; + +import java.util.List; + +/** + * Result object to hold the result of 1 iteration of iterative indexing. + * Acts as an interface between the implementation and the generic indexer. 
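
For illustration, a minimal sketch (not part of this patch) of how a concrete indexer's doProcess(SearchResponse) typically builds an Iteration: one page of hits becomes a list of index requests, together with the next position and a flag indicating whether the source is exhausted. The target index "feature_index", the "last_id" position key, and the Map-based position type are assumptions made for the example.

    // Hypothetical doProcess(...) inside an IterativeIndexer<Map<String, Object>> subclass.
    @Override
    protected Iteration<Map<String, Object>> doProcess(SearchResponse searchResponse) {
        SearchHit[] hits = searchResponse.getHits().getHits();
        List<IndexRequest> toIndex = new ArrayList<>();
        for (SearchHit hit : hits) {
            // a real implementation would transform the source document here
            toIndex.add(new IndexRequest("feature_index", "_doc").source(hit.getSourceAsMap()));
        }
        boolean done = hits.length == 0;
        // keep the previous position when exhausted, otherwise remember where to resume
        Map<String, Object> position = done ? getPosition()
                : Collections.singletonMap("last_id", (Object) hits[hits.length - 1].getId());
        return new Iteration<>(toIndex, position, done);
    }
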
+ */ +public class Iteration { + + private final boolean isDone; + private final JobPosition position; + private final List toIndex; + + /** + * Constructor for the result of 1 iteration. + * + * @param toIndex the list of requests to be indexed + * @param position the extracted, persistable position of the job required for the search phase + * @param isDone true if source is exhausted and job should go to sleep + * + * Note: toIndex.empty() != isDone due to possible filtering in the specific implementation + */ + public Iteration(List toIndex, JobPosition position, boolean isDone) { + this.toIndex = toIndex; + this.position = position; + this.isDone = isDone; + } + + /** + * Returns true if this indexing iteration is done and job should go into sleep mode. + */ + public boolean isDone() { + return isDone; + } + + /** + * Return the position of the job, a generic to be passed to the next query construction. + * + * @return the position + */ + public JobPosition getPosition() { + return position; + } + + /** + * List of requests to be passed to bulk indexing. + * + * @return List of index requests. + */ + public List getToIndex() { + return toIndex; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java new file mode 100644 index 0000000000000..9ec71b88e466c --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java @@ -0,0 +1,384 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.indexing; + +import org.apache.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; + +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicReference; + +/** + * An abstract class that builds an index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, + * it will create the index from the source index up to the last complete bucket that is allowed to be built (based on job position). + * Only one background job can run simultaneously and {@link #onFinish()} is called when the job + * finishes. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} is called if the indexer is + * aborted while a job is running. The indexer must be started ({@link #start()} to allow a background job to run when + * {@link #maybeTriggerAsyncJob(long)} is called. {@link #stop()} can be used to stop the background job without aborting the indexer. + * + * In a nutshell this is a 2 cycle engine: 1st it sends a query, 2nd it indexes documents based on the response, sends the next query, + * indexes, queries, indexes, ... until a condition lets the engine pause until the source provides new input. + * + * @param Type that defines a job position to be defined by the implementation. 
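
From the caller's perspective the engine reduces to a handful of calls. A rough usage sketch, assuming `indexer` is an instance of a concrete subclass and some external scheduler fires the trigger periodically:

    indexer.start();                                   // STOPPED -> STARTED
    boolean triggered = indexer.maybeTriggerAsyncJob(System.currentTimeMillis());
    // false if a previous run is still busy (INDEXING/STOPPING/ABORTING) or the indexer is STOPPED;
    // otherwise the query/index cycle runs on the executor until doProcess(...) reports it is done
    // and onFinish() is invoked.
    indexer.stop();                                    // INDEXING -> STOPPING; the run halts at its next checkpoint
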
+ */ +public abstract class IterativeIndexer { + private static final Logger logger = Logger.getLogger(IterativeIndexer.class.getName()); + + private final IndexerJobStats stats; + + private final AtomicReference state; + private final AtomicReference position; + private final Executor executor; + + protected IterativeIndexer(Executor executor, AtomicReference initialState, JobPosition initialPosition) { + this.executor = executor; + this.state = initialState; + this.position = new AtomicReference<>(initialPosition); + this.stats = new IndexerJobStats(); + } + + /** + * Get the current state of the indexer. + */ + public IndexerState getState() { + return state.get(); + } + + /** + * Get the current position of the indexer. + */ + public JobPosition getPosition() { + return position.get(); + } + + /** + * Get the stats of this indexer. + */ + public IndexerJobStats getStats() { + return stats; + } + + /** + * Sets the internal state to {@link IndexerState#STARTED} if the previous state + * was {@link IndexerState#STOPPED}. Setting the state to STARTED allows a job + * to run in the background when {@link #maybeTriggerAsyncJob(long)} is called. + * + * @return The new state for the indexer (STARTED, INDEXING or ABORTING if the + * job was already aborted). + */ + public synchronized IndexerState start() { + state.compareAndSet(IndexerState.STOPPED, IndexerState.STARTED); + return state.get(); + } + + /** + * Sets the internal state to {@link IndexerState#STOPPING} if an async job is + * running in the background and in such case {@link #onFinish()} will be called + * as soon as the background job detects that the indexer is stopped. If there + * is no job running when this function is called, the state is directly set to + * {@link IndexerState#STOPPED} and {@link #onFinish()} will never be called. + * + * @return The new state for the indexer (STOPPED, STOPPING or ABORTING if the + * job was already aborted). + */ + public synchronized IndexerState stop() { + IndexerState currentState = state.updateAndGet(previousState -> { + if (previousState == IndexerState.INDEXING) { + return IndexerState.STOPPING; + } else if (previousState == IndexerState.STARTED) { + return IndexerState.STOPPED; + } else { + return previousState; + } + }); + return currentState; + } + + /** + * Sets the internal state to {@link IndexerState#ABORTING}. It returns false if + * an async job is running in the background and in such case {@link #onAbort} + * will be called as soon as the background job detects that the indexer is + * aborted. If there is no job running when this function is called, it returns + * true and {@link #onAbort()} will never be called. + * + * @return true if the indexer is aborted, false if a background job is running + * and abort is delayed. + */ + public synchronized boolean abort() { + IndexerState prevState = state.getAndUpdate((prev) -> IndexerState.ABORTING); + return prevState == IndexerState.STOPPED || prevState == IndexerState.STARTED; + } + + /** + * Triggers a background job that builds the index asynchronously iff + * there is no other job that runs and the indexer is started + * ({@link IndexerState#STARTED}. 
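
In practice the trigger is expected to come from a scheduler. A hypothetical SchedulerEngine.Listener hook, in the style of the rollup task, could drive the indexer like this (the `job` and `indexer` fields are assumptions for the sketch):

    @Override
    public void triggered(SchedulerEngine.Event event) {
        // only react to this job's schedule entry
        if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) {
            indexer.maybeTriggerAsyncJob(System.currentTimeMillis());
        }
    }
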
+ * + * @param now + * The current time in milliseconds (used to limit the job to + * complete buckets) + * @return true if a job has been triggered, false otherwise + */ + public synchronized boolean maybeTriggerAsyncJob(long now) { + final IndexerState currentState = state.get(); + switch (currentState) { + case INDEXING: + case STOPPING: + case ABORTING: + logger.warn("Schedule was triggered for job [" + getJobId() + "], but prior indexer is still running."); + return false; + + case STOPPED: + logger.debug("Schedule was triggered for job [" + getJobId() + "] but job is stopped. Ignoring trigger."); + return false; + + case STARTED: + logger.debug("Schedule was triggered for job [" + getJobId() + "], state: [" + currentState + "]"); + stats.incrementNumInvocations(1); + onStart(now); + + if (state.compareAndSet(IndexerState.STARTED, IndexerState.INDEXING)) { + // fire off the search. Note this is async, the method will return from here + executor.execute(() -> doNextSearch(buildSearchRequest(), + ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc)))); + logger.debug("Beginning to index [" + getJobId() + "], state: [" + currentState + "]"); + return true; + } else { + logger.debug("Could not move from STARTED to INDEXING state because current state is [" + state.get() + "]"); + return false; + } + + default: + logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); + throw new IllegalStateException("Job encountered an illegal state [" + currentState + "]"); + } + } + + /** + * Called to get the Id of the job, used for logging. + * + * @return a string with the id of the job + */ + protected abstract String getJobId(); + + /** + * Called to process a response from the 1 search request in order to turn it into a {@link Iteration}. + * + * @param searchResponse response from the search phase. + * @return Iteration object to be passed to indexing phase. + */ + protected abstract Iteration doProcess(SearchResponse searchResponse); + + /** + * Called to build the next search request. + * + * @return SearchRequest to be passed to the search phase. + */ + protected abstract SearchRequest buildSearchRequest(); + + /** + * Called at startup after job has been triggered using {@link #maybeTriggerAsyncJob(long)} and the + * internal state is {@link IndexerState#STARTED}. + * + * @param now The current time in milliseconds passed through from {@link #maybeTriggerAsyncJob(long)} + */ + protected abstract void onStart(long now); + + /** + * Executes the {@link SearchRequest} and calls nextPhase with the + * response or the exception if an error occurs. + * + * @param request + * The search request to execute + * @param nextPhase + * Listener for the next phase + */ + protected abstract void doNextSearch(SearchRequest request, ActionListener nextPhase); + + /** + * Executes the {@link BulkRequest} and calls nextPhase with the + * response or the exception if an error occurs. + * + * @param request + * The bulk request to execute + * @param nextPhase + * Listener for the next phase + */ + protected abstract void doNextBulk(BulkRequest request, ActionListener nextPhase); + + /** + * Called periodically during the execution of a background job. Implementation + * should persists the state somewhere and continue the execution asynchronously + * using next. 
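
A minimal doSaveState(...) sketch, assuming a Map-based position as used by the rollup job; a real implementation would persist the IndexerState and position (for example as persistent task state) before resuming, the one requirement being that `next` is run once persistence has finished:

    @Override
    protected void doSaveState(IndexerState state, Map<String, Object> position, Runnable next) {
        // hypothetical no-op persistence (logger is an assumed field): log and resume immediately
        logger.debug("saving state [" + state + "], position [" + position + "]");
        next.run();
    }
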
+ * + * @param state + * The current state of the indexer + * @param position + * The current position of the indexer + * @param next + * Runnable for the next phase + */ + protected abstract void doSaveState(IndexerState state, JobPosition position, Runnable next); + + /** + * Called when a failure occurs in an async job causing the execution to stop. + * + * @param exc + * The exception + */ + protected abstract void onFailure(Exception exc); + + /** + * Called when a background job finishes. + */ + protected abstract void onFinish(); + + /** + * Called when a background job detects that the indexer is aborted causing the + * async execution to stop. + */ + protected abstract void onAbort(); + + private void finishWithFailure(Exception exc) { + doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); + } + + private IndexerState finishAndSetState() { + return state.updateAndGet(prev -> { + switch (prev) { + case INDEXING: + // ready for another job + return IndexerState.STARTED; + + case STOPPING: + // must be started again + return IndexerState.STOPPED; + + case ABORTING: + // abort and exit + onAbort(); + return IndexerState.ABORTING; // This shouldn't matter, since onAbort() will kill the task first + + case STOPPED: + // No-op. Shouldn't really be possible to get here (should have to go through + // STOPPING + // first which will be handled) but is harmless to no-op and we don't want to + // throw exception here + return IndexerState.STOPPED; + + default: + // any other state is unanticipated at this point + throw new IllegalStateException("Indexer job encountered an illegal state [" + prev + "]"); + } + }); + } + + private void onSearchResponse(SearchResponse searchResponse) { + try { + if (checkState(getState()) == false) { + return; + } + if (searchResponse.getShardFailures().length != 0) { + throw new RuntimeException("Shard failures encountered while running indexer for job [" + getJobId() + "]: " + + Arrays.toString(searchResponse.getShardFailures())); + } + + stats.incrementNumPages(1); + Iteration iteration = doProcess(searchResponse); + + if (iteration.isDone()) { + logger.debug("Finished indexing for job [" + getJobId() + "], saving state and shutting down."); + + // Change state first, then try to persist. This prevents in-progress + // STOPPING/ABORTING from + // being persisted as STARTED but then stop the job + doSaveState(finishAndSetState(), position.get(), this::onFinish); + return; + } + + final List docs = iteration.getToIndex(); + final BulkRequest bulkRequest = new BulkRequest(); + docs.forEach(bulkRequest::add); + + // TODO this might be a valid case, e.g. if implementation filters + assert bulkRequest.requests().size() > 0; + + doNextBulk(bulkRequest, ActionListener.wrap(bulkResponse -> { + // TODO we should check items in the response and move after accordingly to + // resume the failing buckets ? 
+ if (bulkResponse.hasFailures()) { + logger.warn("Error while attempting to bulk index documents: " + bulkResponse.buildFailureMessage()); + } + stats.incrementNumOutputDocuments(bulkResponse.getItems().length); + if (checkState(getState()) == false) { + return; + } + + JobPosition newPosition = iteration.getPosition(); + position.set(newPosition); + + onBulkResponse(bulkResponse, newPosition); + }, exc -> finishWithFailure(exc))); + } catch (Exception e) { + finishWithFailure(e); + } + } + + private void onBulkResponse(BulkResponse response, JobPosition position) { + try { + + ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithFailure); + // TODO probably something more intelligent than every-50 is needed + if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { + doSaveState(IndexerState.INDEXING, position, () -> doNextSearch(buildSearchRequest(), listener)); + } else { + doNextSearch(buildSearchRequest(), listener); + } + } catch (Exception e) { + finishWithFailure(e); + } + } + + /** + * Checks the {@link IndexerState} and returns false if the execution should be + * stopped. + */ + private boolean checkState(IndexerState currentState) { + switch (currentState) { + case INDEXING: + // normal state; + return true; + + case STOPPING: + logger.info("Indexer job encountered [" + IndexerState.STOPPING + "] state, halting indexer."); + doSaveState(finishAndSetState(), getPosition(), () -> { + }); + return false; + + case STOPPED: + return false; + + case ABORTING: + logger.info("Requested shutdown of indexer for job [" + getJobId() + "]"); + onAbort(); + return false; + + default: + // Anything other than indexing, aborting or stopping is unanticipated + logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); + throw new IllegalStateException("Indexer job encountered an illegal state [" + currentState + "]"); + } + } + +} \ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java index 50f7931508585..4bfd5b621e780 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java @@ -25,9 +25,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import java.io.IOException; @@ -204,20 +204,20 @@ public final String toString() { public static class JobWrapper implements Writeable, ToXContentObject { private final RollupJobConfig job; - private final RollupJobStats stats; + private final IndexerJobStats stats; private final RollupJobStatus status; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, a -> new JobWrapper((RollupJobConfig) a[0], - (RollupJobStats) a[1], (RollupJobStatus)a[2])); + (IndexerJobStats) a[1], (RollupJobStatus)a[2])); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> 
RollupJobConfig.fromXContent(p, null), CONFIG); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStats.PARSER::apply, STATS); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), IndexerJobStats.PARSER::apply, STATS); PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS); } - public JobWrapper(RollupJobConfig job, RollupJobStats stats, RollupJobStatus status) { + public JobWrapper(RollupJobConfig job, IndexerJobStats stats, RollupJobStatus status) { this.job = job; this.stats = stats; this.status = status; @@ -225,7 +225,7 @@ public JobWrapper(RollupJobConfig job, RollupJobStats stats, RollupJobStatus sta public JobWrapper(StreamInput in) throws IOException { this.job = new RollupJobConfig(in); - this.stats = new RollupJobStats(in); + this.stats = new IndexerJobStats(in); this.status = new RollupJobStatus(in); } @@ -240,7 +240,7 @@ public RollupJobConfig getJob() { return job; } - public RollupJobStats getStats() { + public IndexerJobStats getStats() { return stats; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java index 640385c9c80d5..0a2f046907c80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.HashMap; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java new file mode 100644 index 0000000000000..e60573d3ed071 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.indexing; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +public class IndexerJobStatsTests extends AbstractSerializingTestCase { + + @Override + protected IndexerJobStats createTestInstance() { + return randomStats(); + } + + @Override + protected Writeable.Reader instanceReader() { + return IndexerJobStats::new; + } + + @Override + protected IndexerJobStats doParseInstance(XContentParser parser) { + return IndexerJobStats.fromXContent(parser); + } + + public static IndexerJobStats randomStats() { + return new IndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong()); + } +} + diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java similarity index 98% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java index ec17a37e23b2b..329800c2f1a24 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.rollup.job; +package org.elasticsearch.xpack.core.indexing; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java new file mode 100644 index 0000000000000..85066cb42f519 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.indexing; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; + +public class IterativeIndexerTests extends ESTestCase { + + AtomicBoolean isFinished = new AtomicBoolean(false); + + private class MockIndexer extends IterativeIndexer { + + // test the execution order + private int step; + + protected MockIndexer(Executor executor, AtomicReference initialState, Integer initialPosition) { + super(executor, initialState, initialPosition); + } + + @Override + protected String getJobId() { + return "mock"; + } + + @Override + protected Iteration doProcess(SearchResponse searchResponse) { + assertThat(step, equalTo(3)); + ++step; + return new Iteration(Collections.emptyList(), 3, true); + } + + @Override + protected SearchRequest buildSearchRequest() { + assertThat(step, equalTo(1)); + ++step; + return null; + } + + @Override + protected void onStart(long now) { + assertThat(step, equalTo(0)); + ++step; + } + + @Override + protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { + assertThat(step, equalTo(2)); + ++step; + final SearchResponseSections sections = new SearchResponseSections(new SearchHits(new SearchHit[0], 0, 0), null, null, false, + null, null, 1); + + nextPhase.onResponse(new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null)); + } + + @Override + protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { + fail("should not be called"); + } + + @Override + protected void doSaveState(IndexerState state, Integer position, Runnable next) { + assertThat(step, equalTo(4)); + ++step; + next.run(); + } + + @Override + protected void onFailure(Exception exc) { + fail(exc.getMessage()); + } + + @Override + protected void onFinish() { + assertThat(step, equalTo(5)); + ++step; + isFinished.set(true); + } + + @Override + protected void onAbort() { + } + + public int getStep() { + return step; + } + + } + + public void testStateMachine() throws InterruptedException { + AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); + final ExecutorService executor = Executors.newFixedThreadPool(1); + + try { + + MockIndexer indexer = new MockIndexer(executor, state, 2); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + assertThat(indexer.getPosition(), equalTo(2)); + ESTestCase.awaitBusy(() -> isFinished.get()); + assertThat(indexer.getStep(), equalTo(6)); + assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); + assertThat(indexer.getStats().getNumPages(), equalTo(1L)); + assertThat(indexer.getStats().getOutputDocuments(), 
equalTo(0L)); + assertTrue(indexer.abort()); + } finally { + executor.shutdownNow(); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java index a0df63bc38dde..cb827c040801d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java @@ -8,6 +8,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; @@ -40,7 +42,7 @@ protected GetRollupJobsAction.JobWrapper createTestInstance() { } return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.randomRollupJobConfig(random()), - new RollupJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + new IndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), new RollupJobStatus(state, Collections.emptyMap(), randomBoolean())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java deleted file mode 100644 index 0091b21dc40d0..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.rollup.job; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; - -public class RollupJobStatsTests extends AbstractSerializingTestCase { - - @Override - protected RollupJobStats createTestInstance() { - return randomStats(); - } - - @Override - protected Writeable.Reader instanceReader() { - return RollupJobStats::new; - } - - @Override - protected RollupJobStats doParseInstance(XContentParser parser) { - return RollupJobStats.fromXContent(parser); - } - - public static RollupJobStats randomStats() { - return new RollupJobStats(randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong()); - } -} - diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java index 2c802a7e41dc3..f46bda788bf5b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.indexing.IndexerState; import java.util.HashMap; import java.util.Map; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 9119a5445d42e..594221d921488 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -14,10 +14,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.rollup.Rollup; import java.util.ArrayList; @@ -46,7 +46,7 @@ class IndexerUtils { * @param isUpgradedDocID `true` if this job is using the new ID scheme * @return A list of rolled documents derived from the response */ - static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, + static List processBuckets(CompositeAggregation agg, String rollupIndex, IndexerJobStats stats, GroupConfig groupConfig, String jobId, boolean isUpgradedDocID) { logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]"); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 87294706b3b7d..c2a07b47349aa 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -5,11 +5,6 @@ */ package org.elasticsearch.xpack.rollup.job; -import org.apache.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.unit.TimeValue; @@ -20,16 +15,16 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.indexing.Iteration; +import org.elasticsearch.xpack.core.indexing.IterativeIndexer; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; -import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -38,25 +33,13 @@ import java.util.concurrent.atomic.AtomicReference; /** - * An abstract class that builds a rollup index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, - * it will create the rollup index from the source index up to the last complete bucket that is allowed to be built (based on the current - * time and the delay set on the rollup job). Only one background job can run simultaneously and {@link #onFinish()} is called when the job - * finishes. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} is called if the indexer is - * aborted while a job is running. The indexer must be started ({@link #start()} to allow a background job to run when - * {@link #maybeTriggerAsyncJob(long)} is called. {@link #stop()} can be used to stop the background job without aborting the indexer. + * An abstract implementation of {@link IterativeIndexer} that builds a rollup index incrementally. 
*/ -public abstract class RollupIndexer { - private static final Logger logger = Logger.getLogger(RollupIndexer.class.getName()); - +public abstract class RollupIndexer extends IterativeIndexer > { static final String AGGREGATION_NAME = RollupField.NAME; private final RollupJob job; - private final RollupJobStats stats; - private final AtomicReference state; - private final AtomicReference> position; - private final Executor executor; protected final AtomicBoolean upgradedDocumentID; - private final CompositeAggregationBuilder compositeBuilder; private long maxBoundary; @@ -66,84 +49,16 @@ public abstract class RollupIndexer { * @param job The rollup job * @param initialState Initial state for the indexer * @param initialPosition The last indexed bucket of the task + * @param upgradedDocumentID whether job has updated IDs (for BWC) */ - RollupIndexer(Executor executor, RollupJob job, AtomicReference initialState, - Map initialPosition, AtomicBoolean upgradedDocumentID) { - this.executor = executor; + RollupIndexer(Executor executor, RollupJob job, AtomicReference initialState, Map initialPosition, + AtomicBoolean upgradedDocumentID) { + super(executor, initialState, initialPosition); this.job = job; - this.stats = new RollupJobStats(); - this.state = initialState; - this.position = new AtomicReference<>(initialPosition); this.compositeBuilder = createCompositeBuilder(job.getConfig()); this.upgradedDocumentID = upgradedDocumentID; } - /** - * Executes the {@link SearchRequest} and calls nextPhase with the response - * or the exception if an error occurs. - * - * @param request The search request to execute - * @param nextPhase Listener for the next phase - */ - protected abstract void doNextSearch(SearchRequest request, ActionListener nextPhase); - - /** - * Executes the {@link BulkRequest} and calls nextPhase with the response - * or the exception if an error occurs. - * - * @param request The bulk request to execute - * @param nextPhase Listener for the next phase - */ - protected abstract void doNextBulk(BulkRequest request, ActionListener nextPhase); - - /** - * Called periodically during the execution of a background job. Implementation should - * persists the state somewhere and continue the execution asynchronously using next. - * - * @param state The current state of the indexer - * @param position The current position of the indexer - * @param next Runnable for the next phase - */ - protected abstract void doSaveState(IndexerState state, Map position, Runnable next); - - /** - * Called when a failure occurs in an async job causing the execution to stop. - * @param exc The exception - */ - protected abstract void onFailure(Exception exc); - - /** - * Called when a background job finishes. - */ - protected abstract void onFinish(); - - /** - * Called when a background job detects that the indexer is aborted causing the async execution - * to stop. - */ - protected abstract void onAbort(); - - /** - * Get the current state of the indexer. - */ - public IndexerState getState() { - return state.get(); - } - - /** - * Get the current position of the indexer. - */ - public Map getPosition() { - return position.get(); - } - - /** - * Get the stats of this indexer. 
- */ - public RollupJobStats getStats() { - return stats; - } - /** * Returns if this job has upgraded it's ID scheme yet or not */ @@ -151,229 +66,28 @@ public boolean isUpgradedDocumentID() { return upgradedDocumentID.get(); } - /** - * Sets the internal state to {@link IndexerState#STARTED} if the previous state was {@link IndexerState#STOPPED}. Setting the state to - * STARTED allows a job to run in the background when {@link #maybeTriggerAsyncJob(long)} is called. - * @return The new state for the indexer (STARTED, INDEXING or ABORTING if the job was already aborted). - */ - public synchronized IndexerState start() { - state.compareAndSet(IndexerState.STOPPED, IndexerState.STARTED); - return state.get(); - } - - /** - * Sets the internal state to {@link IndexerState#STOPPING} if an async job is running in the background and in such case - * {@link #onFinish()} will be called as soon as the background job detects that the indexer is stopped. If there is no job running when - * this function is called, the state is directly set to {@link IndexerState#STOPPED} and {@link #onFinish()} will never be called. - * @return The new state for the indexer (STOPPED, STOPPING or ABORTING if the job was already aborted). - */ - public synchronized IndexerState stop() { - IndexerState currentState = state.updateAndGet(previousState -> { - if (previousState == IndexerState.INDEXING) { - return IndexerState.STOPPING; - } else if (previousState == IndexerState.STARTED) { - return IndexerState.STOPPED; - } else { - return previousState; - } - }); - return currentState; - } - - /** - * Sets the internal state to {@link IndexerState#ABORTING}. It returns false if an async job is running in the background and in such - * case {@link #onAbort} will be called as soon as the background job detects that the indexer is aborted. If there is no job running - * when this function is called, it returns true and {@link #onAbort()} will never be called. - * @return true if the indexer is aborted, false if a background job is running and abort is delayed. - */ - public synchronized boolean abort() { - IndexerState prevState = state.getAndUpdate((prev) -> IndexerState.ABORTING); - return prevState == IndexerState.STOPPED || prevState == IndexerState.STARTED; - } - - /** - * Triggers a background job that builds the rollup index asynchronously iff there is no other job that runs - * and the indexer is started ({@link IndexerState#STARTED}. - * - * @param now The current time in milliseconds (used to limit the job to complete buckets) - * @return true if a job has been triggered, false otherwise - */ - public synchronized boolean maybeTriggerAsyncJob(long now) { - final IndexerState currentState = state.get(); - switch (currentState) { - case INDEXING: - case STOPPING: - case ABORTING: - logger.warn("Schedule was triggered for rollup job [" + job.getConfig().getId() + "], but prior indexer is still running."); - return false; - - case STOPPED: - logger.debug("Schedule was triggered for rollup job [" + job.getConfig().getId() - + "] but job is stopped. Ignoring trigger."); - return false; - - case STARTED: - logger.debug("Schedule was triggered for rollup job [" + job.getConfig().getId() + "], state: [" + currentState + "]"); - // Only valid time to start indexing is when we are STARTED but not currently INDEXING. - stats.incrementNumInvocations(1); - - // rounds the current time to its current bucket based on the date histogram interval. - // this is needed to exclude buckets that can still receive new documents. 
- DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); - long rounded = dateHisto.createRounding().round(now); - if (dateHisto.getDelay() != null) { - // if the job has a delay we filter all documents that appear before it. - maxBoundary = rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis(); - } else { - maxBoundary = rounded; - } - - if (state.compareAndSet(IndexerState.STARTED, IndexerState.INDEXING)) { - // fire off the search. Note this is async, the method will return from here - executor.execute(() -> doNextSearch(buildSearchRequest(), - ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc)))); - logger.debug("Beginning to rollup [" + job.getConfig().getId() + "], state: [" + currentState + "]"); - return true; - } else { - logger.debug("Could not move from STARTED to INDEXING state because current state is [" + state.get() + "]"); - return false; - } - - default: - logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); - throw new IllegalStateException("Rollup job encountered an illegal state [" + currentState + "]"); - } - } - - /** - * Checks the {@link IndexerState} and returns false if the execution - * should be stopped. - */ - private boolean checkState(IndexerState currentState) { - switch (currentState) { - case INDEXING: - // normal state; - return true; - - case STOPPING: - logger.info("Rollup job encountered [" + IndexerState.STOPPING + "] state, halting indexer."); - doSaveState(finishAndSetState(), getPosition(), () -> {}); - return false; - - case STOPPED: - return false; - - case ABORTING: - logger.info("Requested shutdown of indexer for job [" + job.getConfig().getId() + "]"); - onAbort(); - return false; - - default: - // Anything other than indexing, aborting or stopping is unanticipated - logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); - throw new IllegalStateException("Rollup job encountered an illegal state [" + currentState + "]"); - } + @Override + protected String getJobId() { + return job.getConfig().getId(); } - private void onBulkResponse(BulkResponse response, Map after) { - // TODO we should check items in the response and move after accordingly to resume the failing buckets ? - stats.incrementNumRollups(response.getItems().length); - if (response.hasFailures()) { - logger.warn("Error while attempting to bulk index rollup documents: " + response.buildFailureMessage()); - } - try { - if (checkState(getState()) == false) { - return ; - } - position.set(after); - ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithFailure); - // TODO probably something more intelligent than every-50 is needed - if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { - doSaveState(IndexerState.INDEXING, after, () -> doNextSearch(buildSearchRequest(), listener)); - } else { - doNextSearch(buildSearchRequest(), listener); - } - } catch (Exception e) { - finishWithFailure(e); + @Override + protected void onStart(long now) { + // this is needed to exclude buckets that can still receive new documents. + DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); + long rounded = dateHisto.createRounding().round(now); + if (dateHisto.getDelay() != null) { + // if the job has a delay we filter all documents that appear before it. 
+ maxBoundary = rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis(); + } else { + maxBoundary = rounded; } } - - private void onSearchResponse(SearchResponse searchResponse) { - try { - if (checkState(getState()) == false) { - return ; - } - if (searchResponse.getShardFailures().length != 0) { - throw new RuntimeException("Shard failures encountered while running indexer for rollup job [" - + job.getConfig().getId() + "]: " + Arrays.toString(searchResponse.getShardFailures())); - } - final CompositeAggregation response = searchResponse.getAggregations().get(AGGREGATION_NAME); - if (response == null) { - throw new IllegalStateException("Missing composite response for query: " + compositeBuilder.toString()); - } - stats.incrementNumPages(1); - if (response.getBuckets().isEmpty()) { - // this is the end... - logger.debug("Finished indexing for job [" + job.getConfig().getId() + "], saving state and shutting down."); - - // Change state first, then try to persist. This prevents in-progress STOPPING/ABORTING from - // being persisted as STARTED but then stop the job - doSaveState(finishAndSetState(), position.get(), this::onFinish); - return; - } - - final BulkRequest bulkRequest = new BulkRequest(); + + @Override + protected SearchRequest buildSearchRequest() { // Indexer is single-threaded, and only place that the ID scheme can get upgraded is doSaveState(), so // we can pass down the boolean value rather than the atomic here - final List docs = IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), - stats, job.getConfig().getGroupConfig(), job.getConfig().getId(), upgradedDocumentID.get()); - docs.forEach(bulkRequest::add); - assert bulkRequest.requests().size() > 0; - doNextBulk(bulkRequest, - ActionListener.wrap( - bulkResponse -> onBulkResponse(bulkResponse, response.afterKey()), - exc -> finishWithFailure(exc) - ) - ); - } catch(Exception e) { - finishWithFailure(e); - } - } - - private void finishWithFailure(Exception exc) { - doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); - } - - private IndexerState finishAndSetState() { - return state.updateAndGet( - prev -> { - switch (prev) { - case INDEXING: - // ready for another job - return IndexerState.STARTED; - - case STOPPING: - // must be started again - return IndexerState.STOPPED; - - case ABORTING: - // abort and exit - onAbort(); - return IndexerState.ABORTING; // This shouldn't matter, since onAbort() will kill the task first - - case STOPPED: - // No-op. 
Shouldn't really be possible to get here (should have to go through STOPPING - // first which will be handled) but is harmless to no-op and we don't want to throw exception here - return IndexerState.STOPPED; - - default: - // any other state is unanticipated at this point - throw new IllegalStateException("Rollup job encountered an illegal state [" + prev + "]"); - } - }); - } - - private SearchRequest buildSearchRequest() { final Map position = getPosition(); SearchSourceBuilder searchSource = new SearchSourceBuilder() .size(0) @@ -383,6 +97,16 @@ private SearchRequest buildSearchRequest() { .aggregation(compositeBuilder.aggregateAfter(position)); return new SearchRequest(job.getConfig().getIndexPattern()).source(searchSource); } + + @Override + protected Iteration> doProcess(SearchResponse searchResponse) { + final CompositeAggregation response = searchResponse.getAggregations().get(AGGREGATION_NAME); + + return new Iteration<>( + IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), getStats(), + job.getConfig().getGroupConfig(), job.getConfig().getId(), upgradedDocumentID.get()), + response.afterKey(), response.getBuckets().isEmpty()); + } /** * Creates a skeleton {@link CompositeAggregationBuilder} from the provided job config. diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 65362f9ad9dd3..4898e099319b5 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -25,13 +25,13 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; -import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.rollup.Rollup; @@ -218,7 +218,7 @@ public Status getStatus() { * Gets the stats for this task. 
* @return The stats of this task */ - public RollupJobStats getStats() { + public IndexerJobStats getStats() { return indexer.getStats(); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index e8c66f7e8c118..5a16740f94611 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -35,12 +35,12 @@ import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; -import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTime; import org.mockito.stubbing.Answer; @@ -64,7 +64,7 @@ public class IndexerUtilsTests extends AggregatorTestCase { public void testMissingFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupJobStats stats = new RollupJobStats(0, 0, 0, 0); + IndexerJobStats stats = new IndexerJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -126,7 +126,7 @@ public void testMissingFields() throws IOException { public void testCorrectFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -193,7 +193,7 @@ public void testCorrectFields() throws IOException { public void testNumericTerms() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -249,7 +249,7 @@ public void testNumericTerms() throws IOException { public void testEmptyCounts() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); String timestampField = "ts"; String valueField = "the_avg"; @@ -355,7 +355,7 @@ public void testKeyOrderingOldID() { // The content of the config don't actually matter for this test // because the test is just looking at agg keys GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(123L, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", false); + List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", false); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("1237859798")); } @@ -399,7 +399,7 @@ public void testKeyOrderingNewID() { }); GroupConfig 
groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1L, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true); + List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", true); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("foo$c9LcrFqeFW92uN_Z7sv1hA")); } @@ -449,7 +449,7 @@ public void testKeyOrderingNewIDLong() { }); GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true); + List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", true); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("foo$VAFKZpyaEqYRPLyic57_qw")); } @@ -476,14 +476,14 @@ public void testNullKeys() { }); GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), randomHistogramGroupConfig(random()), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", randomBoolean()); + List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", randomBoolean()); assertThat(docs.size(), equalTo(1)); assertFalse(Strings.isNullOrEmpty(docs.get(0).id())); } public void testMissingBuckets() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); String metricField = "metric_field"; String valueField = "value_field"; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 6d29ee9f9ba6d..55f1cfbdbb29c 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -50,10 +50,10 @@ import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; -import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index 955dcbc2beb48..c74ecbadf4fbe 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; -import 
org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.mockito.stubbing.Answer; @@ -639,7 +639,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); + assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -743,7 +743,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); + assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -763,7 +763,7 @@ public void testSearchShardFailure() throws Exception { Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); Consumer failureConsumer = e -> { - assertThat(e.getMessage(), startsWith("Shard failures encountered while running indexer for rollup job")); + assertThat(e.getMessage(), startsWith("Shard failures encountered while running indexer for job")); isFinished.set(true); }; @@ -786,7 +786,7 @@ public void testSearchShardFailure() throws Exception { // Note: no pages processed, no docs were indexed assertThat(indexer.getStats().getNumPages(), equalTo(0L)); - assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); + assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -896,7 +896,7 @@ protected void doNextBulk(BulkRequest request, ActionListener next assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); + assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 13290f09e8eb8..c5f20b06a2314 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -19,11 +19,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; -import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 298cf27fa2f9d..d157fd61138d6 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -66,7 +66,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -113,7 +113,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -160,7 +160,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index f3fa8114ddbd0..ec2c6f7581038 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -67,7 +67,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -178,7 +178,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -204,7 +204,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index 516be25be2a2d..e4d635bbe3995 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -67,7 +67,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - rollups_indexed: 0 + documents_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index 43ad4dc0a45a2..a26f3cfb2b213 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -156,7 +156,7 @@ public void testBigRollup() throws Exception { Map job = getJob(getRollupJobResponse, "rollup-job-test"); if (job != null) { assertThat(ObjectPath.eval("status.job_state", job), equalTo("started")); - assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41)); + assertThat(ObjectPath.eval("stats.documents_indexed", job), equalTo(41)); } }, 30L, TimeUnit.SECONDS); From e6d58c5df3548705f0bcf3a74af5c4c19baccc00 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 14 Aug 2018 16:12:35 +0200 Subject: [PATCH 05/49] [ML-Dataframe] Feature/dataframe basictests (#32783) this change enables testing as well as adds basic tests --- docs/reference/rest-api/info.asciidoc | 5 + .../ml-feature-index-builder/build.gradle | 10 +- .../FeatureIndexBuilder.java | 4 +- .../PutFeatureIndexBuilderJobAction.java | 7 +- ...nsportPutFeatureIndexBuilderJobAction.java | 1 - ...portStartFeatureIndexBuilderJobAction.java | 28 ++--- .../job/FeatureIndexBuilderJob.java | 24 ++--- 
.../job/FeatureIndexBuilderJobConfig.java | 86 ++++----------- .../job/FeatureIndexBuilderJobState.java | 100 ++++++++++++++++++ .../job/FeatureIndexBuilderJobStatus.java | 73 ------------- .../job/FeatureIndexBuilderJobTask.java | 23 ++-- .../RestPutFeatureIndexBuilderJobAction.java | 2 +- ...tureIndexBuilderJobActionRequestTests.java | 47 ++++++++ ...tartFeatureIndexBuilderJobActionTests.java | 21 ++++ 14 files changed, 245 insertions(+), 186 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index 1cf4ab563b185..643b7007a2af4 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -62,6 +62,11 @@ Example response: "status" : "active" }, "features" : { + "fib" : { + "description" : "Time series feature index creation", + "available" : false, + "enabled" : true + }, "graph" : { "description" : "Graph Data Exploration for the Elastic Stack", "available" : false, diff --git a/x-pack/plugin/ml-feature-index-builder/build.gradle b/x-pack/plugin/ml-feature-index-builder/build.gradle index f92709b528643..30d4cd8a60262 100644 --- a/x-pack/plugin/ml-feature-index-builder/build.gradle +++ b/x-pack/plugin/ml-feature-index-builder/build.gradle @@ -3,7 +3,6 @@ import org.elasticsearch.gradle.BuildPlugin evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' - esplugin { name 'ml-feature-index-builder' description 'A plugin to build feature indexes' @@ -11,13 +10,18 @@ esplugin { extendedPlugins = ['x-pack-core'] } +compileJava.options.compilerArgs << "-Xlint:-rawtypes" +compileTestJava.options.compilerArgs << "-Xlint:-rawtypes" + dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + compileOnly project(path: xpackModule('core'), configuration: 'shadow') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } - +run { + plugin xpackModule('core') +} integTest.enabled = false diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index 197a2f9f29350..e209bd5e34038 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -30,8 +30,6 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.rollup.RollupField; -import org.elasticsearch.xpack.core.rollup.job.RollupJob; import 
org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; @@ -58,7 +56,7 @@ public class FeatureIndexBuilder extends Plugin implements ActionPlugin, Persist public static final String NAME = "feature_index_builder"; public static final String BASE_PATH = "/_xpack/feature_index_builder/"; - public static final String TASK_THREAD_POOL_NAME = "feature_index_builder_indexing"; + public static final String TASK_THREAD_POOL_NAME = "ml_feature_index_builder_indexing"; // list of headers that will be stored when a job is created public static final Set HEADER_FILTERS = new HashSet<>( diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java index 22e3b2a6ba2c7..a05b526483bec 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java @@ -48,9 +48,8 @@ public Request() { } - public static Request parseRequest(String id, XContentParser parser) { - FeatureIndexBuilderJobConfig.Builder config = FeatureIndexBuilderJobConfig.Builder.fromXContent(id, parser); - return new Request(config.build()); + public static Request fromXContent(final XContentParser parser, final String id) throws IOException { + return new Request(FeatureIndexBuilderJobConfig.fromXContent(parser, id)); } @Override @@ -107,7 +106,7 @@ protected RequestBuilder(ElasticsearchClient client, PutFeatureIndexBuilderJobAc super(client, action, new Request()); } } - + public static class Response extends AcknowledgedResponse { public Response() { super(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index e4e17987d4049..16c541db7a01d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -69,7 +69,6 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); startPersistentTask(job, listener, persistentTasksService); - } private static FeatureIndexBuilderJob createFeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config, ThreadPool threadPool) { diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java index b6aacc98daf1a..aaaf695e346d9 100644 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -29,28 +29,30 @@ import java.util.function.Consumer; public class TransportStartFeatureIndexBuilderJobAction extends - TransportTasksAction { + TransportTasksAction { private final XPackLicenseState licenseState; -@Inject -public TransportStartFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, - ActionFilters actionFilters, ClusterService clusterService, XPackLicenseState licenseState) { -super(settings, StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, - StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); -this.licenseState = licenseState; -} + @Inject + public TransportStartFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, XPackLicenseState licenseState) { + super(settings, StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, + StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); + this.licenseState = licenseState; + } @Override protected void processTasks(StartFeatureIndexBuilderJobAction.Request request, Consumer operation) { FeatureIndexBuilderJobTask matchingTask = null; - + // todo: re-factor, see rollup TransportTaskHelper for (Task task : taskManager.getTasks().values()) { - if (task instanceof FeatureIndexBuilderJobTask && ((FeatureIndexBuilderJobTask)task).getConfig().getId().equals(request.getId())) { + if (task instanceof FeatureIndexBuilderJobTask + && ((FeatureIndexBuilderJobTask) task).getConfig().getId().equals(request.getId())) { if (matchingTask != null) { - throw new IllegalArgumentException("Found more than one matching task for feature index builder job [" + request.getId() + "] when " + - "there should only be one."); + throw new IllegalArgumentException("Found more than one matching task for feature index builder job [" + request.getId() + + "] when " + "there should only be one."); } matchingTask = (FeatureIndexBuilderJobTask) task; } @@ -69,7 +71,7 @@ protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request re listener.onFailure(LicenseUtils.newComplianceException(XPackField.FIB)); return; } - + super.doExecute(task, request, listener); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java index 4dd037d0d8d92..16a4163e8135a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java @@ -14,26 +14,26 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.XPackPlugin; + import java.io.IOException; import java.util.Objects; public class FeatureIndexBuilderJob 
implements XPackPlugin.XPackPersistentTaskParams { public static final String NAME = "xpack/feature_index_builder/job"; - + private FeatureIndexBuilderJobConfig config; - + private static final ParseField CONFIG = new ParseField("config"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER - = new ConstructingObjectParser<>(NAME, a -> new FeatureIndexBuilderJob((FeatureIndexBuilderJobConfig) a[0])); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + a -> new FeatureIndexBuilderJob((FeatureIndexBuilderJobConfig) a[0])); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> FeatureIndexBuilderJobConfig.PARSER.apply(p,c).build(), CONFIG); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> FeatureIndexBuilderJobConfig.fromXContent(p, null), + CONFIG); } - public FeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config) { this.config = Objects.requireNonNull(config); } @@ -41,7 +41,7 @@ public FeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config) { public FeatureIndexBuilderJob(StreamInput in) throws IOException { this.config = new FeatureIndexBuilderJobConfig(in); } - + @Override public String getWriteableName() { return NAME; @@ -68,11 +68,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public FeatureIndexBuilderJobConfig getConfig() { return config; } - + public static FeatureIndexBuilderJob fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - + @Override public boolean equals(Object other) { if (this == other) { @@ -87,7 +87,7 @@ public boolean equals(Object other) { return Objects.equals(this.config, that.config); } - + @Override public int hashCode() { return Objects.hash(config); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java index 19e10044e5c21..645a97ab8d928 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java @@ -6,48 +6,49 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; - import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * This class holds the configuration details of a feature index builder job */ public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentObject { - private static final String NAME = 
"xpack/feature_index_builder/jobconfig"; - public static final ParseField ID = new ParseField("id"); + private static final String NAME = "xpack/feature_index_builder/jobconfig"; + private static final ParseField ID = new ParseField("id"); - private String id; + private final String id; - public static final ObjectParser PARSER = new ObjectParser<>(NAME, false, - FeatureIndexBuilderJobConfig.Builder::new); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, + (args, optionalId) -> { + String id = args[0] != null ? (String) args[0] : optionalId; + return new FeatureIndexBuilderJobConfig(id); + }); static { - PARSER.declareString(FeatureIndexBuilderJobConfig.Builder::setId, ID); + PARSER.declareString(optionalConstructorArg(), ID); } - FeatureIndexBuilderJobConfig(String id) { + public FeatureIndexBuilderJobConfig(final String id) { this.id = id; } - public FeatureIndexBuilderJobConfig(StreamInput in) throws IOException { + public FeatureIndexBuilderJobConfig(final StreamInput in) throws IOException { id = in.readString(); } - public FeatureIndexBuilderJobConfig() { - } - public String getId() { return id; } @@ -56,16 +57,13 @@ public String getCron() { return "*"; } - public void writeTo(StreamOutput out) throws IOException { + public void writeTo(final StreamOutput out) throws IOException { out.writeString(id); } - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - if (id != null) { - // to be replace by constant - builder.field("id", id); - } + builder.field(ID.getPreferredName(), id); builder.endObject(); return builder; } @@ -85,7 +83,7 @@ public boolean equals(Object other) { return false; } - FeatureIndexBuilderJobConfig that = (FeatureIndexBuilderJobConfig) other; + final FeatureIndexBuilderJobConfig that = (FeatureIndexBuilderJobConfig) other; return Objects.equals(this.id, that.id); } @@ -100,48 +98,8 @@ public String toString() { return Strings.toString(this, true, true); } - public static class Builder implements Writeable, ToXContentObject { - private String id; - - public Builder() {} - - public Builder(FeatureIndexBuilderJobConfig job) { - this.setId(job.getId()); - } - - public static FeatureIndexBuilderJobConfig.Builder fromXContent(String id, XContentParser parser) { - FeatureIndexBuilderJobConfig.Builder config = FeatureIndexBuilderJobConfig.PARSER.apply(parser, null); - if (id != null) { - config.setId(id); - } - return config; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (id != null) { - builder.field(ID.getPreferredName(), id); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } - - public FeatureIndexBuilderJobConfig build() { - return new FeatureIndexBuilderJobConfig(id); - } + public static FeatureIndexBuilderJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) + throws IOException { + return PARSER.parse(parser, optionalJobId); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java 
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java new file mode 100644 index 0000000000000..5634910e6c529 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.indexing.IndexerState; + +import java.io.IOException; +import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskState { + public static final String NAME = "xpack/feature_index_builder/job"; + + private final IndexerState state; + + private static final ParseField STATE = new ParseField("job_state"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> new FeatureIndexBuilderJobState((IndexerState) args[0])); + + static { + PARSER.declareField(constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return IndexerState.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, STATE, ObjectParser.ValueType.STRING); + } + + public FeatureIndexBuilderJobState(IndexerState state) { + this.state = state; + } + + public FeatureIndexBuilderJobState(StreamInput in) throws IOException { + state = IndexerState.fromStream(in); + } + + public IndexerState getJobState() { + return state; + } + + public static FeatureIndexBuilderJobState fromXContent(XContentParser parser) { + try { + return PARSER.parse(parser, null); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(STATE.getPreferredName(), state.value()); + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + state.writeTo(out); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + FeatureIndexBuilderJobState that = (FeatureIndexBuilderJobState) other; + + return Objects.equals(this.state, that.state); + } + + @Override + public int hashCode() { + return Objects.hash(state); + } +} \ No newline at end of file diff --git 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java deleted file mode 100644 index 85debce5339e0..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStatus.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.job; - -import java.util.Collection; -import java.util.Map; -import java.util.Set; - -public class FeatureIndexBuilderJobStatus implements Map { - - @Override - public void clear() { - } - - @Override - public boolean containsKey(Object arg0) { - return false; - } - - @Override - public boolean containsValue(Object arg0) { - return false; - } - - @Override - public Set> entrySet() { - return null; - } - - @Override - public String get(Object arg0) { - return null; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set keySet() { - return null; - } - - @Override - public String put(String arg0, String arg1) { - return null; - } - - @Override - public void putAll(Map arg0) { - } - - @Override - public String remove(Object arg0) { - return null; - } - - @Override - public int size() { - return 0; - } - - @Override - public Collection values() { - return null; - } - -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index ca546529414d5..a4de3927e5bc1 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -17,7 +17,6 @@ import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; @@ -32,13 +31,14 @@ public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implemen private final FeatureIndexBuilderIndexer indexer; static final String SCHEDULE_NAME = "xpack/feature_index_builder/job" + "/schedule"; - + public static class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { private final Client client; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { + public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, + ThreadPool threadPool) { 
super(settings, "xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; @@ -48,17 +48,17 @@ public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client c @Override protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureIndexBuilderJob params, PersistentTaskState state) { FeatureIndexBuilderJobTask buildTask = (FeatureIndexBuilderJobTask) task; - SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(SCHEDULE_NAME + "_" + params.getConfig().getId(), - next()); + SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); - // Note that while the task is added to the scheduler here, the internal state will prevent + // Note that while the task is added to the scheduler here, the internal state + // will prevent // it from doing any work until the task is "started" via the StartJob api schedulerEngine.register(buildTask); schedulerEngine.add(schedulerJob); logger.info("FeatureIndexBuilder job [" + params.getConfig().getId() + "] created."); } - + static SchedulerEngine.Schedule next() { return (startTime, now) -> { return now + 1000; // to be fixed, hardcode something @@ -67,17 +67,16 @@ static SchedulerEngine.Schedule next() { @Override protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTasksCustomMetaData.PersistentTask persistentTask, - Map headers) { + PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { return new FeatureIndexBuilderJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (FeatureIndexBuilderJobStatus) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + (FeatureIndexBuilderJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); } } private final FeatureIndexBuilderJob job; - + public FeatureIndexBuilderJobTask(long id, String type, String action, TaskId parentTask, FeatureIndexBuilderJob job, - FeatureIndexBuilderJobStatus state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, + FeatureIndexBuilderJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { super(id, type, action, "" + "_" + job.getConfig().getId(), parentTask, headers); this.job = job; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java index 8f08fbd2b5b05..afcda67b241d2 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java @@ -37,7 +37,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); - PutFeatureIndexBuilderJobAction.Request request = PutFeatureIndexBuilderJobAction.Request.parseRequest(id, parser); + PutFeatureIndexBuilderJobAction.Request request = PutFeatureIndexBuilderJobAction.Request.fromXContent(parser, id); return channel -> 
client.execute(PutFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java new file mode 100644 index 0000000000000..14bedcbe909b1 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.junit.Before; + +import java.io.IOException; + +public class PutFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableXContentTestCase { + + private String jobId; + + @Before + public void setupJobID() { + jobId = randomAlphaOfLengthBetween(1,10); + } + + @Override + protected Request doParseInstance(XContentParser parser) throws IOException { + return Request.fromXContent(parser, jobId); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected Request createTestInstance() { + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(randomAlphaOfLengthBetween(1,10)); + return new Request(config); + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java new file mode 100644 index 0000000000000..cec3458e8445a --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java @@ -0,0 +1,21 @@ +package org.elasticsearch.xpack.ml.featureindexbuilder.action; +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction.Request; + +public class StartFeatureIndexBuilderJobActionTests extends AbstractStreamableTestCase { + @Override + protected Request createTestInstance() { + return new Request(randomAlphaOfLengthBetween(1, 20)); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } +} From a0b72c97cdc0e97e0a3aa385dcccc7cbec64dbd1 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 5 Sep 2018 14:11:36 +0200 Subject: [PATCH 06/49] Revert "temporary merge PR 32743 (#32776)" This reverts commit 1a00847dc93ca1a03b58d0fb0257731f1590725a. --- .../docs/en/rest-api/rollup/get-job.asciidoc | 6 +- .../xpack/core/indexing/Iteration.java | 62 --- .../xpack/core/indexing/IterativeIndexer.java | 384 ------------------ .../rollup/action/GetRollupJobsAction.java | 14 +- .../job}/IndexerState.java | 2 +- .../job/RollupJobStats.java} | 68 ++-- .../core/rollup/job/RollupJobStatus.java | 1 - .../core/indexing/IndexerJobStatsTests.java | 34 -- .../core/indexing/IterativeIndexerTests.java | 133 ------ .../job}/IndexerStateEnumTests.java | 2 +- .../job/JobWrapperSerializingTests.java | 4 +- .../core/rollup/job/RollupJobStatsTests.java | 35 ++ .../core/rollup/job/RollupJobStatusTests.java | 1 - .../xpack/rollup/job/IndexerUtils.java | 4 +- .../xpack/rollup/job/RollupIndexer.java | 346 ++++++++++++++-- .../xpack/rollup/job/RollupJobTask.java | 6 +- .../xpack/rollup/job/IndexerUtilsTests.java | 20 +- .../job/RollupIndexerIndexingTests.java | 2 +- .../rollup/job/RollupIndexerStateTests.java | 12 +- .../xpack/rollup/job/RollupJobTaskTests.java | 2 +- .../rest-api-spec/test/rollup/delete_job.yml | 6 +- .../rest-api-spec/test/rollup/get_jobs.yml | 6 +- .../rest-api-spec/test/rollup/put_job.yml | 2 +- .../elasticsearch/multi_node/RollupIT.java | 2 +- 24 files changed, 425 insertions(+), 729 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/{indexing => rollup/job}/IndexerState.java (97%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/{indexing/IndexerJobStats.java => rollup/job/RollupJobStats.java} (62%) delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/{indexing => rollup/job}/IndexerStateEnumTests.java (98%) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java diff --git a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc b/x-pack/docs/en/rest-api/rollup/get-job.asciidoc index 1a90557106bfc..96053dbfea64f 100644 --- a/x-pack/docs/en/rest-api/rollup/get-job.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/get-job.asciidoc @@ -99,7 +99,7 @@ Which will yield the following response: "stats" : { "pages_processed" : 0, "documents_processed" : 0, - "documents_indexed" : 0, + "rollups_indexed" : 0, "trigger_count" : 0 } } @@ -219,7 +219,7 @@ Which will yield the following response: "stats" : { "pages_processed" : 0, "documents_processed" : 0, - "documents_indexed" : 0, + "rollups_indexed" : 0, 
"trigger_count" : 0 } }, @@ -268,7 +268,7 @@ Which will yield the following response: "stats" : { "pages_processed" : 0, "documents_processed" : 0, - "documents_indexed" : 0, + "rollups_indexed" : 0, "trigger_count" : 0 } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java deleted file mode 100644 index 5568ecd5ff806..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/Iteration.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.core.indexing; - -import org.elasticsearch.action.index.IndexRequest; - -import java.util.List; - -/** - * Result object to hold the result of 1 iteration of iterative indexing. - * Acts as an interface between the implementation and the generic indexer. - */ -public class Iteration { - - private final boolean isDone; - private final JobPosition position; - private final List toIndex; - - /** - * Constructor for the result of 1 iteration. - * - * @param toIndex the list of requests to be indexed - * @param position the extracted, persistable position of the job required for the search phase - * @param isDone true if source is exhausted and job should go to sleep - * - * Note: toIndex.empty() != isDone due to possible filtering in the specific implementation - */ - public Iteration(List toIndex, JobPosition position, boolean isDone) { - this.toIndex = toIndex; - this.position = position; - this.isDone = isDone; - } - - /** - * Returns true if this indexing iteration is done and job should go into sleep mode. - */ - public boolean isDone() { - return isDone; - } - - /** - * Return the position of the job, a generic to be passed to the next query construction. - * - * @return the position - */ - public JobPosition getPosition() { - return position; - } - - /** - * List of requests to be passed to bulk indexing. - * - * @return List of index requests. - */ - public List getToIndex() { - return toIndex; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java deleted file mode 100644 index 9ec71b88e466c..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IterativeIndexer.java +++ /dev/null @@ -1,384 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.core.indexing; - -import org.apache.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; - -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.Executor; -import java.util.concurrent.atomic.AtomicReference; - -/** - * An abstract class that builds an index incrementally. 
A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, - * it will create the index from the source index up to the last complete bucket that is allowed to be built (based on job position). - * Only one background job can run simultaneously and {@link #onFinish()} is called when the job - * finishes. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} is called if the indexer is - * aborted while a job is running. The indexer must be started ({@link #start()} to allow a background job to run when - * {@link #maybeTriggerAsyncJob(long)} is called. {@link #stop()} can be used to stop the background job without aborting the indexer. - * - * In a nutshell this is a 2 cycle engine: 1st it sends a query, 2nd it indexes documents based on the response, sends the next query, - * indexes, queries, indexes, ... until a condition lets the engine pause until the source provides new input. - * - * @param Type that defines a job position to be defined by the implementation. - */ -public abstract class IterativeIndexer { - private static final Logger logger = Logger.getLogger(IterativeIndexer.class.getName()); - - private final IndexerJobStats stats; - - private final AtomicReference state; - private final AtomicReference position; - private final Executor executor; - - protected IterativeIndexer(Executor executor, AtomicReference initialState, JobPosition initialPosition) { - this.executor = executor; - this.state = initialState; - this.position = new AtomicReference<>(initialPosition); - this.stats = new IndexerJobStats(); - } - - /** - * Get the current state of the indexer. - */ - public IndexerState getState() { - return state.get(); - } - - /** - * Get the current position of the indexer. - */ - public JobPosition getPosition() { - return position.get(); - } - - /** - * Get the stats of this indexer. - */ - public IndexerJobStats getStats() { - return stats; - } - - /** - * Sets the internal state to {@link IndexerState#STARTED} if the previous state - * was {@link IndexerState#STOPPED}. Setting the state to STARTED allows a job - * to run in the background when {@link #maybeTriggerAsyncJob(long)} is called. - * - * @return The new state for the indexer (STARTED, INDEXING or ABORTING if the - * job was already aborted). - */ - public synchronized IndexerState start() { - state.compareAndSet(IndexerState.STOPPED, IndexerState.STARTED); - return state.get(); - } - - /** - * Sets the internal state to {@link IndexerState#STOPPING} if an async job is - * running in the background and in such case {@link #onFinish()} will be called - * as soon as the background job detects that the indexer is stopped. If there - * is no job running when this function is called, the state is directly set to - * {@link IndexerState#STOPPED} and {@link #onFinish()} will never be called. - * - * @return The new state for the indexer (STOPPED, STOPPING or ABORTING if the - * job was already aborted). - */ - public synchronized IndexerState stop() { - IndexerState currentState = state.updateAndGet(previousState -> { - if (previousState == IndexerState.INDEXING) { - return IndexerState.STOPPING; - } else if (previousState == IndexerState.STARTED) { - return IndexerState.STOPPED; - } else { - return previousState; - } - }); - return currentState; - } - - /** - * Sets the internal state to {@link IndexerState#ABORTING}. 
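The start/stop/abort contract spelled out above is easiest to follow as a sequence of calls. The sketch below is not part of the patch; it assumes a concrete subclass like the MockIndexer from the IterativeIndexerTests removed further down (constructor: executor, initial state, initial Integer position) and simply walks the state transitions the javadoc describes.

    AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
    MockIndexer indexer = new MockIndexer(executor, state, 0);

    indexer.start();                                           // STOPPED -> STARTED
    indexer.maybeTriggerAsyncJob(System.currentTimeMillis());  // STARTED -> INDEXING, fires the first async search
    indexer.stop();                                            // INDEXING -> STOPPING; the cycle winds down when it next checks state
    indexer.abort();                                           // -> ABORTING; onAbort() runs only if a cycle was still in flight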
It returns false if - * an async job is running in the background and in such case {@link #onAbort} - * will be called as soon as the background job detects that the indexer is - * aborted. If there is no job running when this function is called, it returns - * true and {@link #onAbort()} will never be called. - * - * @return true if the indexer is aborted, false if a background job is running - * and abort is delayed. - */ - public synchronized boolean abort() { - IndexerState prevState = state.getAndUpdate((prev) -> IndexerState.ABORTING); - return prevState == IndexerState.STOPPED || prevState == IndexerState.STARTED; - } - - /** - * Triggers a background job that builds the index asynchronously iff - * there is no other job that runs and the indexer is started - * ({@link IndexerState#STARTED}. - * - * @param now - * The current time in milliseconds (used to limit the job to - * complete buckets) - * @return true if a job has been triggered, false otherwise - */ - public synchronized boolean maybeTriggerAsyncJob(long now) { - final IndexerState currentState = state.get(); - switch (currentState) { - case INDEXING: - case STOPPING: - case ABORTING: - logger.warn("Schedule was triggered for job [" + getJobId() + "], but prior indexer is still running."); - return false; - - case STOPPED: - logger.debug("Schedule was triggered for job [" + getJobId() + "] but job is stopped. Ignoring trigger."); - return false; - - case STARTED: - logger.debug("Schedule was triggered for job [" + getJobId() + "], state: [" + currentState + "]"); - stats.incrementNumInvocations(1); - onStart(now); - - if (state.compareAndSet(IndexerState.STARTED, IndexerState.INDEXING)) { - // fire off the search. Note this is async, the method will return from here - executor.execute(() -> doNextSearch(buildSearchRequest(), - ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc)))); - logger.debug("Beginning to index [" + getJobId() + "], state: [" + currentState + "]"); - return true; - } else { - logger.debug("Could not move from STARTED to INDEXING state because current state is [" + state.get() + "]"); - return false; - } - - default: - logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); - throw new IllegalStateException("Job encountered an illegal state [" + currentState + "]"); - } - } - - /** - * Called to get the Id of the job, used for logging. - * - * @return a string with the id of the job - */ - protected abstract String getJobId(); - - /** - * Called to process a response from the 1 search request in order to turn it into a {@link Iteration}. - * - * @param searchResponse response from the search phase. - * @return Iteration object to be passed to indexing phase. - */ - protected abstract Iteration doProcess(SearchResponse searchResponse); - - /** - * Called to build the next search request. - * - * @return SearchRequest to be passed to the search phase. - */ - protected abstract SearchRequest buildSearchRequest(); - - /** - * Called at startup after job has been triggered using {@link #maybeTriggerAsyncJob(long)} and the - * internal state is {@link IndexerState#STARTED}. - * - * @param now The current time in milliseconds passed through from {@link #maybeTriggerAsyncJob(long)} - */ - protected abstract void onStart(long now); - - /** - * Executes the {@link SearchRequest} and calls nextPhase with the - * response or the exception if an error occurs. 
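This hook and its bulk counterpart described just below are intentionally thin. A minimal sketch, not taken from this patch and assuming the subclass holds a Client field (the real rollup task additionally threads security headers through), could delegate straight to the client:

    @Override
    protected void doNextSearch(SearchRequest request, ActionListener<SearchResponse> nextPhase) {
        // hand the search response (or failure) to the generic indexer loop
        client.search(request, nextPhase);
    }

    @Override
    protected void doNextBulk(BulkRequest request, ActionListener<BulkResponse> nextPhase) {
        // same pattern for the indexing phase
        client.bulk(request, nextPhase);
    }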
- * - * @param request - * The search request to execute - * @param nextPhase - * Listener for the next phase - */ - protected abstract void doNextSearch(SearchRequest request, ActionListener nextPhase); - - /** - * Executes the {@link BulkRequest} and calls nextPhase with the - * response or the exception if an error occurs. - * - * @param request - * The bulk request to execute - * @param nextPhase - * Listener for the next phase - */ - protected abstract void doNextBulk(BulkRequest request, ActionListener nextPhase); - - /** - * Called periodically during the execution of a background job. Implementation - * should persists the state somewhere and continue the execution asynchronously - * using next. - * - * @param state - * The current state of the indexer - * @param position - * The current position of the indexer - * @param next - * Runnable for the next phase - */ - protected abstract void doSaveState(IndexerState state, JobPosition position, Runnable next); - - /** - * Called when a failure occurs in an async job causing the execution to stop. - * - * @param exc - * The exception - */ - protected abstract void onFailure(Exception exc); - - /** - * Called when a background job finishes. - */ - protected abstract void onFinish(); - - /** - * Called when a background job detects that the indexer is aborted causing the - * async execution to stop. - */ - protected abstract void onAbort(); - - private void finishWithFailure(Exception exc) { - doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); - } - - private IndexerState finishAndSetState() { - return state.updateAndGet(prev -> { - switch (prev) { - case INDEXING: - // ready for another job - return IndexerState.STARTED; - - case STOPPING: - // must be started again - return IndexerState.STOPPED; - - case ABORTING: - // abort and exit - onAbort(); - return IndexerState.ABORTING; // This shouldn't matter, since onAbort() will kill the task first - - case STOPPED: - // No-op. Shouldn't really be possible to get here (should have to go through - // STOPPING - // first which will be handled) but is harmless to no-op and we don't want to - // throw exception here - return IndexerState.STOPPED; - - default: - // any other state is unanticipated at this point - throw new IllegalStateException("Indexer job encountered an illegal state [" + prev + "]"); - } - }); - } - - private void onSearchResponse(SearchResponse searchResponse) { - try { - if (checkState(getState()) == false) { - return; - } - if (searchResponse.getShardFailures().length != 0) { - throw new RuntimeException("Shard failures encountered while running indexer for job [" + getJobId() + "]: " - + Arrays.toString(searchResponse.getShardFailures())); - } - - stats.incrementNumPages(1); - Iteration iteration = doProcess(searchResponse); - - if (iteration.isDone()) { - logger.debug("Finished indexing for job [" + getJobId() + "], saving state and shutting down."); - - // Change state first, then try to persist. This prevents in-progress - // STOPPING/ABORTING from - // being persisted as STARTED but then stop the job - doSaveState(finishAndSetState(), position.get(), this::onFinish); - return; - } - - final List docs = iteration.getToIndex(); - final BulkRequest bulkRequest = new BulkRequest(); - docs.forEach(bulkRequest::add); - - // TODO this might be a valid case, e.g. 
if implementation filters - assert bulkRequest.requests().size() > 0; - - doNextBulk(bulkRequest, ActionListener.wrap(bulkResponse -> { - // TODO we should check items in the response and move after accordingly to - // resume the failing buckets ? - if (bulkResponse.hasFailures()) { - logger.warn("Error while attempting to bulk index documents: " + bulkResponse.buildFailureMessage()); - } - stats.incrementNumOutputDocuments(bulkResponse.getItems().length); - if (checkState(getState()) == false) { - return; - } - - JobPosition newPosition = iteration.getPosition(); - position.set(newPosition); - - onBulkResponse(bulkResponse, newPosition); - }, exc -> finishWithFailure(exc))); - } catch (Exception e) { - finishWithFailure(e); - } - } - - private void onBulkResponse(BulkResponse response, JobPosition position) { - try { - - ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithFailure); - // TODO probably something more intelligent than every-50 is needed - if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { - doSaveState(IndexerState.INDEXING, position, () -> doNextSearch(buildSearchRequest(), listener)); - } else { - doNextSearch(buildSearchRequest(), listener); - } - } catch (Exception e) { - finishWithFailure(e); - } - } - - /** - * Checks the {@link IndexerState} and returns false if the execution should be - * stopped. - */ - private boolean checkState(IndexerState currentState) { - switch (currentState) { - case INDEXING: - // normal state; - return true; - - case STOPPING: - logger.info("Indexer job encountered [" + IndexerState.STOPPING + "] state, halting indexer."); - doSaveState(finishAndSetState(), getPosition(), () -> { - }); - return false; - - case STOPPED: - return false; - - case ABORTING: - logger.info("Requested shutdown of indexer for job [" + getJobId() + "]"); - onAbort(); - return false; - - default: - // Anything other than indexing, aborting or stopping is unanticipated - logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); - throw new IllegalStateException("Indexer job encountered an illegal state [" + currentState + "]"); - } - } - -} \ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java index 4bfd5b621e780..50f7931508585 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java @@ -25,9 +25,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import java.io.IOException; @@ -204,20 +204,20 @@ public final String toString() { public static class JobWrapper implements Writeable, ToXContentObject { private final RollupJobConfig job; - private final IndexerJobStats stats; + private final RollupJobStats stats; private final RollupJobStatus status; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, a 
-> new JobWrapper((RollupJobConfig) a[0], - (IndexerJobStats) a[1], (RollupJobStatus)a[2])); + (RollupJobStats) a[1], (RollupJobStatus)a[2])); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), IndexerJobStats.PARSER::apply, STATS); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStats.PARSER::apply, STATS); PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS); } - public JobWrapper(RollupJobConfig job, IndexerJobStats stats, RollupJobStatus status) { + public JobWrapper(RollupJobConfig job, RollupJobStats stats, RollupJobStatus status) { this.job = job; this.stats = stats; this.status = status; @@ -225,7 +225,7 @@ public JobWrapper(RollupJobConfig job, IndexerJobStats stats, RollupJobStatus st public JobWrapper(StreamInput in) throws IOException { this.job = new RollupJobConfig(in); - this.stats = new IndexerJobStats(in); + this.stats = new RollupJobStats(in); this.status = new RollupJobStatus(in); } @@ -240,7 +240,7 @@ public RollupJobConfig getJob() { return job; } - public IndexerJobStats getStats() { + public RollupJobStats getStats() { return stats; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java similarity index 97% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java index 1b6b9a943cba2..6e211c1df9e3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/IndexerState.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.indexing; +package org.elasticsearch.xpack.core.rollup.job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java similarity index 62% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java index f64ed5804535d..06cfb520af552 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/indexing/IndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStats.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.indexing; +package org.elasticsearch.xpack.core.rollup.job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -13,6 +13,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; + import java.io.IOException; import java.util.Objects; @@ -23,46 +24,45 @@ * and are only for external monitoring/reference. Statistics are not persisted with the job, so if the * allocated task is shutdown/restarted on a different node all the stats will reset. */ -public class IndexerJobStats implements ToXContentObject, Writeable { +public class RollupJobStats implements ToXContentObject, Writeable { public static final ParseField NAME = new ParseField("job_stats"); private static ParseField NUM_PAGES = new ParseField("pages_processed"); - private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); - // BWC for RollupJobStats - private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed").withDeprecation("rollups_indexed"); + private static ParseField NUM_DOCUMENTS = new ParseField("documents_processed"); + private static ParseField NUM_ROLLUPS = new ParseField("rollups_indexed"); private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); private long numPages = 0; - private long numInputDocuments = 0; - private long numOuputDocuments = 0; + private long numDocuments = 0; + private long numRollups = 0; private long numInvocations = 0; - public static final ConstructingObjectParser PARSER = + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME.getPreferredName(), - args -> new IndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + args -> new RollupJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); - PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); - PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_ROLLUPS); PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); } - public IndexerJobStats() { + public RollupJobStats() { } - public IndexerJobStats(long numPages, long numDocuments, long numOuputDocuments, long numInvocations) { + public RollupJobStats(long numPages, long numDocuments, long numRollups, long numInvocations) { this.numPages = numPages; - this.numInputDocuments = numDocuments; - this.numOuputDocuments = numOuputDocuments; + this.numDocuments = numDocuments; + this.numRollups = numRollups; this.numInvocations = numInvocations; } - public IndexerJobStats(StreamInput in) throws IOException { + public RollupJobStats(StreamInput in) throws IOException { this.numPages = in.readVLong(); - this.numInputDocuments = in.readVLong(); - this.numOuputDocuments = in.readVLong(); + this.numDocuments = in.readVLong(); + this.numRollups = in.readVLong(); this.numInvocations = in.readVLong(); } @@ -71,15 +71,15 @@ public long getNumPages() { } public long getNumDocuments() { - return numInputDocuments; + return numDocuments; } public long getNumInvocations() { return numInvocations; } - public long getOutputDocuments() { - return numOuputDocuments; + public long getNumRollups() { + return numRollups; } public void incrementNumPages(long n) { @@ -89,7 +89,7 @@ public void 
incrementNumPages(long n) { public void incrementNumDocuments(long n) { assert(n >= 0); - numInputDocuments += n; + numDocuments += n; } public void incrementNumInvocations(long n) { @@ -97,20 +97,20 @@ public void incrementNumInvocations(long n) { numInvocations += n; } - public void incrementNumOutputDocuments(long n) { + public void incrementNumRollups(long n) { assert(n >= 0); - numOuputDocuments += n; + numRollups += n; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVLong(numPages); - out.writeVLong(numInputDocuments); - out.writeVLong(numOuputDocuments); + out.writeVLong(numDocuments); + out.writeVLong(numRollups); out.writeVLong(numInvocations); } - public static IndexerJobStats fromXContent(XContentParser parser) { + public static RollupJobStats fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { @@ -122,8 +122,8 @@ public static IndexerJobStats fromXContent(XContentParser parser) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(NUM_PAGES.getPreferredName(), numPages); - builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); - builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); + builder.field(NUM_DOCUMENTS.getPreferredName(), numDocuments); + builder.field(NUM_ROLLUPS.getPreferredName(), numRollups); builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); builder.endObject(); return builder; @@ -139,16 +139,18 @@ public boolean equals(Object other) { return false; } - IndexerJobStats that = (IndexerJobStats) other; + RollupJobStats that = (RollupJobStats) other; return Objects.equals(this.numPages, that.numPages) - && Objects.equals(this.numInputDocuments, that.numInputDocuments) - && Objects.equals(this.numOuputDocuments, that.numOuputDocuments) + && Objects.equals(this.numDocuments, that.numDocuments) + && Objects.equals(this.numRollups, that.numRollups) && Objects.equals(this.numInvocations, that.numInvocations); } @Override public int hashCode() { - return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations); + return Objects.hash(numPages, numDocuments, numRollups, numInvocations); } + } + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java index 0a2f046907c80..640385c9c80d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatus.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.HashMap; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java deleted file mode 100644 index e60573d3ed071..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerJobStatsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.core.indexing; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractSerializingTestCase; - -public class IndexerJobStatsTests extends AbstractSerializingTestCase { - - @Override - protected IndexerJobStats createTestInstance() { - return randomStats(); - } - - @Override - protected Writeable.Reader instanceReader() { - return IndexerJobStats::new; - } - - @Override - protected IndexerJobStats doParseInstance(XContentParser parser) { - return IndexerJobStats.fromXContent(parser); - } - - public static IndexerJobStats randomStats() { - return new IndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong()); - } -} - diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java deleted file mode 100644 index 85066cb42f519..0000000000000 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IterativeIndexerTests.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.core.indexing; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; -import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.test.ESTestCase; - -import java.util.Collections; -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import static org.hamcrest.Matchers.equalTo; - -public class IterativeIndexerTests extends ESTestCase { - - AtomicBoolean isFinished = new AtomicBoolean(false); - - private class MockIndexer extends IterativeIndexer { - - // test the execution order - private int step; - - protected MockIndexer(Executor executor, AtomicReference initialState, Integer initialPosition) { - super(executor, initialState, initialPosition); - } - - @Override - protected String getJobId() { - return "mock"; - } - - @Override - protected Iteration doProcess(SearchResponse searchResponse) { - assertThat(step, equalTo(3)); - ++step; - return new Iteration(Collections.emptyList(), 3, true); - } - - @Override - protected SearchRequest buildSearchRequest() { - assertThat(step, equalTo(1)); - ++step; - return null; - } - - @Override - protected void onStart(long now) { - assertThat(step, equalTo(0)); - ++step; - } - - @Override - protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { - assertThat(step, equalTo(2)); - ++step; - final SearchResponseSections sections = new SearchResponseSections(new SearchHits(new 
SearchHit[0], 0, 0), null, null, false, - null, null, 1); - - nextPhase.onResponse(new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null)); - } - - @Override - protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { - fail("should not be called"); - } - - @Override - protected void doSaveState(IndexerState state, Integer position, Runnable next) { - assertThat(step, equalTo(4)); - ++step; - next.run(); - } - - @Override - protected void onFailure(Exception exc) { - fail(exc.getMessage()); - } - - @Override - protected void onFinish() { - assertThat(step, equalTo(5)); - ++step; - isFinished.set(true); - } - - @Override - protected void onAbort() { - } - - public int getStep() { - return step; - } - - } - - public void testStateMachine() throws InterruptedException { - AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - final ExecutorService executor = Executors.newFixedThreadPool(1); - - try { - - MockIndexer indexer = new MockIndexer(executor, state, 2); - indexer.start(); - assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); - assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); - assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); - assertThat(indexer.getPosition(), equalTo(2)); - ESTestCase.awaitBusy(() -> isFinished.get()); - assertThat(indexer.getStep(), equalTo(6)); - assertThat(indexer.getStats().getNumInvocations(), equalTo(1L)); - assertThat(indexer.getStats().getNumPages(), equalTo(1L)); - assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); - assertTrue(indexer.abort()); - } finally { - executor.shutdownNow(); - } - } -} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java similarity index 98% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java index 329800c2f1a24..ec17a37e23b2b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/IndexerStateEnumTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/IndexerStateEnumTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.core.indexing; +package org.elasticsearch.xpack.core.rollup.job; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java index cb827c040801d..a0df63bc38dde 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java @@ -8,8 +8,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction; @@ -42,7 +40,7 @@ protected GetRollupJobsAction.JobWrapper createTestInstance() { } return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.randomRollupJobConfig(random()), - new IndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), + new RollupJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), new RollupJobStatus(state, Collections.emptyMap(), randomBoolean())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java new file mode 100644 index 0000000000000..0091b21dc40d0 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatsTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.rollup.job; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; + +public class RollupJobStatsTests extends AbstractSerializingTestCase { + + @Override + protected RollupJobStats createTestInstance() { + return randomStats(); + } + + @Override + protected Writeable.Reader instanceReader() { + return RollupJobStats::new; + } + + @Override + protected RollupJobStats doParseInstance(XContentParser parser) { + return RollupJobStats.fromXContent(parser); + } + + public static RollupJobStats randomStats() { + return new RollupJobStats(randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong()); + } +} + diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java index f46bda788bf5b..2c802a7e41dc3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobStatusTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.indexing.IndexerState; import java.util.HashMap; import java.util.Map; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index 594221d921488..9119a5445d42e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -14,10 +14,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.rollup.Rollup; import java.util.ArrayList; @@ -46,7 +46,7 @@ class IndexerUtils { * @param isUpgradedDocID `true` if this job is using the new ID scheme * @return A list of rolled documents derived from the response */ - static List processBuckets(CompositeAggregation agg, String rollupIndex, IndexerJobStats stats, + static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, GroupConfig groupConfig, String jobId, boolean isUpgradedDocID) { logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]"); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index c2a07b47349aa..87294706b3b7d 100644 --- 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -5,6 +5,11 @@ */ package org.elasticsearch.xpack.rollup.job; +import org.apache.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.unit.TimeValue; @@ -15,16 +20,16 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.core.indexing.Iteration; -import org.elasticsearch.xpack.core.indexing.IterativeIndexer; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -33,13 +38,25 @@ import java.util.concurrent.atomic.AtomicReference; /** - * An abstract implementation of {@link IterativeIndexer} that builds a rollup index incrementally. + * An abstract class that builds a rollup index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)}, + * it will create the rollup index from the source index up to the last complete bucket that is allowed to be built (based on the current + * time and the delay set on the rollup job). Only one background job can run simultaneously and {@link #onFinish()} is called when the job + * finishes. {@link #onFailure(Exception)} is called if the job fails with an exception and {@link #onAbort()} is called if the indexer is + * aborted while a job is running. The indexer must be started ({@link #start()} to allow a background job to run when + * {@link #maybeTriggerAsyncJob(long)} is called. {@link #stop()} can be used to stop the background job without aborting the indexer. 
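A worked example of the "last complete bucket" boundary mentioned above, with illustrative numbers not taken from the patch: for an hourly date_histogram with a 15m delay and now = 10:37, the rounding and delay are applied exactly as in maybeTriggerAsyncJob further below (dateHisto and now as in that method body), so buckets that can still receive new documents are excluded from the current run.

    // rounded     = 10:00  (now rounded down to the date histogram interval)
    // maxBoundary = 09:45  (rounded minus the configured delay)
    long rounded = dateHisto.createRounding().round(now);
    long maxBoundary = dateHisto.getDelay() != null
            ? rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis()
            : rounded;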
*/ -public abstract class RollupIndexer extends IterativeIndexer > { +public abstract class RollupIndexer { + private static final Logger logger = Logger.getLogger(RollupIndexer.class.getName()); + static final String AGGREGATION_NAME = RollupField.NAME; private final RollupJob job; + private final RollupJobStats stats; + private final AtomicReference state; + private final AtomicReference> position; + private final Executor executor; protected final AtomicBoolean upgradedDocumentID; + private final CompositeAggregationBuilder compositeBuilder; private long maxBoundary; @@ -49,16 +66,84 @@ public abstract class RollupIndexer extends IterativeIndexer * @param job The rollup job * @param initialState Initial state for the indexer * @param initialPosition The last indexed bucket of the task - * @param upgradedDocumentID whether job has updated IDs (for BWC) */ - RollupIndexer(Executor executor, RollupJob job, AtomicReference initialState, Map initialPosition, - AtomicBoolean upgradedDocumentID) { - super(executor, initialState, initialPosition); + RollupIndexer(Executor executor, RollupJob job, AtomicReference initialState, + Map initialPosition, AtomicBoolean upgradedDocumentID) { + this.executor = executor; this.job = job; + this.stats = new RollupJobStats(); + this.state = initialState; + this.position = new AtomicReference<>(initialPosition); this.compositeBuilder = createCompositeBuilder(job.getConfig()); this.upgradedDocumentID = upgradedDocumentID; } + /** + * Executes the {@link SearchRequest} and calls nextPhase with the response + * or the exception if an error occurs. + * + * @param request The search request to execute + * @param nextPhase Listener for the next phase + */ + protected abstract void doNextSearch(SearchRequest request, ActionListener nextPhase); + + /** + * Executes the {@link BulkRequest} and calls nextPhase with the response + * or the exception if an error occurs. + * + * @param request The bulk request to execute + * @param nextPhase Listener for the next phase + */ + protected abstract void doNextBulk(BulkRequest request, ActionListener nextPhase); + + /** + * Called periodically during the execution of a background job. Implementation should + * persists the state somewhere and continue the execution asynchronously using next. + * + * @param state The current state of the indexer + * @param position The current position of the indexer + * @param next Runnable for the next phase + */ + protected abstract void doSaveState(IndexerState state, Map position, Runnable next); + + /** + * Called when a failure occurs in an async job causing the execution to stop. + * @param exc The exception + */ + protected abstract void onFailure(Exception exc); + + /** + * Called when a background job finishes. + */ + protected abstract void onFinish(); + + /** + * Called when a background job detects that the indexer is aborted causing the async execution + * to stop. + */ + protected abstract void onAbort(); + + /** + * Get the current state of the indexer. + */ + public IndexerState getState() { + return state.get(); + } + + /** + * Get the current position of the indexer. + */ + public Map getPosition() { + return position.get(); + } + + /** + * Get the stats of this indexer. 
+ */ + public RollupJobStats getStats() { + return stats; + } + /** * Returns if this job has upgraded it's ID scheme yet or not */ @@ -66,28 +151,229 @@ public boolean isUpgradedDocumentID() { return upgradedDocumentID.get(); } - @Override - protected String getJobId() { - return job.getConfig().getId(); + /** + * Sets the internal state to {@link IndexerState#STARTED} if the previous state was {@link IndexerState#STOPPED}. Setting the state to + * STARTED allows a job to run in the background when {@link #maybeTriggerAsyncJob(long)} is called. + * @return The new state for the indexer (STARTED, INDEXING or ABORTING if the job was already aborted). + */ + public synchronized IndexerState start() { + state.compareAndSet(IndexerState.STOPPED, IndexerState.STARTED); + return state.get(); } - @Override - protected void onStart(long now) { - // this is needed to exclude buckets that can still receive new documents. - DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); - long rounded = dateHisto.createRounding().round(now); - if (dateHisto.getDelay() != null) { - // if the job has a delay we filter all documents that appear before it. - maxBoundary = rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis(); - } else { - maxBoundary = rounded; + /** + * Sets the internal state to {@link IndexerState#STOPPING} if an async job is running in the background and in such case + * {@link #onFinish()} will be called as soon as the background job detects that the indexer is stopped. If there is no job running when + * this function is called, the state is directly set to {@link IndexerState#STOPPED} and {@link #onFinish()} will never be called. + * @return The new state for the indexer (STOPPED, STOPPING or ABORTING if the job was already aborted). + */ + public synchronized IndexerState stop() { + IndexerState currentState = state.updateAndGet(previousState -> { + if (previousState == IndexerState.INDEXING) { + return IndexerState.STOPPING; + } else if (previousState == IndexerState.STARTED) { + return IndexerState.STOPPED; + } else { + return previousState; + } + }); + return currentState; + } + + /** + * Sets the internal state to {@link IndexerState#ABORTING}. It returns false if an async job is running in the background and in such + * case {@link #onAbort} will be called as soon as the background job detects that the indexer is aborted. If there is no job running + * when this function is called, it returns true and {@link #onAbort()} will never be called. + * @return true if the indexer is aborted, false if a background job is running and abort is delayed. + */ + public synchronized boolean abort() { + IndexerState prevState = state.getAndUpdate((prev) -> IndexerState.ABORTING); + return prevState == IndexerState.STOPPED || prevState == IndexerState.STARTED; + } + + /** + * Triggers a background job that builds the rollup index asynchronously iff there is no other job that runs + * and the indexer is started ({@link IndexerState#STARTED}. 
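Because getStats() returns the live, in-memory RollupJobStats (which, as its javadoc notes, resets if the allocated task is restarted on another node), a hypothetical monitoring hook could simply poll the getters this revert restores:

    RollupJobStats stats = indexer.getStats();
    logger.info("pages=" + stats.getNumPages()
            + " documents=" + stats.getNumDocuments()
            + " rollups=" + stats.getNumRollups()
            + " triggers=" + stats.getNumInvocations());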
+ * + * @param now The current time in milliseconds (used to limit the job to complete buckets) + * @return true if a job has been triggered, false otherwise + */ + public synchronized boolean maybeTriggerAsyncJob(long now) { + final IndexerState currentState = state.get(); + switch (currentState) { + case INDEXING: + case STOPPING: + case ABORTING: + logger.warn("Schedule was triggered for rollup job [" + job.getConfig().getId() + "], but prior indexer is still running."); + return false; + + case STOPPED: + logger.debug("Schedule was triggered for rollup job [" + job.getConfig().getId() + + "] but job is stopped. Ignoring trigger."); + return false; + + case STARTED: + logger.debug("Schedule was triggered for rollup job [" + job.getConfig().getId() + "], state: [" + currentState + "]"); + // Only valid time to start indexing is when we are STARTED but not currently INDEXING. + stats.incrementNumInvocations(1); + + // rounds the current time to its current bucket based on the date histogram interval. + // this is needed to exclude buckets that can still receive new documents. + DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); + long rounded = dateHisto.createRounding().round(now); + if (dateHisto.getDelay() != null) { + // if the job has a delay we filter all documents that appear before it. + maxBoundary = rounded - TimeValue.parseTimeValue(dateHisto.getDelay().toString(), "").millis(); + } else { + maxBoundary = rounded; + } + + if (state.compareAndSet(IndexerState.STARTED, IndexerState.INDEXING)) { + // fire off the search. Note this is async, the method will return from here + executor.execute(() -> doNextSearch(buildSearchRequest(), + ActionListener.wrap(this::onSearchResponse, exc -> finishWithFailure(exc)))); + logger.debug("Beginning to rollup [" + job.getConfig().getId() + "], state: [" + currentState + "]"); + return true; + } else { + logger.debug("Could not move from STARTED to INDEXING state because current state is [" + state.get() + "]"); + return false; + } + + default: + logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); + throw new IllegalStateException("Rollup job encountered an illegal state [" + currentState + "]"); + } + } + + /** + * Checks the {@link IndexerState} and returns false if the execution + * should be stopped. + */ + private boolean checkState(IndexerState currentState) { + switch (currentState) { + case INDEXING: + // normal state; + return true; + + case STOPPING: + logger.info("Rollup job encountered [" + IndexerState.STOPPING + "] state, halting indexer."); + doSaveState(finishAndSetState(), getPosition(), () -> {}); + return false; + + case STOPPED: + return false; + + case ABORTING: + logger.info("Requested shutdown of indexer for job [" + job.getConfig().getId() + "]"); + onAbort(); + return false; + + default: + // Anything other than indexing, aborting or stopping is unanticipated + logger.warn("Encountered unexpected state [" + currentState + "] while indexing"); + throw new IllegalStateException("Rollup job encountered an illegal state [" + currentState + "]"); } } - - @Override - protected SearchRequest buildSearchRequest() { + + private void onBulkResponse(BulkResponse response, Map after) { + // TODO we should check items in the response and move after accordingly to resume the failing buckets ? 
+ stats.incrementNumRollups(response.getItems().length); + if (response.hasFailures()) { + logger.warn("Error while attempting to bulk index rollup documents: " + response.buildFailureMessage()); + } + try { + if (checkState(getState()) == false) { + return ; + } + position.set(after); + ActionListener listener = ActionListener.wrap(this::onSearchResponse, this::finishWithFailure); + // TODO probably something more intelligent than every-50 is needed + if (stats.getNumPages() > 0 && stats.getNumPages() % 50 == 0) { + doSaveState(IndexerState.INDEXING, after, () -> doNextSearch(buildSearchRequest(), listener)); + } else { + doNextSearch(buildSearchRequest(), listener); + } + } catch (Exception e) { + finishWithFailure(e); + } + } + + private void onSearchResponse(SearchResponse searchResponse) { + try { + if (checkState(getState()) == false) { + return ; + } + if (searchResponse.getShardFailures().length != 0) { + throw new RuntimeException("Shard failures encountered while running indexer for rollup job [" + + job.getConfig().getId() + "]: " + Arrays.toString(searchResponse.getShardFailures())); + } + final CompositeAggregation response = searchResponse.getAggregations().get(AGGREGATION_NAME); + if (response == null) { + throw new IllegalStateException("Missing composite response for query: " + compositeBuilder.toString()); + } + stats.incrementNumPages(1); + if (response.getBuckets().isEmpty()) { + // this is the end... + logger.debug("Finished indexing for job [" + job.getConfig().getId() + "], saving state and shutting down."); + + // Change state first, then try to persist. This prevents in-progress STOPPING/ABORTING from + // being persisted as STARTED but then stop the job + doSaveState(finishAndSetState(), position.get(), this::onFinish); + return; + } + + final BulkRequest bulkRequest = new BulkRequest(); // Indexer is single-threaded, and only place that the ID scheme can get upgraded is doSaveState(), so // we can pass down the boolean value rather than the atomic here + final List docs = IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), + stats, job.getConfig().getGroupConfig(), job.getConfig().getId(), upgradedDocumentID.get()); + docs.forEach(bulkRequest::add); + assert bulkRequest.requests().size() > 0; + doNextBulk(bulkRequest, + ActionListener.wrap( + bulkResponse -> onBulkResponse(bulkResponse, response.afterKey()), + exc -> finishWithFailure(exc) + ) + ); + } catch(Exception e) { + finishWithFailure(e); + } + } + + private void finishWithFailure(Exception exc) { + doSaveState(finishAndSetState(), position.get(), () -> onFailure(exc)); + } + + private IndexerState finishAndSetState() { + return state.updateAndGet( + prev -> { + switch (prev) { + case INDEXING: + // ready for another job + return IndexerState.STARTED; + + case STOPPING: + // must be started again + return IndexerState.STOPPED; + + case ABORTING: + // abort and exit + onAbort(); + return IndexerState.ABORTING; // This shouldn't matter, since onAbort() will kill the task first + + case STOPPED: + // No-op. 
Shouldn't really be possible to get here (should have to go through STOPPING + // first which will be handled) but is harmless to no-op and we don't want to throw exception here + return IndexerState.STOPPED; + + default: + // any other state is unanticipated at this point + throw new IllegalStateException("Rollup job encountered an illegal state [" + prev + "]"); + } + }); + } + + private SearchRequest buildSearchRequest() { final Map position = getPosition(); SearchSourceBuilder searchSource = new SearchSourceBuilder() .size(0) @@ -97,16 +383,6 @@ protected SearchRequest buildSearchRequest() { .aggregation(compositeBuilder.aggregateAfter(position)); return new SearchRequest(job.getConfig().getIndexPattern()).source(searchSource); } - - @Override - protected Iteration> doProcess(SearchResponse searchResponse) { - final CompositeAggregation response = searchResponse.getAggregations().get(AGGREGATION_NAME); - - return new Iteration<>( - IndexerUtils.processBuckets(response, job.getConfig().getRollupIndex(), getStats(), - job.getConfig().getGroupConfig(), job.getConfig().getId(), upgradedDocumentID.get()), - response.afterKey(), response.getBuckets().isEmpty()); - } /** * Creates a skeleton {@link CompositeAggregationBuilder} from the provided job config. diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 4898e099319b5..65362f9ad9dd3 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -25,13 +25,13 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; +import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.rollup.Rollup; @@ -218,7 +218,7 @@ public Status getStatus() { * Gets the stats for this task. 
* @return The stats of this task */ - public IndexerJobStats getStats() { + public RollupJobStats getStats() { return indexer.getStats(); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 5a16740f94611..e8c66f7e8c118 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -35,12 +35,12 @@ import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; +import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTime; import org.mockito.stubbing.Answer; @@ -64,7 +64,7 @@ public class IndexerUtilsTests extends AggregatorTestCase { public void testMissingFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - IndexerJobStats stats = new IndexerJobStats(0, 0, 0, 0); + RollupJobStats stats = new RollupJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -126,7 +126,7 @@ public void testMissingFields() throws IOException { public void testCorrectFields() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -193,7 +193,7 @@ public void testCorrectFields() throws IOException { public void testNumericTerms() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); String timestampField = "the_histo"; String valueField = "the_avg"; @@ -249,7 +249,7 @@ public void testNumericTerms() throws IOException { public void testEmptyCounts() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); String timestampField = "ts"; String valueField = "the_avg"; @@ -355,7 +355,7 @@ public void testKeyOrderingOldID() { // The content of the config don't actually matter for this test // because the test is just looking at agg keys GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(123L, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", false); + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", false); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("1237859798")); } @@ -399,7 +399,7 @@ public void testKeyOrderingNewID() { }); GroupConfig 
groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1L, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", true); + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("foo$c9LcrFqeFW92uN_Z7sv1hA")); } @@ -449,7 +449,7 @@ public void testKeyOrderingNewIDLong() { }); GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1, "abc"), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", true); + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true); assertThat(docs.size(), equalTo(1)); assertThat(docs.get(0).id(), equalTo("foo$VAFKZpyaEqYRPLyic57_qw")); } @@ -476,14 +476,14 @@ public void testNullKeys() { }); GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), randomHistogramGroupConfig(random()), null); - List docs = IndexerUtils.processBuckets(composite, "foo", new IndexerJobStats(), groupConfig, "foo", randomBoolean()); + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", randomBoolean()); assertThat(docs.size(), equalTo(1)); assertFalse(Strings.isNullOrEmpty(docs.get(0).id())); } public void testMissingBuckets() throws IOException { String indexName = randomAlphaOfLengthBetween(1, 10); - IndexerJobStats stats= new IndexerJobStats(0, 0, 0, 0); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); String metricField = "metric_field"; String valueField = "value_field"; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 55f1cfbdbb29c..6d29ee9f9ba6d 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -50,10 +50,10 @@ import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index c74ecbadf4fbe..955dcbc2beb48 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import 
org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.mockito.stubbing.Answer; @@ -639,7 +639,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); + assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -743,7 +743,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); + assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -763,7 +763,7 @@ public void testSearchShardFailure() throws Exception { Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); Consumer failureConsumer = e -> { - assertThat(e.getMessage(), startsWith("Shard failures encountered while running indexer for job")); + assertThat(e.getMessage(), startsWith("Shard failures encountered while running indexer for rollup job")); isFinished.set(true); }; @@ -786,7 +786,7 @@ public void testSearchShardFailure() throws Exception { // Note: no pages processed, no docs were indexed assertThat(indexer.getStats().getNumPages(), equalTo(0L)); - assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); + assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); @@ -896,7 +896,7 @@ protected void doNextBulk(BulkRequest request, ActionListener next assertThat(indexer.getStats().getNumPages(), equalTo(1L)); // Note: no docs were indexed - assertThat(indexer.getStats().getOutputDocuments(), equalTo(0L)); + assertThat(indexer.getStats().getNumRollups(), equalTo(0L)); assertTrue(indexer.abort()); } finally { executor.shutdownNow(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index c5f20b06a2314..13290f09e8eb8 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -19,11 +19,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction; +import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index d157fd61138d6..298cf27fa2f9d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -66,7 +66,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -113,7 +113,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -160,7 +160,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml index ec2c6f7581038..f3fa8114ddbd0 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -67,7 +67,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -178,7 +178,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" @@ -204,7 +204,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml index e4d635bbe3995..516be25be2a2d 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/put_job.yml @@ -67,7 +67,7 @@ setup: stats: pages_processed: 0 documents_processed: 0 - documents_indexed: 0 + rollups_indexed: 0 trigger_count: 0 status: job_state: "stopped" diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java index a26f3cfb2b213..43ad4dc0a45a2 100644 --- a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java +++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java @@ -156,7 +156,7 @@ public void testBigRollup() throws Exception { Map job = getJob(getRollupJobResponse, "rollup-job-test"); if (job != null) { assertThat(ObjectPath.eval("status.job_state", job), equalTo("started")); - assertThat(ObjectPath.eval("stats.documents_indexed", job), equalTo(41)); + assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41)); } }, 30L, TimeUnit.SECONDS); From b2195fb226ebe02140563b9dcb06371f375f71d9 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 5 Sep 2018 14:55:36 +0200 Subject: [PATCH 07/49] adapt to upstream changes --- x-pack/plugin/ml-feature-index-builder/build.gradle | 2 +- .../xpack/ml/featureindexbuilder/FeatureIndexBuilder.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/build.gradle b/x-pack/plugin/ml-feature-index-builder/build.gradle index 30d4cd8a60262..b4706e29487c5 100644 --- a/x-pack/plugin/ml-feature-index-builder/build.gradle +++ 
b/x-pack/plugin/ml-feature-index-builder/build.gradle @@ -16,7 +16,7 @@ compileTestJava.options.compilerArgs << "-Xlint:-rawtypes" dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - compileOnly project(path: xpackModule('core'), configuration: 'shadow') + compileOnly project(path: xpackModule('core'), configuration: 'default') testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index e209bd5e34038..bd9bcd751be6a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -100,13 +100,13 @@ public List getRestHandlers(final Settings settings, final RestCont if (!enabled) { return emptyList(); } - + return Arrays.asList( new ActionHandler<>(PutFeatureIndexBuilderJobAction.INSTANCE, TransportPutFeatureIndexBuilderJobAction.class), new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class) ); } - + @Override public List> getExecutorBuilders(Settings settings) { if (false == enabled) { @@ -126,7 +126,7 @@ public List> getPersistentTasksExecutor(ClusterServic return emptyList(); } - SchedulerEngine schedulerEngine = new SchedulerEngine(Clock.systemUTC()); + SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); return Collections.singletonList(new FeatureIndexBuilderJobTask.FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, schedulerEngine, threadPool)); } From 4946d33ba3e2038f79383baf80f530740e767b59 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 10 Sep 2018 14:44:53 +0200 Subject: [PATCH 08/49] [ML-Dataframe] Use AsyncTwoPhaseIndexer (#33504) Replace mocked indexer and use AsyncTwoPhaseIndexer (introduced in #32743) instead. 
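
For reviewers, the rough shape of the AsyncTwoPhaseIndexer contract is sketched
below. This is an illustrative outline only, derived from the hook methods
overridden in this change; the class name ExampleIndexer and every method body
are placeholder assumptions, not the actual implementation.

    import java.util.Collections;
    import java.util.Map;
    import java.util.concurrent.Executor;
    import java.util.concurrent.atomic.AtomicReference;

    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer;
    import org.elasticsearch.xpack.core.indexing.IndexerState;
    import org.elasticsearch.xpack.core.indexing.IterationResult;
    import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobStats;

    // Illustrative sketch: the first generic parameter is the position type (the
    // composite aggregation "after" key), the second is the stats implementation.
    public abstract class ExampleIndexer
            extends AsyncTwoPhaseIndexer<Map<String, Object>, FeatureIndexBuilderJobStats> {

        protected ExampleIndexer(Executor executor, AtomicReference<IndexerState> initialState,
                                 Map<String, Object> initialPosition) {
            super(executor, initialState, initialPosition, new FeatureIndexBuilderJobStats());
        }

        @Override
        protected String getJobId() {
            return "example-job"; // placeholder id
        }

        @Override
        protected void onStartJob(long now) {
            // per-run setup (e.g. computing time bounds) would go here
        }

        @Override
        protected SearchRequest buildSearchRequest() {
            // the real indexer builds a composite aggregation resuming after getPosition()
            return new SearchRequest();
        }

        @Override
        protected IterationResult<Map<String, Object>> doProcess(SearchResponse searchResponse) {
            // convert buckets into IndexRequests and report the next "after" key;
            // an empty page with isDone == true ends the run
            return new IterationResult<>(Collections.emptyList(), null, true);
        }
    }

The remaining hooks (doNextSearch, doNextBulk, doSaveState, onFailure, onFinish and
onAbort) stay abstract in this sketch and are supplied by the client-aware subclass,
as ClientFeatureIndexBuilderIndexer does in FeatureIndexBuilderJobTask; the task then
only needs to call start(), stop() and maybeTriggerAsyncJob(now) from the scheduler
callback.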
--- .../FeatureIndexBuilder.java | 4 +- ...nsportPutFeatureIndexBuilderJobAction.java | 41 +++- .../job/FeatureIndexBuilderIndexer.java | 179 ++++++------------ .../job/FeatureIndexBuilderJob.java | 6 + ...ndexBuilderJobPersistentTasksExecutor.java | 63 ++++++ .../job/FeatureIndexBuilderJobStats.java | 67 +++++++ .../job/FeatureIndexBuilderJobTask.java | 126 +++++++----- 7 files changed, 312 insertions(+), 174 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index bd9bcd751be6a..01537d493c3e4 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -36,7 +36,7 @@ import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobPersistentTasksExecutor; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; @@ -127,7 +127,7 @@ public List> getPersistentTasksExecutor(ClusterServic } SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); - return Collections.singletonList(new FeatureIndexBuilderJobTask.FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, + return Collections.singletonList(new FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, schedulerEngine, threadPool)); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 16c541db7a01d..37252bf82a410 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -7,9 +7,11 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; +import 
org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -17,6 +19,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksService; @@ -29,8 +32,14 @@ import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; + public class TransportPutFeatureIndexBuilderJobAction extends TransportMasterNodeAction { + + // TODO: hack, to be replaced + private static final String PIVOT_INDEX = "pivot-reviews"; + private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; private final Client client; @@ -67,7 +76,7 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); - + createIndex(client, job.getConfig().getId()); startPersistentTask(job, listener, persistentTasksService); } @@ -90,4 +99,34 @@ static void startPersistentTask(FeatureIndexBuilderJob job, ActionListener + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)); + request.mapping(DOC_TYPE, // <1> + "{\n" + + " \"" + DOC_TYPE + "\": {\n" + + " \"properties\": {\n" + + " \"reviewerId\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"avg_rating\": {\n" + + " \"type\": \"integer\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}", // <2> + XContentType.JSON); + IndicesAdminClient adminClient = client.admin().indices(); + adminClient.create(request).actionGet(); + } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index e5f33c03fb8b2..8ff748ad265d0 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -7,166 +7,117 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; import org.apache.log4j.Logger; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.IndicesAdminClient; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexNotFoundException; 
import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; +import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.core.indexing.IterationResult; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -public class FeatureIndexBuilderIndexer { +public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { private static final String PIVOT_INDEX = "pivot-reviews"; private static final String SOURCE_INDEX = "anonreviews"; private static final Logger logger = Logger.getLogger(FeatureIndexBuilderIndexer.class.getName()); private FeatureIndexBuilderJob job; - private Client client; - public FeatureIndexBuilderIndexer(FeatureIndexBuilderJob job, Client client) { + public FeatureIndexBuilderIndexer(Executor executor, FeatureIndexBuilderJob job, AtomicReference initialState, + Map initialPosition) { + super(executor, initialState, initialPosition, new FeatureIndexBuilderJobStats()); this.job = job; - this.client = client; - logger.info("delete pivot-reviews"); - } - public synchronized void start() { - deleteIndex(client); - - createIndex(client); - - int runs = 0; - - Map after = null; - logger.info("start feature indexing"); - SearchResponse response; - - try { - response = runQuery(client, after); - - CompositeAggregation compositeAggregation = response.getAggregations().get("feature"); - after = compositeAggregation.afterKey(); - - while (after != null) { - indexBuckets(compositeAggregation); - - ++runs; - response = runQuery(client, after); - - compositeAggregation = response.getAggregations().get("feature"); - after = compositeAggregation.afterKey(); - - //after = null; - } - - indexBuckets(compositeAggregation); - } catch (InterruptedException | ExecutionException e) { - logger.error("Failed to build feature index", e); - } - - logger.info("Finished feature indexing"); + @Override + protected String getJobId() { + return job.getConfig().getId(); } - private void indexBuckets(CompositeAggregation compositeAggregation) { - BulkRequest bulkIndexRequest = new BulkRequest(); - try { - for (Bucket b : compositeAggregation.getBuckets()) { + @Override + protected void onStartJob(long now) { + } - InternalAvg avgAgg = b.getAggregations().get("avg_rating"); + 
@Override + protected IterationResult> doProcess(SearchResponse searchResponse) { + final CompositeAggregation agg = searchResponse.getAggregations().get("feature"); + return new IterationResult<>(processBuckets(agg), agg.afterKey(), agg.getBuckets().isEmpty()); + } - XContentBuilder builder; + /* + * Mocked demo case + * + * TODO: replace with proper implementation + */ + private List processBuckets(CompositeAggregation agg) { + return agg.getBuckets().stream().map(b -> { + InternalAvg avgAgg = b.getAggregations().get("avg_rating"); + XContentBuilder builder; + try { builder = jsonBuilder(); + builder.startObject(); builder.field("reviewerId", b.getKey().get("reviewerId")); builder.field("avg_rating", avgAgg.getValue()); builder.endObject(); - bulkIndexRequest.add(new IndexRequest(PIVOT_INDEX, DOC_TYPE).source(builder)); - + } catch (IOException e) { + throw new UncheckedIOException(e); } - client.bulk(bulkIndexRequest); - } catch (IOException e) { - logger.error("Failed to index", e); - } + + String indexName = PIVOT_INDEX + "_" + job.getConfig().getId(); + IndexRequest request = new IndexRequest(indexName, DOC_TYPE).source(builder); + return request; + }).collect(Collectors.toList()); + } + + @Override + protected SearchRequest buildSearchRequest() { + + final Map position = getPosition(); + SearchRequest request = buildFeatureQuery(position); + return request; } - + /* - * Hardcoded demo case for pivoting + * Mocked demo case + * + * TODO: everything below will be replaced with proper implementation read from job configuration */ - - private static void deleteIndex(Client client) { - DeleteIndexRequest deleteIndex = new DeleteIndexRequest(PIVOT_INDEX); - - IndicesAdminClient adminClient = client.admin().indices(); - try { - adminClient.delete(deleteIndex).actionGet(); - } catch (IndexNotFoundException e) { - } - } - - private static void createIndex(Client client) { - - CreateIndexRequest request = new CreateIndexRequest(PIVOT_INDEX); - request.settings(Settings.builder() // <1> - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) - ); - request.mapping(DOC_TYPE, // <1> - "{\n" + - " \"" + DOC_TYPE + "\": {\n" + - " \"properties\": {\n" + - " \"reviewerId\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"avg_rating\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}", // <2> - XContentType.JSON); - IndicesAdminClient adminClient = client.admin().indices(); - adminClient.create(request).actionGet(); - } - private static SearchRequest buildFeatureQuery(Map after) { QueryBuilder queryBuilder = new MatchAllQueryBuilder(); SearchRequest searchRequest = new SearchRequest(SOURCE_INDEX); - + List> sources = new ArrayList<>(); sources.add(new TermsValuesSourceBuilder("reviewerId").field("reviewerId")); - + CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder("feature", sources); compositeAggregation.size(1000); - + if (after != null) { compositeAggregation.aggregateAfter(after); } - + compositeAggregation.subAggregation(AggregationBuilders.avg("avg_rating").field("rating")); compositeAggregation.subAggregation(AggregationBuilders.cardinality("dc_vendors").field("vendorId")); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); @@ -174,21 +125,7 @@ private static SearchRequest buildFeatureQuery(Map after) { sourceBuilder.size(0); sourceBuilder.query(queryBuilder); searchRequest.source(sourceBuilder); - + return searchRequest; - } - - private static SearchResponse runQuery(Client client, Map after) 
throws InterruptedException, ExecutionException { - - SearchRequest request = buildFeatureQuery(after); - SearchResponse response = client.search(request).get(); - - return response; - } - - private static void indexResult() { - - - } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java index 16a4163e8135a..a1edfca2684a4 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java @@ -16,6 +16,8 @@ import org.elasticsearch.xpack.core.XPackPlugin; import java.io.IOException; +import java.util.Collections; +import java.util.Map; import java.util.Objects; public class FeatureIndexBuilderJob implements XPackPlugin.XPackPersistentTaskParams { @@ -92,4 +94,8 @@ public boolean equals(Object other) { public int hashCode() { return Objects.hash(config); } + + public Map getHeaders() { + return Collections.emptyMap(); + } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java new file mode 100644 index 0000000000000..fefb383f94b05 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; + +import java.util.Map; + +public class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { + private final Client client; + private final SchedulerEngine schedulerEngine; + private final ThreadPool threadPool; + + public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, + ThreadPool threadPool) { + super(settings, "xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); + this.client = client; + this.schedulerEngine = schedulerEngine; + this.threadPool = threadPool; + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureIndexBuilderJob params, PersistentTaskState state) { + FeatureIndexBuilderJobTask buildTask = (FeatureIndexBuilderJobTask) task; + SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job( + FeatureIndexBuilderJobTask.SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); + + // Note that while the task is added to the scheduler here, the internal state + // will prevent + // it from doing any work until the task is "started" via the StartJob api + schedulerEngine.register(buildTask); + schedulerEngine.add(schedulerJob); + + logger.info("FeatureIndexBuilder job [" + params.getConfig().getId() + "] created."); + } + + static SchedulerEngine.Schedule next() { + return (startTime, now) -> { + return now + 1000; // to be fixed, hardcode something + }; + } + + @Override + protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, + PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { + return new FeatureIndexBuilderJobTask(id, type, action, parentTaskId, persistentTask.getParams(), + (FeatureIndexBuilderJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + } +} \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java new file mode 100644 index 0000000000000..a7c9392800f09 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class FeatureIndexBuilderJobStats extends IndexerJobStats { + private static ParseField NUM_PAGES = new ParseField("pages_processed"); + private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); + private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); + private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME.getPreferredName(), + args -> new FeatureIndexBuilderJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); + + static { + PARSER.declareLong(constructorArg(), NUM_PAGES); + PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); + } + + public FeatureIndexBuilderJobStats() { + super(); + } + + public FeatureIndexBuilderJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) { + super(numPages, numInputDocuments, numOuputDocuments, numInvocations); + } + + public FeatureIndexBuilderJobStats(StreamInput in) throws IOException { + super(in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NUM_PAGES.getPreferredName(), numPages); + builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); + builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); + builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); + builder.endObject(); + return builder; + } + + public static FeatureIndexBuilderJobStats fromXContent(XContentParser parser) { + try { + return PARSER.parse(parser, null); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index a4de3927e5bc1..381e57e9027ca 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -8,81 +8,47 @@ import org.apache.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.Nullable; -import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.AllocatedPersistentTask; -import org.elasticsearch.persistent.PersistentTaskState; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction.Response; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { private static final Logger logger = Logger.getLogger(FeatureIndexBuilderJobTask.class.getName()); + + private final FeatureIndexBuilderJob job; + private final ThreadPool threadPool; private final FeatureIndexBuilderIndexer indexer; static final String SCHEDULE_NAME = "xpack/feature_index_builder/job" + "/schedule"; - public static class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { - private final Client client; - private final SchedulerEngine schedulerEngine; - private final ThreadPool threadPool; - - public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, - ThreadPool threadPool) { - super(settings, "xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); - this.client = client; - this.schedulerEngine = schedulerEngine; - this.threadPool = threadPool; - } - - @Override - protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureIndexBuilderJob params, PersistentTaskState state) { - FeatureIndexBuilderJobTask buildTask = (FeatureIndexBuilderJobTask) task; - SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job(SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); - - // Note that while the task is added to the scheduler here, the internal state - // will prevent - // it from doing any work until the task is "started" via the StartJob api - schedulerEngine.register(buildTask); - schedulerEngine.add(schedulerJob); - - logger.info("FeatureIndexBuilder job [" + params.getConfig().getId() + "] created."); - } - - static SchedulerEngine.Schedule next() { - return (startTime, now) -> { - return now + 1000; // to be fixed, hardcode something - }; - } - - @Override - protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { - return new FeatureIndexBuilderJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (FeatureIndexBuilderJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); - } - } - - private final FeatureIndexBuilderJob job; - public FeatureIndexBuilderJobTask(long id, String type, String action, TaskId parentTask, FeatureIndexBuilderJob job, FeatureIndexBuilderJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { super(id, type, action, "" + "_" + 
job.getConfig().getId(), parentTask, headers); this.job = job; + this.threadPool = threadPool; logger.info("construct job task"); // todo: simplistic implementation for now - this.indexer = new FeatureIndexBuilderIndexer(job, client); + IndexerState initialState = IndexerState.STOPPED; + Map initialPosition = null; + this.indexer = new ClientFeatureIndexBuilderIndexer(job, new AtomicReference<>(initialState), initialPosition, client); } public FeatureIndexBuilderJobConfig getConfig() { @@ -96,6 +62,66 @@ public synchronized void start(ActionListener listener) { @Override public void triggered(Event event) { + if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { + logger.debug( + "FeatureIndexBuilder indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); + indexer.maybeTriggerAsyncJob(System.currentTimeMillis()); + } } + protected class ClientFeatureIndexBuilderIndexer extends FeatureIndexBuilderIndexer { + private final Client client; + + public ClientFeatureIndexBuilderIndexer(FeatureIndexBuilderJob job, AtomicReference initialState, + Map initialPosition, Client client) { + super(threadPool.executor(ThreadPool.Names.GENERIC), job, initialState, initialPosition); + this.client = client; + } + + @Override + protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { + ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ML_ORIGIN, client, SearchAction.INSTANCE, request, + nextPhase); + } + + @Override + protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { + ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ML_ORIGIN, client, BulkAction.INSTANCE, request, nextPhase); + } + + @Override + protected void doSaveState(IndexerState indexerState, Map position, Runnable next) { + if (indexerState.equals(IndexerState.ABORTING)) { + // If we're aborting, just invoke `next` (which is likely an onFailure handler) + next.run(); + } else { + // to be implemented + + final FeatureIndexBuilderJobState state = new FeatureIndexBuilderJobState(indexerState); + logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); + + // TODO: we can not persist the state right now, need to be called from the task + updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> { + // We failed to update the persistent task for some reason, + // set our flag back to what it was before + next.run(); + })); + } + } + + @Override + protected void onFailure(Exception exc) { + logger.warn("FeatureIndexBuilder job [" + job.getConfig().getId() + "] failed with an exception: ", exc); + } + + @Override + protected void onFinish() { + logger.info("Finished indexing for job [" + job.getConfig().getId() + "]"); + } + + @Override + protected void onAbort() { + logger.info("FeatureIndexBuilder job [" + job.getConfig().getId() + "] received abort request, stopping indexer"); + } + } } From b13cca3b7d8b6b68e2579c4e338048a6118ad70d Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 12 Sep 2018 07:55:24 +0200 Subject: [PATCH 09/49] [ML-Dataframe] add stop and delete endpoints (#33597) add rest endpoints to stop and delete feature build indexer tasks and fix task saving/reloading --- .../FeatureIndexBuilder.java | 39 +++- .../DeleteFeatureIndexBuilderJobAction.java | 100 ++++++++++ .../StartFeatureIndexBuilderJobAction.java | 20 +- .../StopFeatureIndexBuilderJobAction.java | 180 ++++++++++++++++++ 
...ortDeleteFeatureIndexBuilderJobAction.java | 105 ++++++++++ ...sportStopFeatureIndexBuilderJobAction.java | 84 ++++++++ .../job/FeatureIndexBuilderJob.java | 4 + .../job/FeatureIndexBuilderJobState.java | 45 ++++- .../job/FeatureIndexBuilderJobTask.java | 14 +- ...estDeleteFeatureIndexBuilderJobAction.java | 41 ++++ .../RestStopFeatureIndexBuilderJobAction.java | 39 ++++ ...tureIndexBuilderJobActionRequestTests.java | 22 +++ ...tureIndexBuilderJobActionRequestTests.java | 35 ++++ 13 files changed, 706 insertions(+), 22 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index 01537d493c3e4..e1e384b57415f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -19,26 +19,36 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; 
import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportDeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStopFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobPersistentTasksExecutor; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestDeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStopFeatureIndexBuilderJobAction; import java.time.Clock; import java.util.ArrayList; @@ -64,22 +74,30 @@ public class FeatureIndexBuilder extends Plugin implements ActionPlugin, Persist private final boolean enabled; private final Settings settings; + private final boolean transportClientMode; public FeatureIndexBuilder(Settings settings) { this.settings = settings; // todo: XPackSettings.FEATURE_INDEX_BUILDER_ENABLED.get(settings); this.enabled = true; + this.transportClientMode = XPackPlugin.transportClientMode(settings); } @Override public Collection createGuiceModules() { List modules = new ArrayList<>(); + if (transportClientMode) { + return modules; + } + modules.add(b -> XPackPlugin.bindFeatureSet(b, FeatureIndexBuilderFeatureSet.class)); return modules; } + protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } + @Override public List getRestHandlers(final Settings settings, final RestController restController, final ClusterSettings clusterSettings, final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter, @@ -91,7 +109,9 @@ public List getRestHandlers(final Settings settings, final RestCont return Arrays.asList( new RestPutFeatureIndexBuilderJobAction(settings, restController), - new RestStartFeatureIndexBuilderJobAction(settings, restController) + new RestStartFeatureIndexBuilderJobAction(settings, restController), + new RestStopFeatureIndexBuilderJobAction(settings, restController), + new RestDeleteFeatureIndexBuilderJobAction(settings, restController) ); } @@ -103,13 +123,15 @@ public List getRestHandlers(final Settings settings, final RestCont return Arrays.asList( new ActionHandler<>(PutFeatureIndexBuilderJobAction.INSTANCE, TransportPutFeatureIndexBuilderJobAction.class), - new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class) + new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class), + new ActionHandler<>(StopFeatureIndexBuilderJobAction.INSTANCE, TransportStopFeatureIndexBuilderJobAction.class), + new 
ActionHandler<>(DeleteFeatureIndexBuilderJobAction.INSTANCE, TransportDeleteFeatureIndexBuilderJobAction.class) ); } @Override public List> getExecutorBuilders(Settings settings) { - if (false == enabled) { + if (false == enabled || transportClientMode) { return emptyList(); } @@ -122,7 +144,7 @@ public List> getExecutorBuilders(Settings settings) { @Override public List> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, Client client) { - if (enabled == false) { + if (enabled == false || transportClientMode) { return emptyList(); } @@ -130,14 +152,19 @@ public List> getPersistentTasksExecutor(ClusterServic return Collections.singletonList(new FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, schedulerEngine, threadPool)); } + @Override public List getNamedXContent() { if (enabled == false) { return emptyList(); } - return Collections.singletonList( + return Arrays.asList( new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField("xpack/feature_index_builder/job"), - FeatureIndexBuilderJob::fromXContent) + FeatureIndexBuilderJob::fromXContent), + new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(FeatureIndexBuilderJobState.NAME), + FeatureIndexBuilderJobState::fromXContent), + new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(FeatureIndexBuilderJobState.NAME), + FeatureIndexBuilderJobState::fromXContent) ); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..bdb4ef32a5d2c --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
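With stop and delete wired into the plugin alongside put and start, all four job actions can be driven through the client exactly as the new REST handlers further down do. A minimal usage sketch, assuming a Client named client and a Logger named logger are in scope and that a job with the hypothetical id "my_job" has already been created and started:

    StopFeatureIndexBuilderJobAction.Request stopRequest = new StopFeatureIndexBuilderJobAction.Request("my_job");
    client.execute(StopFeatureIndexBuilderJobAction.INSTANCE, stopRequest, ActionListener.wrap(
            stopResponse -> logger.info("stopped: " + stopResponse.isStopped()),
            e -> logger.error("failed to stop job", e)));

    // Once stopped, the job's persistent task can be removed entirely.
    DeleteFeatureIndexBuilderJobAction.Request deleteRequest = new DeleteFeatureIndexBuilderJobAction.Request("my_job");
    client.execute(DeleteFeatureIndexBuilderJobAction.INSTANCE, deleteRequest, ActionListener.wrap(
            deleteResponse -> logger.info("deleted: " + deleteResponse.isAcknowledged()),
            e -> logger.error("failed to delete job", e)));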
+ */ +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteFeatureIndexBuilderJobAction extends Action { + + public static final DeleteFeatureIndexBuilderJobAction INSTANCE = new DeleteFeatureIndexBuilderJobAction(); + public static final String NAME = "cluster:admin/xpack/feature_index_builder/delete"; + + private DeleteFeatureIndexBuilderJobAction() { + super(NAME); + } + + @Override + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + public static class Request extends AcknowledgedRequest implements ToXContent { + private String id; + + public Request(String id) { + this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); + } + + public Request() { + } + + public String getId() { + return id; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(id, other.id); + } + } + + public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, DeleteFeatureIndexBuilderJobAction action) { + super(client, action, new Request()); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java index 61312538fff99..6fcfdf4e8aa71 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java @@ -19,7 +19,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.core.rollup.RollupField; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + import java.io.IOException; import java.util.Collections; import java.util.Objects; @@ -42,7 +43,7 @@ public static class Request extends BaseTasksRequest implements ToXCont private String id; public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, RollupField.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); } public Request() { @@ -71,7 +72,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(RollupField.ID.getPreferredName(), id); + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); return builder; } @@ -92,7 +93,7 @@ public boolean equals(Object obj) { return Objects.equals(id, other.id); } } - + public static class RequestBuilder extends ActionRequestBuilder { protected RequestBuilder(ElasticsearchClient client, StartFeatureIndexBuilderJobAction action) { @@ -142,12 +143,15 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public boolean equals(Object o) { - if (this == o) + public boolean equals(Object obj) { + if (this == obj) { return true; - if (o == null || getClass() != o.getClass()) + } + + if (obj == null || getClass() != obj.getClass()) { return false; - Response response = (Response) o; + } + Response response = (Response) obj; return started == response.started; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..49f3db0346246 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java @@ -0,0 +1,180 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; +import java.util.Collections; +import java.util.Objects; + +public class StopFeatureIndexBuilderJobAction extends Action { + + public static final StopFeatureIndexBuilderJobAction INSTANCE = new StopFeatureIndexBuilderJobAction(); + public static final String NAME = "cluster:admin/xpack/feature_index_builder/stop"; + + private StopFeatureIndexBuilderJobAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContent { + private String id; + + public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString(Request::setId, FeatureIndexBuilderJob.ID); + } + + public Request(String id) { + this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); + } + + public Request() { + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(id, other.id); + } + + @Override + public boolean match(Task task) { + String expectedDescription = FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id; + + return task.getDescription().equals(expectedDescription); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, StopFeatureIndexBuilderJobAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + + private boolean stopped; + + public Response() { + super(Collections.emptyList(), Collections.emptyList()); + } + + public Response(StreamInput in) 
throws IOException { + super(Collections.emptyList(), Collections.emptyList()); + readFrom(in); + } + + public Response(boolean stopped) { + super(Collections.emptyList(), Collections.emptyList()); + this.stopped = stopped; + } + + public boolean isStopped() { + return stopped; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + stopped = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(stopped); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("stopped", stopped); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Response response = (Response) o; + return stopped == response.stopped; + } + + @Override + public int hashCode() { + return Objects.hash(stopped); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..56c118ac00436 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
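The stop request carries nothing but the job id and declares an ObjectParser, which the XContent-based request tests later in this patch exercise; the response serializes as a single "stopped" boolean. A parsing sketch, assuming the hypothetical body {"id": "my_job"} and a surrounding method that may throw IOException:

    try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
            DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{\"id\":\"my_job\"}")) {
        StopFeatureIndexBuilderJobAction.Request request = StopFeatureIndexBuilderJobAction.Request.PARSER.parse(parser, null);
        assert "my_job".equals(request.getId());
    }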
+ */ +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +public class TransportDeleteFeatureIndexBuilderJobAction + extends TransportMasterNodeAction { + + private final PersistentTasksService persistentTasksService; + + @Inject + public TransportDeleteFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + PersistentTasksService persistentTasksService, ClusterService clusterService) { + super(settings, DeleteFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, DeleteFeatureIndexBuilderJobAction.Request::new); + this.persistentTasksService = persistentTasksService; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + @Override + protected void masterOperation(DeleteFeatureIndexBuilderJobAction.Request request, ClusterState state, + ActionListener listener) throws Exception { + + String jobId = request.getId(); + TimeValue timeout = new TimeValue(60, TimeUnit.SECONDS); // TODO make this a config option + + // Step 1. Cancel the persistent task + persistentTasksService.sendRemoveRequest(jobId, new ActionListener>() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { + logger.debug("Request to cancel Task for Feature Index Builder job [" + jobId + "] successful."); + + // Step 2. Wait for the task to finish cancellation internally + persistentTasksService.waitForPersistentTaskCondition(jobId, Objects::isNull, timeout, + new PersistentTasksService.WaitForPersistentTaskListener() { + @Override + public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { + logger.debug("Task for Feature Index Builder job [" + jobId + "] successfully canceled."); + listener.onResponse(new AcknowledgedResponse(true)); + } + + @Override + public void onFailure(Exception e) { + logger.error("Error while cancelling task for Feature Index Builder job [" + jobId + + "]." 
+ e); + listener.onFailure(e); + } + + @Override + public void onTimeout(TimeValue timeout) { + String msg = "Stopping of Feature Index Builder job [" + jobId + "] timed out after [" + timeout + "]."; + logger.warn(msg); + listener.onFailure(new ElasticsearchException(msg)); + } + }); + } + + @Override + public void onFailure(Exception e) { + logger.error("Error while requesting to cancel task for Feature Index Builder job [" + jobId + "]" + e); + listener.onFailure(e); + } + }); + + } + + @Override + protected ClusterBlockException checkBlock(DeleteFeatureIndexBuilderJobAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..68a4fee37fa77 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; + +import java.io.IOException; +import java.util.List; + +public class TransportStopFeatureIndexBuilderJobAction extends TransportTasksAction { + + + @Inject + public TransportStopFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, + ActionFilters actionFilters, ClusterService clusterService) { + super(settings, StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, + StopFeatureIndexBuilderJobAction.Request::new, StopFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); + } + + @Override + protected void doExecute(Task task, StopFeatureIndexBuilderJobAction.Request request, ActionListener listener) { + super.doExecute(task, request, listener); + } + + @Override + protected void taskOperation(StopFeatureIndexBuilderJobAction.Request request, + FeatureIndexBuilderJobTask jobTask, + ActionListener listener) { + if (jobTask.getConfig().getId().equals(request.getId())) { + jobTask.stop(listener); + } else { + listener.onFailure(new RuntimeException("ID of feature index builder task [" + jobTask.getConfig().getId() + + "] does not match request's ID [" + request.getId() + "]")); + } + } + + 
@Override + protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndexBuilderJobAction.Request request, List tasks, + List taskOperationFailures, + List failedNodeExceptions) { + + if (taskOperationFailures.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(taskOperationFailures.get(0).getCause()); + } else if (failedNodeExceptions.isEmpty() == false) { + throw org.elasticsearch.ExceptionsHelper + .convertToElastic(failedNodeExceptions.get(0)); + } + + // Either the job doesn't exist (the user didn't create it yet) or was deleted after the Stop API executed. + // In either case, let the user know + if (tasks.size() == 0) { + throw new ResourceNotFoundException("Task for Feature Index Builder Job [" + request.getId() + "] not found"); + } + + assert tasks.size() == 1; + + boolean allStopped = tasks.stream().allMatch(StopFeatureIndexBuilderJobAction.Response::isStopped); + return new StopFeatureIndexBuilderJobAction.Response(allStopped); + } + + @Override + protected StopFeatureIndexBuilderJobAction.Response readTaskResponse(StreamInput in) throws IOException { + return new StopFeatureIndexBuilderJobAction.Response(in); + } + +} \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java index a1edfca2684a4..368a9e0897a05 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java @@ -22,8 +22,12 @@ public class FeatureIndexBuilderJob implements XPackPlugin.XPackPersistentTaskParams { + public static final ParseField ID = new ParseField("id"); public static final String NAME = "xpack/feature_index_builder/job"; + // note: this is used to match tasks + public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "feature_index_builder-"; + private FeatureIndexBuilderJobConfig config; private static final ParseField CONFIG = new ParseField("config"); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java index 5634910e6c529..580a3fe81ce30 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -18,18 +19,30 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; +import java.util.SortedMap; +import java.util.TreeMap; + import static 
org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskState { public static final String NAME = "xpack/feature_index_builder/job"; private final IndexerState state; + @Nullable + private final SortedMap currentPosition; + private static final ParseField STATE = new ParseField("job_state"); + private static final ParseField CURRENT_POSITION = new ParseField("current_position"); + @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - args -> new FeatureIndexBuilderJobState((IndexerState) args[0])); + args -> new FeatureIndexBuilderJobState((IndexerState) args[0], (HashMap) args[1])); static { PARSER.declareField(constructorArg(), p -> { @@ -37,21 +50,38 @@ public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskS return IndexerState.fromString(p.text()); } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, STATE, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.START_OBJECT) { + return p.map(); + } + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY); } - public FeatureIndexBuilderJobState(IndexerState state) { + public FeatureIndexBuilderJobState(IndexerState state, @Nullable Map position) { this.state = state; + this.currentPosition = Collections.unmodifiableSortedMap(position == null ? null : new TreeMap<>(position)); + } public FeatureIndexBuilderJobState(StreamInput in) throws IOException { state = IndexerState.fromStream(in); + currentPosition = in.readBoolean() ? 
Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())) : null; } public IndexerState getJobState() { return state; } + public Map getPosition() { + return currentPosition; + } + public static FeatureIndexBuilderJobState fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); @@ -64,6 +94,9 @@ public static FeatureIndexBuilderJobState fromXContent(XContentParser parser) { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(STATE.getPreferredName(), state.value()); + if (currentPosition != null) { + builder.field(CURRENT_POSITION.getPreferredName(), currentPosition); + } builder.endObject(); return builder; } @@ -76,6 +109,10 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { state.writeTo(out); + out.writeBoolean(currentPosition != null); + if (currentPosition != null) { + out.writeMap(currentPosition); + } } @Override @@ -90,11 +127,11 @@ public boolean equals(Object other) { FeatureIndexBuilderJobState that = (FeatureIndexBuilderJobState) other; - return Objects.equals(this.state, that.state); + return Objects.equals(this.state, that.state) && Objects.equals(this.currentPosition, that.currentPosition); } @Override public int hashCode() { - return Objects.hash(state); + return Objects.hash(state, currentPosition); } } \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index 381e57e9027ca..173b9125d33ee 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction.Response; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -41,7 +42,7 @@ public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implemen public FeatureIndexBuilderJobTask(long id, String type, String action, TaskId parentTask, FeatureIndexBuilderJob job, FeatureIndexBuilderJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { - super(id, type, action, "" + "_" + job.getConfig().getId(), parentTask, headers); + super(id, type, action, FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); this.job = job; this.threadPool = threadPool; logger.info("construct job task"); @@ -56,10 +57,17 @@ public FeatureIndexBuilderJobConfig getConfig() { } public synchronized void start(ActionListener listener) { + // TODO: safeguards missing, see rollup code indexer.start(); listener.onResponse(new StartFeatureIndexBuilderJobAction.Response(true)); } + public void stop(ActionListener listener) { + // TODO: safeguards missing, see rollup code + indexer.stop(); + listener.onResponse(new 
StopFeatureIndexBuilderJobAction.Response(true)); + } + @Override public void triggered(Event event) { if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { @@ -95,9 +103,7 @@ protected void doSaveState(IndexerState indexerState, Map positi // If we're aborting, just invoke `next` (which is likely an onFailure handler) next.run(); } else { - // to be implemented - - final FeatureIndexBuilderJobState state = new FeatureIndexBuilderJobState(indexerState); + final FeatureIndexBuilderJobState state = new FeatureIndexBuilderJobState(indexerState, getPosition()); logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); // TODO: we can not persist the state right now, need to be called from the task diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..d4f0b542d4896 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; + +import java.io.IOException; + +public class RestDeleteFeatureIndexBuilderJobAction extends BaseRestHandler { + public static final ParseField ID = new ParseField("id"); + + public RestDeleteFeatureIndexBuilderJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.DELETE, FeatureIndexBuilder.BASE_PATH + "job/{id}/", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(ID.getPreferredName()); + DeleteFeatureIndexBuilderJobAction.Request request = new DeleteFeatureIndexBuilderJobAction.Request(id); + + return channel -> client.execute(DeleteFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "feature_index_builder_delete_job_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java new file mode 100644 index 0000000000000..f20ca65c766bf --- /dev/null +++ 
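With the position threaded through, doSaveState now snapshots where the composite aggregation left off together with the indexer state, replacing the earlier placeholder. A small sketch of such a state object and the rough JSON it renders to, using a hypothetical terms key:

    Map<String, Object> position = new HashMap<>();
    position.put("reviewerId", "user_0");   // hypothetical last bucket key returned by the composite agg
    FeatureIndexBuilderJobState state = new FeatureIndexBuilderJobState(IndexerState.INDEXING, position);
    // toXContent renders this roughly as:
    // { "job_state": "indexing", "current_position": { "reviewerId": "user_0" } }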
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; + +public class RestStopFeatureIndexBuilderJobAction extends BaseRestHandler { + + public RestStopFeatureIndexBuilderJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH + "job/{id}/_stop", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); + StopFeatureIndexBuilderJobAction.Request request = new StopFeatureIndexBuilderJobAction.Request(id); + + return channel -> client.execute(StopFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "feature_index_builder_stop_job_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java new file mode 100644 index 0000000000000..81eda8351f614 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
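The stop and delete requests are plain Streamables keyed only by the job id, which is exactly what the request tests that follow assert by round-tripping random instances. A hand-rolled sketch of the same round-trip, with a hypothetical id:

    DeleteFeatureIndexBuilderJobAction.Request original = new DeleteFeatureIndexBuilderJobAction.Request("my_job");
    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);
    DeleteFeatureIndexBuilderJobAction.Request copy = new DeleteFeatureIndexBuilderJobAction.Request();
    copy.readFrom(out.bytes().streamInput());
    assert original.equals(copy);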
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction.Request; + +public class DeleteFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableTestCase { + @Override + protected Request createTestInstance() { + return new Request(randomAlphaOfLengthBetween(1, 20)); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java new file mode 100644 index 0000000000000..89c61a021f2b2 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction.Request; +import java.io.IOException; + +public class StopFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableXContentTestCase { + + @Override + protected Request createTestInstance() { + return new Request(randomAlphaOfLengthBetween(1, 10)); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected Request doParseInstance(XContentParser parser) throws IOException { + return Request.PARSER.parse(parser, null); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } +} From f3de9fb74f6d37ec7a255aff0f980564fe9f9fa0 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 12 Sep 2018 13:51:41 +0200 Subject: [PATCH 10/49] adapt to upstream changes --- .../ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 8ff748ad265d0..998a295f1297e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -18,7 +18,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; +import 
org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; From 28e426a778dbd04da3231b3fa53143545a96f1ab Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 25 Sep 2018 17:48:50 +0200 Subject: [PATCH 11/49] [ML-Dataframe] add basic configuration (#33813) add basic configuration for feature index builder job, reads the source, destination index, aggregation source and aggregation config --- ...nsportPutFeatureIndexBuilderJobAction.java | 8 +- ...sportStopFeatureIndexBuilderJobAction.java | 35 +++--- .../job/AggregationConfig.java | 76 ++++++++++++ .../job/FeatureIndexBuilderIndexer.java | 46 +++---- .../job/FeatureIndexBuilderJobConfig.java | 74 +++++++++++- .../job/FeatureIndexBuilderJobState.java | 1 - .../featureindexbuilder/job/SourceConfig.java | 112 ++++++++++++++++++ ...tureIndexBuilderJobActionRequestTests.java | 3 +- ...erializingFeatureIndexBuilderTestCase.java | 47 ++++++++ .../job/AggregationConfigTests.java | 65 ++++++++++ .../FeatureIndexBuilderJobConfigTests.java | 49 ++++++++ .../job/SourceConfigTests.java | 59 +++++++++ 12 files changed, 516 insertions(+), 59 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 37252bf82a410..7e28f68a23eda 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -37,9 +37,6 @@ public class TransportPutFeatureIndexBuilderJobAction extends TransportMasterNodeAction { - // TODO: hack, to be replaced - private static final String PIVOT_INDEX = "pivot-reviews"; - private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; private final Client client; @@ -76,7 +73,7 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); - createIndex(client, job.getConfig().getId()); + 
createIndex(client, job.getConfig().getDestinationIndex()); startPersistentTask(job, listener, persistentTasksService); } @@ -105,9 +102,8 @@ protected ClusterBlockException checkBlock(PutFeatureIndexBuilderJobAction.Reque * * TODO: everything below will be replaced with proper implementation read from job configuration */ - private static void createIndex(Client client, String suffix) { + private static void createIndex(Client client, String indexName) { - String indexName = PIVOT_INDEX + "_" + suffix; CreateIndexRequest request = new CreateIndexRequest(indexName); request.settings(Settings.builder() // <1> diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java index 68a4fee37fa77..938e3d71a4f9c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.featureindexbuilder.action; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; @@ -23,26 +24,26 @@ import java.io.IOException; import java.util.List; -public class TransportStopFeatureIndexBuilderJobAction extends TransportTasksAction { - +public class TransportStopFeatureIndexBuilderJobAction extends + TransportTasksAction { @Inject - public TransportStopFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, - ActionFilters actionFilters, ClusterService clusterService) { + public TransportStopFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService) { super(settings, StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, StopFeatureIndexBuilderJobAction.Request::new, StopFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); } @Override - protected void doExecute(Task task, StopFeatureIndexBuilderJobAction.Request request, ActionListener listener) { + protected void doExecute(Task task, StopFeatureIndexBuilderJobAction.Request request, + ActionListener listener) { super.doExecute(task, request, listener); } @Override - protected void taskOperation(StopFeatureIndexBuilderJobAction.Request request, - FeatureIndexBuilderJobTask jobTask, - ActionListener listener) { + protected void taskOperation(StopFeatureIndexBuilderJobAction.Request request, FeatureIndexBuilderJobTask jobTask, + ActionListener listener) { if (jobTask.getConfig().getId().equals(request.getId())) { jobTask.stop(listener); } else { @@ -52,19 +53,18 @@ protected void taskOperation(StopFeatureIndexBuilderJobAction.Request request, } @Override - protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndexBuilderJobAction.Request request, List tasks, - List taskOperationFailures, - List failedNodeExceptions) { + protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndexBuilderJobAction.Request request, + List tasks, List taskOperationFailures, + List 
failedNodeExceptions) { if (taskOperationFailures.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(taskOperationFailures.get(0).getCause()); + throw ExceptionsHelper.convertToElastic(taskOperationFailures.get(0).getCause()); } else if (failedNodeExceptions.isEmpty() == false) { - throw org.elasticsearch.ExceptionsHelper - .convertToElastic(failedNodeExceptions.get(0)); + throw ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); } - // Either the job doesn't exist (the user didn't create it yet) or was deleted after the Stop API executed. + // Either the job doesn't exist (the user didn't create it yet) or was deleted + // after the Stop API executed. // In either case, let the user know if (tasks.size() == 0) { throw new ResourceNotFoundException("Task for Feature Index Builder Job [" + request.getId() + "] not found"); @@ -80,5 +80,4 @@ protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndex protected StopFeatureIndexBuilderJobAction.Response readTaskResponse(StreamInput in) throws IOException { return new StopFeatureIndexBuilderJobAction.Response(in); } - } \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java new file mode 100644 index 0000000000000..d124453c07893 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java @@ -0,0 +1,76 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/* + * Wrapper for the aggregations config part of a composite aggregation. + * + * For now just wraps aggregations from composite aggs. 
+ * + */ +public class AggregationConfig implements Writeable, ToXContentObject { + + private final AggregatorFactories.Builder aggregatorFactoryBuilder; + + public AggregationConfig(AggregatorFactories.Builder aggregatorFactoryBuilder) { + this.aggregatorFactoryBuilder = aggregatorFactoryBuilder; + } + + AggregationConfig(final StreamInput in) throws IOException { + aggregatorFactoryBuilder = new AggregatorFactories.Builder(in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return aggregatorFactoryBuilder.toXContent(builder, params); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + aggregatorFactoryBuilder.writeTo(out); + } + + public List getAggregatorFactories() { + return aggregatorFactoryBuilder.getAggregatorFactories(); + } + + public static AggregationConfig fromXContent(final XContentParser parser) throws IOException { + return new AggregationConfig(AggregatorFactories.parseAggregators(parser)); + } + + @Override + public int hashCode() { + return Objects.hash(aggregatorFactoryBuilder); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final AggregationConfig that = (AggregationConfig) other; + + return Objects.equals(this.aggregatorFactoryBuilder, that.aggregatorFactoryBuilder); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 998a295f1297e..015b73c732eaf 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -13,12 +13,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -26,7 +25,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; @@ -37,8 +35,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { - private static final String PIVOT_INDEX = "pivot-reviews"; - private static 
final String SOURCE_INDEX = "anonreviews"; private static final Logger logger = Logger.getLogger(FeatureIndexBuilderIndexer.class.getName()); private FeatureIndexBuilderJob job; @@ -71,21 +67,24 @@ protected IterationResult> doProcess(SearchResponse searchRe * TODO: replace with proper implementation */ private List processBuckets(CompositeAggregation agg) { + // for now only 1 source supported + String destinationFieldName = job.getConfig().getSourceConfig().getSources().get(0).name(); + String aggName = job.getConfig().getAggregationConfig().getAggregatorFactories().get(0).getName(); + return agg.getBuckets().stream().map(b -> { - InternalAvg avgAgg = b.getAggregations().get("avg_rating"); + NumericMetricsAggregation.SingleValue aggResult = b.getAggregations().get(aggName); XContentBuilder builder; try { builder = jsonBuilder(); - builder.startObject(); - builder.field("reviewerId", b.getKey().get("reviewerId")); - builder.field("avg_rating", avgAgg.getValue()); + builder.field(destinationFieldName, b.getKey().get(destinationFieldName)); + builder.field(aggName, aggResult.value()); builder.endObject(); } catch (IOException e) { throw new UncheckedIOException(e); } - String indexName = PIVOT_INDEX + "_" + job.getConfig().getId(); + String indexName = job.getConfig().getDestinationIndex(); IndexRequest request = new IndexRequest(indexName, DOC_TYPE).source(builder); return request; }).collect(Collectors.toList()); @@ -93,33 +92,24 @@ private List processBuckets(CompositeAggregation agg) { @Override protected SearchRequest buildSearchRequest() { - final Map position = getPosition(); - SearchRequest request = buildFeatureQuery(position); - return request; - } - /* - * Mocked demo case - * - * TODO: everything below will be replaced with proper implementation read from job configuration - */ - private static SearchRequest buildFeatureQuery(Map after) { QueryBuilder queryBuilder = new MatchAllQueryBuilder(); - SearchRequest searchRequest = new SearchRequest(SOURCE_INDEX); + SearchRequest searchRequest = new SearchRequest(job.getConfig().getIndexPattern()); - List> sources = new ArrayList<>(); - sources.add(new TermsValuesSourceBuilder("reviewerId").field("reviewerId")); + List> sources = job.getConfig().getSourceConfig().getSources(); CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder("feature", sources); compositeAggregation.size(1000); - if (after != null) { - compositeAggregation.aggregateAfter(after); + if (position != null) { + compositeAggregation.aggregateAfter(position); + } + + for (AggregationBuilder agg : job.getConfig().getAggregationConfig().getAggregatorFactories()) { + compositeAggregation.subAggregation(agg); } - compositeAggregation.subAggregation(AggregationBuilders.avg("avg_rating").field("rating")); - compositeAggregation.subAggregation(AggregationBuilders.cardinality("dc_vendors").field("vendorId")); SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); sourceBuilder.aggregation(compositeAggregation); sourceBuilder.size(0); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java index 645a97ab8d928..e3a1035b24fc5 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java +++ 
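buildSearchRequest is now driven entirely by the job configuration: the configured sources become the composite sources, the persisted position is handed to aggregateAfter for paging, and each configured aggregation is attached as a sub-aggregation. Assembled by hand with a single terms source and the avg aggregation the prototype used to hard-code, the resulting request looks roughly like this (position being the map saved by doSaveState, possibly null on the first run):

    List<CompositeValuesSourceBuilder<?>> sources =
            Collections.singletonList(new TermsValuesSourceBuilder("reviewerId").field("reviewerId"));
    CompositeAggregationBuilder composite = new CompositeAggregationBuilder("feature", sources);
    composite.size(1000);
    if (position != null) {
        composite.aggregateAfter(position);   // resume from the last persisted bucket key
    }
    composite.subAggregation(AggregationBuilders.avg("avg_rating").field("rating"));

    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).aggregation(composite);
    SearchRequest searchRequest = new SearchRequest("anonreviews").source(sourceBuilder);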
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java @@ -16,9 +16,11 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** @@ -28,25 +30,55 @@ public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentO private static final String NAME = "xpack/feature_index_builder/jobconfig"; private static final ParseField ID = new ParseField("id"); + private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); + private static final ParseField DESTINATION_INDEX = new ParseField("destination_index"); + private static final ParseField SOURCES = new ParseField("sources"); + private static final ParseField AGGREGATIONS = new ParseField("aggregations"); private final String id; + private final String indexPattern; + private final String destinationIndex; + private final SourceConfig sourceConfig; + private final AggregationConfig aggregationConfig; private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, (args, optionalId) -> { String id = args[0] != null ? (String) args[0] : optionalId; - return new FeatureIndexBuilderJobConfig(id); + String indexPattern = (String) args[1]; + String destinationIndex = (String) args[2]; + SourceConfig sourceConfig= (SourceConfig) args[3]; + AggregationConfig aggregationConfig = (AggregationConfig) args[4]; + return new FeatureIndexBuilderJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); }); static { PARSER.declareString(optionalConstructorArg(), ID); + PARSER.declareString(constructorArg(), INDEX_PATTERN); + PARSER.declareString(constructorArg(), DESTINATION_INDEX); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p), SOURCES); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); } - public FeatureIndexBuilderJobConfig(final String id) { - this.id = id; + public FeatureIndexBuilderJobConfig(final String id, + final String indexPattern, + final String destinationIndex, + final SourceConfig sourceConfig, + final AggregationConfig aggregationConfig) { + this.id = ExceptionsHelper.requireNonNull(id, ID.getPreferredName()); + this.indexPattern = ExceptionsHelper.requireNonNull(indexPattern, INDEX_PATTERN.getPreferredName()); + this.destinationIndex = ExceptionsHelper.requireNonNull(destinationIndex, DESTINATION_INDEX.getPreferredName()); + + // TODO: check for null? 
+ this.sourceConfig = sourceConfig; + this.aggregationConfig = aggregationConfig; } public FeatureIndexBuilderJobConfig(final StreamInput in) throws IOException { id = in.readString(); + indexPattern = in.readString(); + destinationIndex = in.readString(); + sourceConfig = in.readOptionalWriteable(SourceConfig::new); + aggregationConfig = in.readOptionalWriteable(AggregationConfig::new); } public String getId() { @@ -57,13 +89,41 @@ public String getCron() { return "*"; } + public String getIndexPattern() { + return indexPattern; + } + + public String getDestinationIndex() { + return destinationIndex; + } + + public SourceConfig getSourceConfig() { + return sourceConfig; + } + + public AggregationConfig getAggregationConfig() { + return aggregationConfig; + } + public void writeTo(final StreamOutput out) throws IOException { out.writeString(id); + out.writeString(indexPattern); + out.writeString(destinationIndex); + out.writeOptionalWriteable(sourceConfig); + out.writeOptionalWriteable(aggregationConfig); } public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); builder.field(ID.getPreferredName(), id); + builder.field(INDEX_PATTERN.getPreferredName(), indexPattern); + builder.field(DESTINATION_INDEX.getPreferredName(), destinationIndex); + if (sourceConfig != null) { + builder.field(SOURCES.getPreferredName(), sourceConfig); + } + if (aggregationConfig!=null) { + builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig); + } builder.endObject(); return builder; } @@ -85,12 +145,16 @@ public boolean equals(Object other) { final FeatureIndexBuilderJobConfig that = (FeatureIndexBuilderJobConfig) other; - return Objects.equals(this.id, that.id); + return Objects.equals(this.id, that.id) + && Objects.equals(this.indexPattern, that.indexPattern) + && Objects.equals(this.destinationIndex, that.destinationIndex) + && Objects.equals(this.sourceConfig, that.sourceConfig) + && Objects.equals(this.aggregationConfig, that.aggregationConfig); } @Override public int hashCode() { - return Objects.hash(id); + return Objects.hash(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java index 580a3fe81ce30..49e387d6d98dc 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java @@ -66,7 +66,6 @@ public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskS public FeatureIndexBuilderJobState(IndexerState state, @Nullable Map position) { this.state = state; this.currentPosition = Collections.unmodifiableSortedMap(position == null ? 
null : new TreeMap<>(position)); - } public FeatureIndexBuilderJobState(StreamInput in) throws IOException { diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java new file mode 100644 index 0000000000000..42e5634fc4bc4 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceParserHelper; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/* + * Wrapper for the Source config part of a composite aggregation. + * + * For now just wraps sources from composite aggs. 
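 *
 * As a minimal sketch (field names are illustrative), a single-source config can be built
 * from a terms value source, the same builder type used by the composite aggregation:
 *
 *   List<CompositeValuesSourceBuilder<?>> sources = new ArrayList<>();
 *   sources.add(new TermsValuesSourceBuilder("reviewerId").field("reviewerId"));
 *   SourceConfig sourceConfig = new SourceConfig(sources);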
+ */ +public class SourceConfig implements Writeable, ToXContentObject { + + private static final String NAME = "feature_index_builder_source"; + + private final List> sources; + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, (args) -> { + @SuppressWarnings("unchecked") + List> sources = (List>) args[0]; + return new SourceConfig(sources); + }); + + static { + PARSER.declareFieldArray(constructorArg(), (parser, builder) -> CompositeValuesSourceParserHelper.fromXContent(parser), + CompositeAggregationBuilder.SOURCES_FIELD_NAME, ObjectParser.ValueType.OBJECT_ARRAY); + } + + SourceConfig(final StreamInput in) throws IOException { + int num = in.readVInt(); + List> sources = new ArrayList<>(num); + for (int i = 0; i < num; i++) { + CompositeValuesSourceBuilder builder = CompositeValuesSourceParserHelper.readFrom(in); + getSources().add(builder); + } + this.sources = Collections.unmodifiableList(sources); + } + + public SourceConfig(List> sources) { + this.sources = Collections.unmodifiableList(sources); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(CompositeAggregationBuilder.SOURCES_FIELD_NAME.getPreferredName()); + for (CompositeValuesSourceBuilder source : getSources()) { + CompositeValuesSourceParserHelper.toXContent(source, builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(getSources().size()); + for (CompositeValuesSourceBuilder builder : getSources()) { + CompositeValuesSourceParserHelper.writeTo(builder, out); + } + } + + @Override + public int hashCode() { + return Objects.hash(getSources()); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final SourceConfig that = (SourceConfig) other; + + return Objects.equals(this.getSources(), that.getSources()); + } + + public static SourceConfig fromXContent(final XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + public List> getSources() { + return sources; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java index 14bedcbe909b1..3efed4c21e801 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfigTests; import org.junit.Before; import java.io.IOException; @@ -40,7 +41,7 @@ protected boolean supportsUnknownFields() { @Override protected Request createTestInstance() { - 
FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(randomAlphaOfLengthBetween(1,10)); + FeatureIndexBuilderJobConfig config = FeatureIndexBuilderJobConfigTests.randomFeatureIndexBuilderJobConfig(); return new Request(config); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java new file mode 100644 index 0000000000000..c2ed9e3d4e281 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.junit.Before; + +import static java.util.Collections.emptyList; + +public abstract class AbstractSerializingFeatureIndexBuilderTestCase + extends AbstractSerializingTestCase { + + /** + * Test case that ensure aggregation named objects are registered + */ + + private NamedWriteableRegistry namedWriteableRegistry; + private NamedXContentRegistry namedXContentRegistry; + + @Before + public void registerAggregationNamedObjects() throws Exception { + // register aggregations as NamedWriteable + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); + namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return namedWriteableRegistry; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java new file mode 100644 index 0000000000000..a46d598d8897b --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +// broken upstream +@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33942") +public class AggregationConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { + + public static AggregationConfig randomAggregationConfig() { + AggregatorFactories.Builder builder = new AggregatorFactories.Builder(); + + for (int i = 0; i < randomIntBetween(1, 20); ++i) { + builder.addAggregator(getRandomSupportedAggregation()); + } + + return new AggregationConfig(builder); + } + + @Override + protected AggregationConfig doParseInstance(XContentParser parser) throws IOException { + // parseAggregators expects to be already inside the xcontent object + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + return AggregationConfig.fromXContent(parser); + } + + @Override + protected AggregationConfig createTestInstance() { + return randomAggregationConfig(); + } + + @Override + protected Reader instanceReader() { + return AggregationConfig::new; + } + + private static AggregationBuilder getRandomSupportedAggregation() { + final int numberOfSupportedAggs = 4; + switch (randomIntBetween(1, numberOfSupportedAggs)) { + case 1: + return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)); + case 2: + return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)); + case 3: + return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)); + case 4: + return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)); + } + + return null; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java new file mode 100644 index 0000000000000..847aa93e8ee31 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.junit.Before; + +import java.io.IOException; + +public class FeatureIndexBuilderJobConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { + + private String jobId; + + public static FeatureIndexBuilderJobConfig randomFeatureIndexBuilderJobConfig() { + // AggregationConfig disabled, see: https://github.com/elastic/elasticsearch/pull/33942 + return new FeatureIndexBuilderJobConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), + null /* AggregationConfigTests.randonAggregationConfig() */); + } + + @Before + public void setUpOptionalId() { + jobId = randomAlphaOfLengthBetween(1, 10); + } + + @Override + protected FeatureIndexBuilderJobConfig doParseInstance(XContentParser parser) throws IOException { + if (randomBoolean()) { + return FeatureIndexBuilderJobConfig.fromXContent(parser, jobId); + } else { + return FeatureIndexBuilderJobConfig.fromXContent(parser, null); + } + } + + @Override + protected FeatureIndexBuilderJobConfig createTestInstance() { + return randomFeatureIndexBuilderJobConfig(); + } + + @Override + protected Reader instanceReader() { + return FeatureIndexBuilderJobConfig::new; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java new file mode 100644 index 0000000000000..e94754c825d27 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class SourceConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { + + public static SourceConfig randomSourceConfig() { + int numSources = randomIntBetween(1, 10); + List> sources = new ArrayList<>(); + for (int i = 0; i < numSources; i++) { + sources.add(randomTermsSourceBuilder()); + } + return new SourceConfig(sources); + } + + @Override + protected SourceConfig doParseInstance(XContentParser parser) throws IOException { + return SourceConfig.fromXContent(parser); + } + + @Override + protected SourceConfig createTestInstance() { + return randomSourceConfig(); + } + + @Override + protected Reader instanceReader() { + return SourceConfig::new; + } + + private static TermsValuesSourceBuilder randomTermsSourceBuilder() { + TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10)); + if (randomBoolean()) { + terms.field(randomAlphaOfLengthBetween(1, 20)); + } else { + terms.script(new Script(randomAlphaOfLengthBetween(10, 20))); + } + terms.order(randomFrom(SortOrder.values())); + if (randomBoolean()) { + terms.missingBucket(true); + } + return terms; + } +} From b1802f1c97834e665b9b43b321e7de0b63eb1c38 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 10 Oct 2018 08:42:10 +0200 Subject: [PATCH 12/49] adapt to upstream changes --- .../xpack/ml/featureindexbuilder/job/AggregationConfig.java | 4 ++-- .../featureindexbuilder/job/FeatureIndexBuilderIndexer.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java index d124453c07893..5e20d4d258997 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import java.io.IOException; -import java.util.List; +import java.util.Collection; import java.util.Objects; /* @@ -46,7 +46,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregatorFactoryBuilder.writeTo(out); } - public List getAggregatorFactories() { + public Collection getAggregatorFactories() { return aggregatorFactoryBuilder.getAggregatorFactories(); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 015b73c732eaf..8718eba3d7a1c 100644 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -69,7 +69,7 @@ protected IterationResult> doProcess(SearchResponse searchRe private List processBuckets(CompositeAggregation agg) { // for now only 1 source supported String destinationFieldName = job.getConfig().getSourceConfig().getSources().get(0).name(); - String aggName = job.getConfig().getAggregationConfig().getAggregatorFactories().get(0).getName(); + String aggName = job.getConfig().getAggregationConfig().getAggregatorFactories().iterator().next().getName(); return agg.getBuckets().stream().map(b -> { NumericMetricsAggregation.SingleValue aggResult = b.getAggregations().get(aggName); From f477852272e69d12bbb8b614216f4e9ea5fa0c08 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 16 Oct 2018 08:30:26 +0200 Subject: [PATCH 13/49] [ML-Dataframe] Feature/fib reenable agg config tests (#34453) re-enables tests after upstream fixes, see #34005 --- .../featureindexbuilder/job/SourceConfig.java | 2 +- ...tureIndexBuilderJobActionRequestTests.java | 31 +++++++++++++++++-- .../job/AggregationConfigTests.java | 14 ++++++--- .../FeatureIndexBuilderJobConfigTests.java | 3 +- 4 files changed, 40 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java index 42e5634fc4bc4..65b462ba6c367 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java @@ -53,7 +53,7 @@ public class SourceConfig implements Writeable, ToXContentObject { List> sources = new ArrayList<>(num); for (int i = 0; i < num; i++) { CompositeValuesSourceBuilder builder = CompositeValuesSourceParserHelper.readFrom(in); - getSources().add(builder); + sources.add(builder); } this.sources = Collections.unmodifiableList(sources); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java index 3efed4c21e801..6e947cc823dbe 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java @@ -6,7 +6,11 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.action; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import 
org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; @@ -15,18 +19,41 @@ import java.io.IOException; +import static java.util.Collections.emptyList; + public class PutFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableXContentTestCase { private String jobId; + private NamedWriteableRegistry namedWriteableRegistry; + private NamedXContentRegistry namedXContentRegistry; + + @Before + public void registerAggregationNamedObjects() throws Exception { + // register aggregations as NamedWriteable + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); + namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); + namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return namedWriteableRegistry; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } + @Before public void setupJobID() { - jobId = randomAlphaOfLengthBetween(1,10); + jobId = randomAlphaOfLengthBetween(1, 10); } @Override protected Request doParseInstance(XContentParser parser) throws IOException { - return Request.fromXContent(parser, jobId); + return Request.fromXContent(parser, jobId); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java index a46d598d8897b..96fbb0142e08e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java @@ -6,25 +6,29 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import java.io.IOException; +import java.util.HashSet; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; -// broken upstream -@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33942") public class AggregationConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { public static AggregationConfig randomAggregationConfig() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder(); - for (int i = 0; i < randomIntBetween(1, 20); ++i) { - builder.addAggregator(getRandomSupportedAggregation()); + // ensure that the unlikely does not happen: 2 aggs share the same name + Set names = new HashSet<>(); + for (int i = 1; i < randomIntBetween(1, 20); ++i) { + AggregationBuilder aggBuilder = getRandomSupportedAggregation(); + if (names.add(aggBuilder.getName())) { + builder.addAggregator(aggBuilder); + } } return new AggregationConfig(builder); diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java 
b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java index 847aa93e8ee31..a16b51aadcf09 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java @@ -17,10 +17,9 @@ public class FeatureIndexBuilderJobConfigTests extends AbstractSerializingFeatur private String jobId; public static FeatureIndexBuilderJobConfig randomFeatureIndexBuilderJobConfig() { - // AggregationConfig disabled, see: https://github.com/elastic/elasticsearch/pull/33942 return new FeatureIndexBuilderJobConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), - null /* AggregationConfigTests.randonAggregationConfig() */); + AggregationConfigTests.randomAggregationConfig()); } @Before From ad8cc92b1d959302e7a7f2e1d29c9aaf1657b12c Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 19 Oct 2018 07:47:33 +0200 Subject: [PATCH 14/49] [ML-Dataframe] Feature/fib multi aggs and sources (#34525) implement support for multiple sources and aggregations --- .../job/AggregationResultUtils.java | 62 ++++ .../job/FeatureIndexBuilderIndexer.java | 32 +- .../job/AggregationResultUtilsTests.java | 295 ++++++++++++++++++ 3 files changed, 374 insertions(+), 15 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java new file mode 100644 index 0000000000000..b42df5ed29f37 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.apache.log4j.Logger; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; + +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +final class AggregationResultUtils { + private static final Logger logger = Logger.getLogger(AggregationResultUtils.class.getName()); + + /** + * Extracts aggregation results from a composite aggregation and puts it into a map. + * + * @param agg The aggregation result + * @param sources The original sources used for querying + * @param aggregationBuilders the aggregation used for querying + * @return a map containing the results of the aggregation in a consumable way + */ + public static Stream> extractCompositeAggregationResults(CompositeAggregation agg, + List> sources, Collection aggregationBuilders) { + return agg.getBuckets().stream().map(bucket -> { + Map document = new HashMap<>(); + for (CompositeValuesSourceBuilder source : sources) { + String destinationFieldName = source.name(); + document.put(destinationFieldName, bucket.getKey().get(destinationFieldName)); + } + for (AggregationBuilder aggregationBuilder : aggregationBuilders) { + String aggName = aggregationBuilder.getName(); + + // TODO: support other aggregation types + Aggregation aggResult = bucket.getAggregations().get(aggName); + + if (aggResult instanceof NumericMetricsAggregation.SingleValue) { + NumericMetricsAggregation.SingleValue aggResultSingleValue = (SingleValue) aggResult; + document.put(aggName, aggResultSingleValue.value()); + } else { + // Execution should never reach this point! 
+ // Creating jobs with unsupported aggregations shall not be possible + logger.error("Dataframe Internal Error: unsupported aggregation ["+ aggResult.getName() +"], ignoring"); + assert false; + } + } + return document; + }); + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 8718eba3d7a1c..1fc22c828042e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -17,7 +17,6 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -25,14 +24,16 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { @@ -58,36 +59,37 @@ protected void onStartJob(long now) { @Override protected IterationResult> doProcess(SearchResponse searchResponse) { final CompositeAggregation agg = searchResponse.getAggregations().get("feature"); - return new IterationResult<>(processBuckets(agg), agg.afterKey(), agg.getBuckets().isEmpty()); + return new IterationResult<>(processBucketsToIndexRequests(agg).collect(Collectors.toList()), agg.afterKey(), + agg.getBuckets().isEmpty()); } /* - * Mocked demo case + * Parses the result and creates a stream of indexable documents * - * TODO: replace with proper implementation + * Implementation decisions: + * + * Extraction uses generic maps as intermediate exchange format in order to hook in ingest pipelines/processors + * in later versions, see {@link IngestDocument). 
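 *
 * As a worked example (values are illustrative): a composite bucket with key
 * { "reviewerId": "ID1" } and an "avg_rating" sub-aggregation value of 42.33 becomes the
 * intermediate map { "reviewerId": "ID1", "avg_rating": 42.33 }, which is then serialized
 * via XContentBuilder and turned into an IndexRequest against the configured destination index.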
*/ - private List processBuckets(CompositeAggregation agg) { - // for now only 1 source supported - String destinationFieldName = job.getConfig().getSourceConfig().getSources().get(0).name(); - String aggName = job.getConfig().getAggregationConfig().getAggregatorFactories().iterator().next().getName(); + private Stream processBucketsToIndexRequests(CompositeAggregation agg) { + String indexName = job.getConfig().getDestinationIndex(); + List> sources = job.getConfig().getSourceConfig().getSources(); + Collection aggregationBuilders = job.getConfig().getAggregationConfig().getAggregatorFactories(); - return agg.getBuckets().stream().map(b -> { - NumericMetricsAggregation.SingleValue aggResult = b.getAggregations().get(aggName); + return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders).map(document -> { XContentBuilder builder; try { builder = jsonBuilder(); builder.startObject(); - builder.field(destinationFieldName, b.getKey().get(destinationFieldName)); - builder.field(aggName, aggResult.value()); + builder.map(document); builder.endObject(); } catch (IOException e) { throw new UncheckedIOException(e); } - String indexName = job.getConfig().getDestinationIndex(); IndexRequest request = new IndexRequest(indexName, DOC_TYPE).source(builder); return request; - }).collect(Collectors.toList()); + }); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java new file mode 100644 index 0000000000000..e3ac7bf052120 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java @@ -0,0 +1,295 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.aggregations.Aggregation; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; +import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; +import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static java.util.Arrays.asList; + +public class AggregationResultUtilsTests extends ESTestCase { + + private final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(namedXContents); + + private final String KEY = Aggregation.CommonFields.KEY.getPreferredName(); + + // aggregations potentially useful for writing tests, to be expanded as necessary + private static final List namedXContents; + static { + Map> map = new 
HashMap<>(); + map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c)); + map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c)); + map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c)); + map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c)); + map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c)); + map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c)); + map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c)); + map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c)); + map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c)); + map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); + map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); + map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); + + namedXContents = map.entrySet().stream() + .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } + + public void testExtractCompositeAggregationResults() throws IOException { + String targetField = randomAlphaOfLengthBetween(5, 10); + + List> sources = Collections.singletonList( + new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test") + ); + + String aggName = randomAlphaOfLengthBetween(5, 10); + String aggTypedName = "avg#" + aggName; + Collection aggregationBuilders = Collections.singletonList(AggregationBuilders.avg(aggName)); + + Map input = asMap( + "buckets", + asList( + asMap( + KEY, asMap( + targetField, "ID1"), + aggTypedName, asMap( + "value", 42.33)), + asMap( + KEY, asMap( + targetField, "ID2"), + aggTypedName, asMap( + "value", 28.99)), + asMap( + KEY, asMap( + targetField, "ID3"), + aggTypedName, asMap( + "value", 12.55)) + )); + + List> expected = asList( + asMap( + targetField, "ID1", + aggName, 42.33 + ), + asMap( + targetField, "ID2", + aggName, 28.99 + ), + asMap( + targetField, "ID3", + aggName, 12.55 + ) + ); + + executeTest(sources, aggregationBuilders, input, expected); + } + + public void testExtractCompositeAggregationResultsMultiSources() throws IOException { + String targetField = randomAlphaOfLengthBetween(5, 10); + String targetField2 = randomAlphaOfLengthBetween(5, 10) + "_2"; + + List> sources = asList( + new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test"), + new TermsValuesSourceBuilder(targetField2).field("doesn't_matter_for_this_test_too") + ); + + String aggName = randomAlphaOfLengthBetween(5, 10); + String aggTypedName = "avg#" + aggName; + Collection aggregationBuilders = Collections.singletonList(AggregationBuilders.avg(aggName)); + + Map input = asMap( + "buckets", + asList( + asMap( + KEY, asMap( + targetField, "ID1", + targetField2, "ID1_2" + ), + aggTypedName, asMap( + "value", 42.33)), + asMap( + KEY, asMap( + targetField, "ID1", + targetField2, "ID2_2" + ), + aggTypedName, asMap( + "value", 8.4)), + asMap( + KEY, asMap( + targetField, "ID2", + targetField2, "ID1_2" + ), + aggTypedName, asMap( + "value", 28.99)), + asMap( + KEY, asMap( + targetField, "ID3", + targetField2, 
"ID2_2" + ), + aggTypedName, asMap( + "value", 12.55)) + )); + + List> expected = asList( + asMap( + targetField, "ID1", + targetField2, "ID1_2", + aggName, 42.33 + ), + asMap( + targetField, "ID1", + targetField2, "ID2_2", + aggName, 8.4 + ), + asMap( + targetField, "ID2", + targetField2, "ID1_2", + aggName, 28.99 + ), + asMap( + targetField, "ID3", + targetField2, "ID2_2", + aggName, 12.55 + ) + ); + executeTest(sources, aggregationBuilders, input, expected); + } + + public void testExtractCompositeAggregationResultsMultiAggregations() throws IOException { + String targetField = randomAlphaOfLengthBetween(5, 10); + List> sources = Collections.singletonList( + new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test") + ); + + String aggName = randomAlphaOfLengthBetween(5, 10); + String aggTypedName = "avg#" + aggName; + + String aggName2 = randomAlphaOfLengthBetween(5, 10) + "_2"; + String aggTypedName2 = "max#" + aggName2; + + Collection aggregationBuilders = asList(AggregationBuilders.avg(aggName), AggregationBuilders.max(aggName2)); + + Map input = asMap( + "buckets", + asList( + asMap( + KEY, asMap( + targetField, "ID1"), + aggTypedName, asMap( + "value", 42.33), + aggTypedName2, asMap( + "value", 9.9)), + asMap( + KEY, asMap( + targetField, "ID2"), + aggTypedName, asMap( + "value", 28.99), + aggTypedName2, asMap( + "value", 222.33)), + asMap( + KEY, asMap( + targetField, "ID3"), + aggTypedName, asMap( + "value", 12.55), + aggTypedName2, asMap( + "value", -2.44)) + )); + + List> expected = asList( + asMap( + targetField, "ID1", + aggName, 42.33, + aggName2, 9.9 + ), + asMap( + targetField, "ID2", + aggName, 28.99, + aggName2, 222.33 + ), + asMap( + targetField, "ID3", + aggName, 12.55, + aggName2, -2.44 + ) + ); + executeTest(sources, aggregationBuilders, input, expected); + } + + private void executeTest(List> sources, Collection aggregationBuilders, + Map input, List> expected) throws IOException { + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.map(input); + + try (XContentParser parser = createParser(builder)) { + CompositeAggregation agg = ParsedComposite.fromXContent(parser, "my_feature"); + List> result = AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders) + .collect(Collectors.toList()); + + assertEquals(expected, result); + } + } + + static Map asMap(Object... 
fields) { + assert fields.length % 2 == 0; + final Map map = new HashMap<>(); + for (int i = 0; i < fields.length; i += 2) { + String field = (String) fields[i]; + map.put(field, fields[i + 1]); + } + return map; + } +} From 0310769df4c259cd4e1600a916d46f1982690275 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 25 Oct 2018 15:27:53 +0200 Subject: [PATCH 15/49] fix NPE and creating XContent from search phase --- .../ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java | 2 -- .../ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 1fc22c828042e..8e381f0d9d2c5 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -80,9 +80,7 @@ private Stream processBucketsToIndexRequests(CompositeAggregation XContentBuilder builder; try { builder = jsonBuilder(); - builder.startObject(); builder.map(document); - builder.endObject(); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java index 49e387d6d98dc..e4731366d1861 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java @@ -65,7 +65,7 @@ public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskS public FeatureIndexBuilderJobState(IndexerState state, @Nullable Map position) { this.state = state; - this.currentPosition = Collections.unmodifiableSortedMap(position == null ? null : new TreeMap<>(position)); + this.currentPosition = position == null ? 
null : Collections.unmodifiableSortedMap(new TreeMap<>(position)); } public FeatureIndexBuilderJobState(StreamInput in) throws IOException { From 2ff80fd79fe0f8313d53e0cc3a6cd8aec501efa2 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 13 Nov 2018 15:52:18 +0100 Subject: [PATCH 16/49] adapt to upstream changes --- .../xpack/ml/featureindexbuilder/FeatureIndexBuilder.java | 5 +++-- .../action/TransportDeleteFeatureIndexBuilderJobAction.java | 2 +- .../action/TransportPutFeatureIndexBuilderJobAction.java | 2 +- .../action/TransportStartFeatureIndexBuilderJobAction.java | 2 +- .../action/TransportStopFeatureIndexBuilderJobAction.java | 2 +- .../ml/featureindexbuilder/job/FeatureIndexBuilderJob.java | 3 ++- .../job/FeatureIndexBuilderJobPersistentTasksExecutor.java | 5 ++--- 7 files changed, 11 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index e1e384b57415f..8f864082ed07e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTaskParams; @@ -143,13 +144,13 @@ public List> getExecutorBuilders(Settings settings) { @Override public List> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, - Client client) { + Client client, SettingsModule settingsModule) { if (enabled == false || transportClientMode) { return emptyList(); } SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); - return Collections.singletonList(new FeatureIndexBuilderJobPersistentTasksExecutor(settings, client, + return Collections.singletonList(new FeatureIndexBuilderJobPersistentTasksExecutor(client, schedulerEngine, threadPool)); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java index 56c118ac00436..31c00a8b1841b 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java @@ -36,7 +36,7 @@ public class TransportDeleteFeatureIndexBuilderJobAction public TransportDeleteFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, ClusterService clusterService) { - super(settings, 
DeleteFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + super(DeleteFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeleteFeatureIndexBuilderJobAction.Request::new); this.persistentTasksService = persistentTasksService; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 7e28f68a23eda..6ec1427d25391 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -45,7 +45,7 @@ public class TransportPutFeatureIndexBuilderJobAction public TransportPutFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, PersistentTasksService persistentTasksService, Client client) { - super(settings, PutFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + super(PutFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutFeatureIndexBuilderJobAction.Request::new); this.licenseState = licenseState; this.persistentTasksService = persistentTasksService; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java index aaaf695e346d9..23a8e2812951f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -37,7 +37,7 @@ public class TransportStartFeatureIndexBuilderJobAction extends @Inject public TransportStartFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, XPackLicenseState licenseState) { - super(settings, StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, + super(StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); this.licenseState = licenseState; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java index 938e3d71a4f9c..e157c77784161 100644 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java @@ -31,7 +31,7 @@ public class TransportStopFeatureIndexBuilderJobAction extends @Inject public TransportStopFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, + super(StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, StopFeatureIndexBuilderJobAction.Request::new, StopFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java index 368a9e0897a05..0857bddab9731 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java @@ -55,7 +55,8 @@ public String getWriteableName() { @Override public Version getMinimalSupportedVersion() { - return Version.V_7_0_0_alpha1; + // TODO: to be changed once target version has been defined + return Version.CURRENT; } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java index fefb383f94b05..91c0578fb907d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java @@ -8,7 +8,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; @@ -25,9 +24,9 @@ public class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTas private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public FeatureIndexBuilderJobPersistentTasksExecutor(Settings settings, Client client, SchedulerEngine schedulerEngine, + public FeatureIndexBuilderJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { - super(settings, "xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); + super("xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; From 341091bdffaef2628792a7e1d4780d97d9fb774f Mon Sep 17 00:00:00 2001 From: Hendrik Muhs 
Date: Tue, 13 Nov 2018 19:32:59 +0100 Subject: [PATCH 17/49] fix imports --- .../featureindexbuilder/job/AggregationResultUtilsTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java index e3ac7bf052120..9e3ef63fd7e8e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java @@ -42,8 +42,8 @@ import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder; +import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; +import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESTestCase; import java.io.IOException; From 7036ffa5e2edc49110508c5f88a03d5745c48525 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 15 Nov 2018 13:16:52 +0100 Subject: [PATCH 18/49] [ML-Dataframe] add validation and mapping detection (#34844) add job validation and deduction of field mappings for the target index --- ...nsportPutFeatureIndexBuilderJobAction.java | 55 ++--- .../job/FeatureIndexBuilderIndexer.java | 7 +- .../persistence/DataframeIndex.java | 69 ++++++ .../support/Aggregations.java | 58 +++++ .../support/JobValidator.java | 208 ++++++++++++++++ .../support/AggregationsTests.java | 23 ++ .../support/JobValidatorTests.java | 230 ++++++++++++++++++ 7 files changed, 610 insertions(+), 40 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 6ec1427d25391..9bce651651446 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ 
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -7,11 +7,9 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; @@ -19,7 +17,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksService; @@ -31,8 +28,8 @@ import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Response; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; - -import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; +import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataframeIndex; +import org.elasticsearch.xpack.ml.featureindexbuilder.support.JobValidator; public class TransportPutFeatureIndexBuilderJobAction extends TransportMasterNodeAction { @@ -72,9 +69,22 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); - FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); - createIndex(client, job.getConfig().getDestinationIndex()); - startPersistentTask(job, listener, persistentTasksService); + JobValidator jobCreator = new JobValidator(request.getConfig(), client); + + jobCreator.validate(ActionListener.wrap(validationResult -> { + jobCreator.deduceMappings(ActionListener.wrap(mappings -> { + FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); + DataframeIndex.createDestinationIndex(client, job, mappings, ActionListener.wrap(createIndexResult -> { + startPersistentTask(job, listener, persistentTasksService); + }, e3 -> { + listener.onFailure(new RuntimeException("Failed to create index", e3)); + })); + }, e2 -> { + listener.onFailure(new RuntimeException("Failed to deduce target mappings", e2)); + })); + }, e -> { + listener.onFailure(new RuntimeException("Failed to validate", e)); + })); } private static FeatureIndexBuilderJob createFeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config, ThreadPool threadPool) { @@ -96,33 +106,4 @@ static void startPersistentTask(FeatureIndexBuilderJob job, ActionListener - .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)); - request.mapping(DOC_TYPE, // <1> - "{\n" + - " \"" + DOC_TYPE + "\": {\n" + - " \"properties\": {\n" + - " \"reviewerId\": {\n" + - " \"type\": \"keyword\"\n" + - " },\n" + - " \"avg_rating\": {\n" + - " \"type\": \"integer\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}", // <2> - XContentType.JSON); - IndicesAdminClient adminClient =
client.admin().indices(); - adminClient.create(request).actionGet(); - } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 8e381f0d9d2c5..65be05c2cca54 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -33,10 +33,11 @@ import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings.DOC_TYPE; +import static org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataframeIndex.DOC_TYPE; public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { + private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; private static final Logger logger = Logger.getLogger(FeatureIndexBuilderIndexer.class.getName()); private FeatureIndexBuilderJob job; @@ -58,7 +59,7 @@ protected void onStartJob(long now) { @Override protected IterationResult> doProcess(SearchResponse searchResponse) { - final CompositeAggregation agg = searchResponse.getAggregations().get("feature"); + final CompositeAggregation agg = searchResponse.getAggregations().get(COMPOSITE_AGGREGATION_NAME); return new IterationResult<>(processBucketsToIndexRequests(agg).collect(Collectors.toList()), agg.afterKey(), agg.getBuckets().isEmpty()); } @@ -99,7 +100,7 @@ protected SearchRequest buildSearchRequest() { List> sources = job.getConfig().getSourceConfig().getSources(); - CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder("feature", sources); + CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); compositeAggregation.size(1000); if (position != null) { diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java new file mode 100644 index 0000000000000..851259ee0ac3f --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.persistence; + +import org.apache.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; +import java.util.Map; +import java.util.Map.Entry; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +public final class DataframeIndex { + private static final Logger logger = Logger.getLogger(DataframeIndex.class.getName()); + + public static final String DOC_TYPE = "_doc"; + private static final String PROPERTIES = "properties"; + private static final String TYPE = "type"; + + private DataframeIndex() { + } + + public static void createDestinationIndex(Client client, FeatureIndexBuilderJob job, Map mappings, + final ActionListener listener) { + CreateIndexRequest request = new CreateIndexRequest(job.getConfig().getDestinationIndex()); + + // TODO: revisit number of shards, number of replicas + request.settings(Settings.builder() // <1> + .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)); + + request.mapping(DOC_TYPE, createMappingXContent(mappings)); + + client.execute(CreateIndexAction.INSTANCE, request, ActionListener.wrap(createIndexResponse -> { + listener.onResponse(true); + }, e -> { + String message = "Could not create destination index [" + job.getConfig().getDestinationIndex() + "] for job[" + + job.getConfig().getId() + "]"; + logger.error(message); + listener.onFailure(new RuntimeException(message, e)); + })); + } + + private static XContentBuilder createMappingXContent(Map mappings) { + try { + XContentBuilder builder = jsonBuilder().startObject(); + builder.startObject(DOC_TYPE); + builder.startObject(PROPERTIES); + for (Entry field : mappings.entrySet()) { + builder.startObject(field.getKey()).field(TYPE, field.getValue()).endObject(); + } + builder.endObject(); // properties + builder.endObject(); // doc_type + return builder.endObject(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java new file mode 100644 index 0000000000000..6f2308d1e0a1b --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.support; + +import java.util.Locale; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public final class Aggregations { + private Aggregations() {} + + /** + * Supported aggregation by dataframe and corresponding meta information. 
+ * + * aggregationType - the name of the aggregation as returned by + * {@link org.elasticsearch.search.aggregations.BaseAggregationBuilder#getType()}} + * + * targetMapping - the field type for the output, if null, the source type should be used + * + */ + enum AggregationType { + AVG("avg", "double"), + MAX("max", null); + + private final String aggregationType; + private final String targetMapping; + + AggregationType(String name, String targetMapping) { + this.aggregationType = name; + this.targetMapping = targetMapping; + } + + public String getName() { + return aggregationType; + } + + public String getTargetMapping() { + return targetMapping; + } + } + + private static Set aggregationSupported = Stream.of(AggregationType.values()).map(AggregationType::name) + .collect(Collectors.toSet()); + + public static boolean isSupportedByDataframe(String aggregationType) { + return aggregationSupported.contains(aggregationType.toUpperCase(Locale.ROOT)); + } + + public static String resolveTargetMapping(String aggregationType, String sourceType) { + AggregationType agg = AggregationType.valueOf(aggregationType.toUpperCase(Locale.ROOT)); + return agg.getTargetMapping() == null ? sourceType : agg.getTargetMapping(); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java new file mode 100644 index 0000000000000..3c8cc9bd0f63f --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.support; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class JobValidator { + private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; + + private static final Logger logger = LogManager.getLogger(JobValidator.class.getName()); + + private final Client client; + private final FeatureIndexBuilderJobConfig config; + + public JobValidator(FeatureIndexBuilderJobConfig config, Client client) { + this.client = Objects.requireNonNull(client); + this.config = Objects.requireNonNull(config); + } + + public void validate(final ActionListener listener) { + // step 1: check if used aggregations are supported + for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { + if (Aggregations.isSupportedByDataframe(agg.getType()) == false) { + listener.onFailure(new RuntimeException("Unsupported aggregation type [" + agg.getType() + "]")); + return; + } + } + + // step 2: run a query to validate that config is valid + runTestQuery(listener); + } + + public void deduceMappings(final ActionListener> listener) { + // collects the fieldnames used as source for aggregations + Map aggregationSourceFieldNames = new HashMap<>(); + // collects the aggregation types by source name + Map aggregationTypes = new HashMap<>(); + // collects the fieldnames and target fieldnames used for grouping + Map fieldNamesForGrouping = new HashMap<>(); + config.getSourceConfig().getSources().forEach(source -> { + fieldNamesForGrouping.put(source.name(), source.field()); + }); + + for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { + if (agg instanceof ValuesSourceAggregationBuilder) { + ValuesSourceAggregationBuilder valueSourceAggregation = (ValuesSourceAggregationBuilder) agg; + aggregationSourceFieldNames.put(valueSourceAggregation.getName(), valueSourceAggregation.field()); + aggregationTypes.put(valueSourceAggregation.getName(), valueSourceAggregation.getType()); + } else { + // execution should not reach this point + listener.onFailure(new RuntimeException("Unsupported aggregation type [" + agg.getType() + "]")); + return; + } + } + + Map allFieldNames = new HashMap<>(); + allFieldNames.putAll(aggregationSourceFieldNames); + 
allFieldNames.putAll(fieldNamesForGrouping); + + getSourceFieldMappings(config.getIndexPattern(), allFieldNames.values().toArray(new String[0]), + ActionListener.wrap(sourceMappings -> { + Map targetMapping = resolveMappings(aggregationSourceFieldNames, aggregationTypes, + fieldNamesForGrouping, sourceMappings); + + listener.onResponse(targetMapping); + }, e -> { + listener.onFailure(e); + })); + } + + Map resolveMappings(Map aggregationSourceFieldNames, Map aggregationTypes, + Map fieldNamesForGrouping, Map sourceMappings) { + Map targetMapping = new HashMap<>(); + + aggregationTypes.forEach((targetFieldName, aggregationName) -> { + String sourceFieldName = aggregationSourceFieldNames.get(targetFieldName); + String destinationMapping = Aggregations.resolveTargetMapping(aggregationName, sourceMappings.get(sourceFieldName)); + + logger.debug("[" + config.getId() + "] Deduced mapping for: [" + targetFieldName + "], agg type [" + aggregationName + + "] to [" + destinationMapping + "]"); + if (destinationMapping != null) { + targetMapping.put(targetFieldName, destinationMapping); + } else { + logger.warn("[" + config.getId() + "] Failed to deduce mapping for [" + targetFieldName + + "], fall back to double."); + targetMapping.put(targetFieldName, "double"); + } + }); + + fieldNamesForGrouping.forEach((targetFieldName, sourceFieldName) -> { + String destinationMapping = sourceMappings.get(sourceFieldName); + logger.debug( + "[" + config.getId() + "] Deduced mapping for: [" + targetFieldName + "] to [" + destinationMapping + "]"); + if (destinationMapping != null) { + targetMapping.put(targetFieldName, destinationMapping); + } else { + logger.warn("[" + config.getId() + "] Failed to deduce mapping for [" + targetFieldName + + "], fall back to keyword."); + targetMapping.put(targetFieldName, "keyword"); + } + }); + return targetMapping; + } + + private void runTestQuery(final ActionListener listener) { + QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + SearchRequest searchRequest = new SearchRequest(config.getIndexPattern()); + + List> sources = config.getSourceConfig().getSources(); + + CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); + compositeAggregation.size(1); + + for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { + compositeAggregation.subAggregation(agg); + } + + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.aggregation(compositeAggregation); + sourceBuilder.size(0); + sourceBuilder.query(queryBuilder); + searchRequest.source(sourceBuilder); + + client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap(response -> { + if (response == null) { + listener.onFailure(new RuntimeException("Unexpected null response from test query")); + return; + } + if (response.status() != RestStatus.OK) { + listener.onFailure(new RuntimeException("Unexpected status from response of test query: " + response.status())); + return; + } + listener.onResponse(true); + }, e->{ + listener.onFailure(new RuntimeException("Failed to test query",e)); + })); + } + + /* + * Very "magic" helper method to extract the source mappings + */ + private void getSourceFieldMappings(String index, String[] fields, + ActionListener> listener) { + GetFieldMappingsRequest fieldMappingRequest = new GetFieldMappingsRequest(); + fieldMappingRequest.indices(index); + fieldMappingRequest.fields(fields); + + client.execute(GetFieldMappingsAction.INSTANCE, fieldMappingRequest, 
ActionListener.wrap(response -> { + listener.onResponse(extractSourceFieldMappings(response.mappings())); + }, e -> { + listener.onFailure(e); + })); + } + + Map extractSourceFieldMappings(Map>> mappings) { + Map extractedTypes = new HashMap<>(); + + mappings.forEach((indexName, docTypeToMapping) -> { + // "_doc" -> + docTypeToMapping.forEach((docType, fieldNameToMapping) -> { + // "my_field" -> + fieldNameToMapping.forEach((fieldName, fieldMapping) -> { + // "mapping" -> "my_field" -> + fieldMapping.sourceAsMap().forEach((name, typeMap) -> { + // expected object: { "type": type } + if (typeMap instanceof Map) { + final Map map = (Map) typeMap; + if (map.containsKey("type")) { + String type = map.get("type").toString(); + logger.debug("[" + config.getId() + "] Extracted type for [" + fieldName + "] : [" + type + "]"); + // TODO: overwrites types, requires resolve if + // types are mixed + extractedTypes.put(fieldName, type); + } + } + }); + }); + }); + }); + return extractedTypes; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java new file mode 100644 index 0000000000000..465f9f501401c --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.support; + +import org.elasticsearch.test.ESTestCase; + +public class AggregationsTests extends ESTestCase { + public void testResolveTargetMapping() { + + // avg + assertEquals("double", Aggregations.resolveTargetMapping("avg", "int")); + assertEquals("double", Aggregations.resolveTargetMapping("avg", "double")); + + // max + assertEquals("int", Aggregations.resolveTargetMapping("max", "int")); + assertEquals("double", Aggregations.resolveTargetMapping("max", "double")); + assertEquals("half_float", Aggregations.resolveTargetMapping("max", "half_float")); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java new file mode 100644 index 0000000000000..87b49fd755da1 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.support; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.AggregationConfig; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.SourceConfig; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Collections.emptyList; +import static org.hamcrest.Matchers.equalTo; + +public class JobValidatorTests extends ESTestCase { + + private NamedXContentRegistry namedXContentRegistry; + private Client client; + private final String[] supportedAggregations = { "avg", "max" }; + private final String[] unsupportedAggregations = { "min" }; + + @Before + public void registerAggregationNamedObjects() throws Exception { + // register aggregations as NamedWriteable + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); + namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + @Before + public void setupClient() { + if (client != null) { + client.close(); + } + client = new MyMockClient(getTestName()); + } + + @After + public void tearDownClient() { + client.close(); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return namedXContentRegistry; + } + + public void testValidateExistingIndex() throws Exception { + SourceConfig sourceConfig = getValidSourceConfig(); + AggregationConfig aggregationConfig = getValidAggregationConfig(); + + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source_index", "non_existing_dest", + sourceConfig, aggregationConfig); + + assertValidJob(client, config); + } + + public void testValidateNonExistingIndex() throws Exception { + SourceConfig sourceConfig = getValidSourceConfig(); + AggregationConfig aggregationConfig = getValidAggregationConfig(); + + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "non_existing_source_index", + "non_existing_dest", sourceConfig, aggregationConfig); + + assertInvalidJob(client, config); + } + + public void testSearchFailure() throws Exception { + SourceConfig sourceConfig = getValidSourceConfig(); + AggregationConfig aggregationConfig = 
getValidAggregationConfig(); + + // test a failure during the search operation, job creation fails if + // search has failures although they might just be temporary + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source_index_with_failing_shards", + "non_existing_dest", sourceConfig, aggregationConfig); + + assertInvalidJob(client, config); + } + + public void testValidateAllSupportedAggregations() throws Exception { + SourceConfig sourceConfig = getValidSourceConfig(); + + for (String agg : supportedAggregations) { + AggregationConfig aggregationConfig = getAggregationConfig(agg); + + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source", "non_existing_dest", + sourceConfig, aggregationConfig); + + assertValidJob(client, config); + } + } + + public void testValidateAllUnsupportedAggregations() throws Exception { + SourceConfig sourceConfig = getValidSourceConfig(); + + for (String agg : unsupportedAggregations) { + AggregationConfig aggregationConfig = getAggregationConfig(agg); + + FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source", "non_existing_dest", + sourceConfig, aggregationConfig); + + assertInvalidJob(client, config); + } + } + + private class MyMockClient extends NoOpClient { + MyMockClient(String testName) { + super(testName); + } + + @SuppressWarnings("unchecked") + @Override + protected void doExecute(Action action, Request request, + ActionListener listener) { + + if (request instanceof SearchRequest) { + SearchRequest searchRequest = (SearchRequest) request; + List searchFailures = new ArrayList<>(); + + for (String index : searchRequest.indices()) { + if (index.contains("non_existing")) { + listener.onFailure(new IndexNotFoundException(index)); + return; + } + + if (index.contains("with_failing_shards")) { + searchFailures.add(new ShardSearchFailure(new RuntimeException("shard failed"))); + } + } + + final SearchResponseSections sections = new SearchResponseSections(new SearchHits(new SearchHit[0], 0, 0), null, null, + false, null, null, 1); + final SearchResponse response = new SearchResponse(sections, null, 10, searchFailures.size() > 0 ? 
0 : 5, 0, 0, + searchFailures.toArray(new ShardSearchFailure[searchFailures.size()]), null); + + listener.onResponse((Response) response); + return; + } + + super.doExecute(action, request, listener); + } + } + + private SourceConfig getValidSourceConfig() throws IOException { + return parseSource("{\"sources\": [\n" + " {\n" + " \"pivot\": {\n" + " \"terms\": {\n" + " \"field\": \"terms\"\n" + + " }\n" + " }\n" + " }\n" + "]}"); + } + + private SourceConfig parseSource(String json) throws IOException { + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + + return SourceConfig.fromXContent(parser); + } + + private AggregationConfig getValidAggregationConfig() throws IOException { + return getAggregationConfig(supportedAggregations[randomIntBetween(0, supportedAggregations.length - 1)]); + } + + private AggregationConfig getAggregationConfig(String agg) throws IOException { + return parseAggregations("{\n" + " \"pivot_" + agg + "\": {\n" + " \"" + agg + "\": {\n" + " \"field\": \"values\"\n" + + " }\n" + " }" + "}"); + } + + private AggregationConfig parseAggregations(String json) throws IOException { + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + // parseAggregators expects to be already inside the xcontent object + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + return AggregationConfig.fromXContent(parser); + } + + private static void assertValidJob(Client client, FeatureIndexBuilderJobConfig config) throws Exception { + validate(client, config, true); + } + + private static void assertInvalidJob(Client client, FeatureIndexBuilderJobConfig config) throws Exception { + validate(client, config, false); + } + + private static void validate(Client client, FeatureIndexBuilderJobConfig config, boolean expectValid) throws Exception { + JobValidator validator = new JobValidator(config, client); + + CountDownLatch latch = new CountDownLatch(1); + final AtomicReference exceptionHolder = new AtomicReference<>(); + validator.validate(ActionListener.wrap(validity -> { + assertEquals(expectValid, validity); + latch.countDown(); + }, e -> { + exceptionHolder.set(e); + latch.countDown(); + })); + + assertTrue(latch.await(100, TimeUnit.MILLISECONDS)); + if (expectValid == true && exceptionHolder.get() != null) { + throw exceptionHolder.get(); + } else if (expectValid == false && exceptionHolder.get() == null) { + fail("Expected config to be invalid"); + } + } +} From 6b66fe9d0aa883f2ea5c80a73b97e4e78882abb4 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 16 Nov 2018 19:19:55 +0100 Subject: [PATCH 19/49] [ML-Dataframe] re-factor code to use LogManager and own loggers (#35631) re-factor code to use LogManager and own loggers --- .../action/TransportDeleteFeatureIndexBuilderJobAction.java | 3 +++ .../ml/featureindexbuilder/job/AggregationResultUtils.java | 5 +++-- .../featureindexbuilder/job/FeatureIndexBuilderIndexer.java | 5 +++-- .../job/FeatureIndexBuilderJobPersistentTasksExecutor.java | 5 +++++ .../featureindexbuilder/job/FeatureIndexBuilderJobTask.java | 5 +++-- .../ml/featureindexbuilder/persistence/DataframeIndex.java | 5 +++-- .../xpack/ml/featureindexbuilder/support/JobValidator.java | 2 +- 7 files changed, 21 insertions(+), 9 deletions(-) diff --git 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java index 31c00a8b1841b..b5bd39ec529b7 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.ml.featureindexbuilder.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -31,6 +33,7 @@ public class TransportDeleteFeatureIndexBuilderJobAction extends TransportMasterNodeAction { private final PersistentTasksService persistentTasksService; + private static final Logger logger = LogManager.getLogger(TransportDeleteFeatureIndexBuilderJobAction.class); @Inject public TransportDeleteFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java index b42df5ed29f37..ddd4c12d54798 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java @@ -6,7 +6,8 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; @@ -21,7 +22,7 @@ import java.util.stream.Stream; final class AggregationResultUtils { - private static final Logger logger = Logger.getLogger(AggregationResultUtils.class.getName()); + private static final Logger logger = LogManager.getLogger(AggregationResultUtils.class); /** * Extracts aggregation results from a composite aggregation and puts it into a map. 
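The hunks in this patch all apply one pattern: the log4j 1.x lookup via Logger.getLogger(SomeClass.class.getName()) is replaced by a per-class log4j 2 logger obtained from LogManager. A minimal standalone sketch of that pattern follows; the class name LoggingSketch and the jobId parameter are illustrative only and not part of the plugin code.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class LoggingSketch {
    // Before: org.apache.log4j.Logger.getLogger(LoggingSketch.class.getName())
    // After: the logger is resolved directly from the class
    private static final Logger logger = LogManager.getLogger(LoggingSketch.class);

    void onStart(String jobId) {
        // log4j 2 also accepts parameterized messages as an alternative to string concatenation
        logger.debug("[{}] starting indexer", jobId);
    }
}

The hunks themselves keep the existing string-concatenated messages; the parameterized form above is merely an option log4j 2 offers, not a change made by this patch.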
diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index 65be05c2cca54..c1d6e987a4691 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -6,7 +6,8 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -38,7 +39,7 @@ public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; - private static final Logger logger = Logger.getLogger(FeatureIndexBuilderIndexer.class.getName()); + private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderIndexer.class); private FeatureIndexBuilderJob job; public FeatureIndexBuilderIndexer(Executor executor, FeatureIndexBuilderJob job, AtomicReference initialState, diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java index 91c0578fb907d..ab9e0ac12e42d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.job; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -20,6 +22,9 @@ import java.util.Map; public class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { + + private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderJobPersistentTasksExecutor.class); + private final Client client; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index 173b9125d33ee..9db41eb5efb32 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -6,7 +6,8 @@ package 
org.elasticsearch.xpack.ml.featureindexbuilder.job; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; @@ -31,7 +32,7 @@ public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { - private static final Logger logger = Logger.getLogger(FeatureIndexBuilderJobTask.class.getName()); + private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderJobTask.class); private final FeatureIndexBuilderJob job; private final ThreadPool threadPool; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java index 851259ee0ac3f..b8c353debe07a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java @@ -6,7 +6,8 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.persistence; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; @@ -22,7 +23,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; public final class DataframeIndex { - private static final Logger logger = Logger.getLogger(DataframeIndex.class.getName()); + private static final Logger logger = LogManager.getLogger(DataframeIndex.class); public static final String DOC_TYPE = "_doc"; private static final String PROPERTIES = "properties"; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java index 3c8cc9bd0f63f..78c990a989b36 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java @@ -33,7 +33,7 @@ public class JobValidator { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; - private static final Logger logger = LogManager.getLogger(JobValidator.class.getName()); + private static final Logger logger = LogManager.getLogger(JobValidator.class); private final Client client; private final FeatureIndexBuilderJobConfig config; From 2b005b0a39401e2fd9bdcf4a7ecfc036ac2d1ed4 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 20 Nov 2018 20:14:32 +0100 Subject: [PATCH 20/49] [ML-Dataframe] change licensing check and plugin name/description (#35618) change the internal naming to data frame, adds its own license check and fixes tests --- docs/reference/rest-api/info.asciidoc | 6 +++--- .../license/XPackLicenseState.java | 9 +++++++++ .../xpack/core/XPackClientPlugin.java | 6 ++++-- 
.../elasticsearch/xpack/core/XPackField.java | 4 ++-- .../xpack/core/XPackSettings.java | 5 +++++ .../dataframe/DataFrameFeatureSetUsage.java} | 10 +++++----- ...atureSet.java => DataFrameFeatureSet.java} | 20 ++++++++++--------- .../FeatureIndexBuilder.java | 6 +++--- ...nsportPutFeatureIndexBuilderJobAction.java | 4 ++-- ...portStartFeatureIndexBuilderJobAction.java | 4 ++-- 10 files changed, 46 insertions(+), 28 deletions(-) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java => core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java} (58%) rename x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/{FeatureIndexBuilderFeatureSet.java => DataFrameFeatureSet.java} (65%) diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index 235cc3e0e7e9c..2e756276f59be 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -63,9 +63,9 @@ Example response: "expiry_date_in_millis" : 1542665112332 }, "features" : { - "fib" : { - "description" : "Time series feature index creation", - "available" : false, + "data_frame" : { + "description" : "Data Frame for the Elastic Stack", + "available" : true, "enabled" : true }, "graph" : { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 3cb189b5795d0..bd0c112e4745c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -557,6 +557,15 @@ public static boolean isMachineLearningAllowedForOperationMode(final OperationMo return isPlatinumOrTrialOperationMode(operationMode); } + /** + * Data Frame is always available as long as there is a valid license + * + * @return true if the license is active + */ + public synchronized boolean isDataFrameAllowed() { + return status.active; + } + /** * Rollup is always available as long as there is a valid license * diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index a8167187bb2e6..7419bef537544 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.beats.BeatsFeatureSetUsage; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; import org.elasticsearch.xpack.core.graph.GraphFeatureSetUsage; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; @@ -420,8 +421,9 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(LifecycleAction.class, ReadOnlyAction.NAME, ReadOnlyAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, 
DeleteAction::new) - ); + new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), + // Data Frame + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_FRAME, DataFrameFeatureSetUsage::new)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 44acf81797c89..4b83c2546382e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -33,8 +33,8 @@ public final class XPackField { public static final String ROLLUP = "rollup"; /** Name constant for the index lifecycle feature. */ public static final String INDEX_LIFECYCLE = "ilm"; - /** Name constant for the feature index builder feature. */ - public static final String FIB = "fib"; + /** Name constant for the data frame feature. */ + public static final String DATA_FRAME = "data_frame"; private XPackField() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 111d8a9a68ca9..9e7886b92193b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -42,6 +42,10 @@ private XPackSettings() { */ public static final Setting CCR_ENABLED_SETTING = Setting.boolSetting("xpack.ccr.enabled", true, Property.NodeScope); + /** Setting for enabling or disabling data frame. Defaults to true. */ + public static final Setting DATA_FRAME_ENABLED = Setting.boolSetting("xpack.data_frame.enabled", true, + Setting.Property.NodeScope); + /** Setting for enabling or disabling security. Defaults to true. */ public static final Setting SECURITY_ENABLED = Setting.boolSetting("xpack.security.enabled", true, Setting.Property.NodeScope); @@ -194,6 +198,7 @@ public static List> getAllSettings() { settings.add(ROLLUP_ENABLED); settings.add(PASSWORD_HASHING_ALGORITHM); settings.add(INDEX_LIFECYCLE_ENABLED); + settings.add(DATA_FRAME_ENABLED); return Collections.unmodifiableList(settings); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java similarity index 58% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java index 232fb7c54c42a..9bcc47d98a71c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder; +package org.elasticsearch.xpack.core.dataframe; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.core.XPackField; @@ -12,12 +12,12 @@ import java.io.IOException; -public class FeatureIndexBuilderFeatureSetUsage extends Usage { - public FeatureIndexBuilderFeatureSetUsage(StreamInput input) throws IOException { +public class DataFrameFeatureSetUsage extends Usage { + public DataFrameFeatureSetUsage(StreamInput input) throws IOException { super(input); } - public FeatureIndexBuilderFeatureSetUsage(boolean available, boolean enabled) { - super(XPackField.FIB, available, enabled); + public DataFrameFeatureSetUsage(boolean available, boolean enabled) { + super(XPackField.DATA_FRAME, available, enabled); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java similarity index 65% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java rename to x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java index 9f6ca4daca8b5..b28d5908dd38c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilderFeatureSet.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java @@ -13,32 +13,35 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage; + import java.util.Map; -public class FeatureIndexBuilderFeatureSet implements XPackFeatureSet { +public class DataFrameFeatureSet implements XPackFeatureSet { private final boolean enabled; private final XPackLicenseState licenseState; @Inject - public FeatureIndexBuilderFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) { - this.enabled = true; // XPackSettings.FEATURE_INDEX_BUILDER_ENABLED.get(settings); + public DataFrameFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) { + this.enabled = XPackSettings.DATA_FRAME_ENABLED.get(settings); this.licenseState = licenseState; } @Override public String name() { - return XPackField.FIB; + return XPackField.DATA_FRAME; } @Override public String description() { - return "Time series feature index creation"; + return "Data Frame for the Elastic Stack"; } @Override public boolean available() { - return licenseState != null && licenseState.isMachineLearningAllowed(); // todo: part of ML? + return licenseState != null && licenseState.isDataFrameAllowed(); } @Override @@ -53,8 +56,7 @@ public Map nativeCodeInfo() { @Override public void usage(ActionListener listener) { - // TODO expose the currently running rollup tasks on this node? 
Unclear the best - // way to do that - listener.onResponse(new FeatureIndexBuilderFeatureSetUsage(available(), enabled())); + // TODO retrieve and send something useful + listener.onResponse(new DataFrameFeatureSetUsage(available(), enabled())); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index 8f864082ed07e..1d6a072cd0dbe 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -34,6 +34,7 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; @@ -80,8 +81,7 @@ public class FeatureIndexBuilder extends Plugin implements ActionPlugin, Persist public FeatureIndexBuilder(Settings settings) { this.settings = settings; - // todo: XPackSettings.FEATURE_INDEX_BUILDER_ENABLED.get(settings); - this.enabled = true; + this.enabled = XPackSettings.DATA_FRAME_ENABLED.get(settings); this.transportClientMode = XPackPlugin.transportClientMode(settings); } @@ -93,7 +93,7 @@ public Collection createGuiceModules() { return modules; } - modules.add(b -> XPackPlugin.bindFeatureSet(b, FeatureIndexBuilderFeatureSet.class)); + modules.add(b -> XPackPlugin.bindFeatureSet(b, DataFrameFeatureSet.class)); return modules; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index 9bce651651446..a47928c548620 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -62,8 +62,8 @@ protected PutFeatureIndexBuilderJobAction.Response newResponse() { @Override protected void masterOperation(Request request, ClusterState clusterState, ActionListener listener) throws Exception { - if (!licenseState.isMachineLearningAllowed()) { - listener.onFailure(LicenseUtils.newComplianceException(XPackField.FIB)); + if (!licenseState.isDataFrameAllowed()) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.DATA_FRAME)); return; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java index 23a8e2812951f..c43d48858a2d2 100644 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -67,8 +67,8 @@ protected void processTasks(StartFeatureIndexBuilderJobAction.Request request, C protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request request, ActionListener listener) { - if (!licenseState.isMachineLearningAllowed()) { - listener.onFailure(LicenseUtils.newComplianceException(XPackField.FIB)); + if (!licenseState.isDataFrameAllowed()) { + listener.onFailure(LicenseUtils.newComplianceException(XPackField.DATA_FRAME)); return; } From 2da7e303968d7ed7eb70c8c04ad131ace97d4168 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 22 Nov 2018 19:09:53 +0100 Subject: [PATCH 21/49] remove settings parameter after upstream re-factoring (#35831) remove settings from transport actions --- .../TransportDeleteFeatureIndexBuilderJobAction.java | 7 +++---- .../action/TransportPutFeatureIndexBuilderJobAction.java | 7 +++---- .../action/TransportStartFeatureIndexBuilderJobAction.java | 3 +-- .../action/TransportStopFeatureIndexBuilderJobAction.java | 3 +-- 4 files changed, 8 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java index b5bd39ec529b7..52bc39dbe8ae6 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; @@ -36,9 +35,9 @@ public class TransportDeleteFeatureIndexBuilderJobAction private static final Logger logger = LogManager.getLogger(TransportDeleteFeatureIndexBuilderJobAction.class); @Inject - public TransportDeleteFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - PersistentTasksService persistentTasksService, ClusterService clusterService) { + public TransportDeleteFeatureIndexBuilderJobAction(TransportService transportService, ThreadPool threadPool, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + PersistentTasksService persistentTasksService, ClusterService clusterService) { super(DeleteFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, DeleteFeatureIndexBuilderJobAction.Request::new); this.persistentTasksService = persistentTasksService; diff --git 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java index a47928c548620..166745e809bd3 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksService; @@ -39,9 +38,9 @@ public class TransportPutFeatureIndexBuilderJobAction private final Client client; @Inject - public TransportPutFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, - XPackLicenseState licenseState, PersistentTasksService persistentTasksService, Client client) { + public TransportPutFeatureIndexBuilderJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, + PersistentTasksService persistentTasksService, Client client) { super(PutFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutFeatureIndexBuilderJobAction.Request::new); this.licenseState = licenseState; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java index c43d48858a2d2..6a4d4e1656397 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; @@ -35,7 +34,7 @@ public class TransportStartFeatureIndexBuilderJobAction extends private final XPackLicenseState licenseState; @Inject - public TransportStartFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + public TransportStartFeatureIndexBuilderJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, 
XPackLicenseState licenseState) { super(StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java index e157c77784161..c4fb95cdca945 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -29,7 +28,7 @@ public class TransportStopFeatureIndexBuilderJobAction extends StopFeatureIndexBuilderJobAction.Response, StopFeatureIndexBuilderJobAction.Response> { @Inject - public TransportStopFeatureIndexBuilderJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + public TransportStopFeatureIndexBuilderJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super(StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, StopFeatureIndexBuilderJobAction.Request::new, StopFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); From 926abc2e6685281f4196685339c2e5ddc180cb82 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Sun, 25 Nov 2018 22:07:00 +0100 Subject: [PATCH 22/49] [ML-Dataframe] Feature/fib get jobs (#35825) add a get job API to data frames --- .../FeatureIndexBuilder.java | 13 +- .../action/GetDataFrameJobsAction.java | 205 ++++++++++++++++++ .../TransportGetDataFrameJobsAction.java | 127 +++++++++++ .../job/FeatureIndexBuilderJobConfig.java | 2 +- ...ndexBuilderJobPersistentTasksExecutor.java | 2 +- .../job/FeatureIndexBuilderJobTask.java | 4 + ...estDeleteFeatureIndexBuilderJobAction.java | 2 +- .../action/RestGetDataFrameJobsAction.java | 38 ++++ .../RestPutFeatureIndexBuilderJobAction.java | 2 +- ...RestStartFeatureIndexBuilderJobAction.java | 2 +- .../RestStopFeatureIndexBuilderJobAction.java | 2 +- .../GetDataFrameJobsActionRequestTests.java | 27 +++ .../job/FeatureIndexBuilderJobStateTests.java | 56 +++++ 13 files changed, 473 insertions(+), 9 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java create mode 100644 
x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index 1d6a072cd0dbe..a49d9be8e605c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -37,10 +37,12 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportDeleteFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportGetDataFrameJobsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStopFeatureIndexBuilderJobAction; @@ -48,6 +50,7 @@ import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobPersistentTasksExecutor; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestDeleteFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestGetDataFrameJobsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStopFeatureIndexBuilderJobAction; @@ -67,7 +70,9 @@ public class FeatureIndexBuilder extends Plugin implements ActionPlugin, PersistentTaskPlugin { public static final String NAME = "feature_index_builder"; + public static final String TASK_NAME = "xpack/feature_index_builder/job"; public static final String BASE_PATH = "/_xpack/feature_index_builder/"; + public static final String BASE_PATH_JOBS_BY_ID = BASE_PATH + "jobs/{id}/"; public static final String TASK_THREAD_POOL_NAME = "ml_feature_index_builder_indexing"; // list of headers that will be stored when a job is created @@ -112,7 +117,8 @@ public List getRestHandlers(final Settings settings, final RestCont new RestPutFeatureIndexBuilderJobAction(settings, restController), new RestStartFeatureIndexBuilderJobAction(settings, restController), new RestStopFeatureIndexBuilderJobAction(settings, 
restController), - new RestDeleteFeatureIndexBuilderJobAction(settings, restController) + new RestDeleteFeatureIndexBuilderJobAction(settings, restController), + new RestGetDataFrameJobsAction(settings, restController) ); } @@ -126,7 +132,8 @@ public List getRestHandlers(final Settings settings, final RestCont new ActionHandler<>(PutFeatureIndexBuilderJobAction.INSTANCE, TransportPutFeatureIndexBuilderJobAction.class), new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class), new ActionHandler<>(StopFeatureIndexBuilderJobAction.INSTANCE, TransportStopFeatureIndexBuilderJobAction.class), - new ActionHandler<>(DeleteFeatureIndexBuilderJobAction.INSTANCE, TransportDeleteFeatureIndexBuilderJobAction.class) + new ActionHandler<>(DeleteFeatureIndexBuilderJobAction.INSTANCE, TransportDeleteFeatureIndexBuilderJobAction.class), + new ActionHandler<>(GetDataFrameJobsAction.INSTANCE, TransportGetDataFrameJobsAction.class) ); } @@ -160,7 +167,7 @@ public List getNamedXContent() { return emptyList(); } return Arrays.asList( - new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField("xpack/feature_index_builder/job"), + new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(TASK_NAME), FeatureIndexBuilderJob::fromXContent), new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(FeatureIndexBuilderJobState.NAME), FeatureIndexBuilderJobState::fromXContent), diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java new file mode 100644 index 0000000000000..9e87008c6d2cc --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java @@ -0,0 +1,205 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public class GetDataFrameJobsAction extends Action{ + + public static final GetDataFrameJobsAction INSTANCE = new GetDataFrameJobsAction(); + public static final String NAME = "cluster:monitor/data_frame/get"; + public static final ParseField JOBS = new ParseField("jobs"); + public static final ParseField COUNT = new ParseField("count"); + + private GetDataFrameJobsAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContent { + private String id; + + public Request(String id) { + if (Strings.isNullOrEmpty(id) || id.equals("*")) { + this.id = MetaData.ALL; + } else { + this.id = id; + } + } + + public Request() {} + + @Override + public boolean match(Task task) { + // If we are retrieving all the jobs, the task description does not contain the id + if (id.equals(MetaData.ALL)) { + return task.getDescription().startsWith(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); + } + // Otherwise find the task by ID + return task.getDescription().equals(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + } + + public String getId() { + return id; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(id, other.id); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, GetDataFrameJobsAction action) { + 
super(client, action, new Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + + private List jobConfigurations; + + public Response(List jobConfigs) { + super(Collections.emptyList(), Collections.emptyList()); + this.jobConfigurations = jobConfigs; + } + + public Response(List jobResponses, List taskFailures, + List nodeFailures) { + super(taskFailures, nodeFailures); + this.jobConfigurations = jobResponses; + } + + public Response() { + super(Collections.emptyList(), Collections.emptyList()); + } + + public Response(StreamInput in) throws IOException { + super(Collections.emptyList(), Collections.emptyList()); + readFrom(in); + } + + public List getJobConfigurations() { + return jobConfigurations; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobConfigurations = in.readList(FeatureIndexBuilderJobConfig::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(jobConfigurations); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COUNT.getPreferredName(), jobConfigurations.size()); + // XContentBuilder does not support passing the params object for Iterables + builder.field(JOBS.getPreferredName()); + builder.startArray(); + for (FeatureIndexBuilderJobConfig jobResponse : jobConfigurations) { + jobResponse.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobConfigurations); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final Response that = (Response) other; + return Objects.equals(this.jobConfigurations, that.jobConfigurations); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java new file mode 100644 index 0000000000000..941375dc31331 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Request; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Response; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +public class TransportGetDataFrameJobsAction extends + TransportTasksAction { + + @Inject + public TransportGetDataFrameJobsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { + super(GetDataFrameJobsAction.NAME, clusterService, transportService, actionFilters, GetDataFrameJobsAction.Request::new, + GetDataFrameJobsAction.Response::new, ThreadPool.Names.SAME); + } + + @Override + protected Response newResponse(Request request, List tasks, List taskOperationFailures, + List failedNodeExceptions) { + List configs = tasks.stream().map(GetDataFrameJobsAction.Response::getJobConfigurations) + .flatMap(Collection::stream).collect(Collectors.toList()); + return new Response(configs, taskOperationFailures, failedNodeExceptions); + } + + @Override + protected Response readTaskResponse(StreamInput in) throws IOException { + return new Response(in); + } + + @Override + protected void taskOperation(Request request, FeatureIndexBuilderJobTask task, ActionListener listener) { + List configs = Collections.emptyList(); + + assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); + + // Little extra insurance, make sure we only return jobs that aren't cancelled + if (task.isCancelled() == false) { + configs = Collections.singletonList(task.getConfig()); + } + + listener.onResponse(new Response(configs)); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + + if (nodes.isLocalNodeElectedMaster()) { + if (stateHasDataFrameJobs(request, state)) { + super.doExecute(task, request, listener); + } else { + // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET + // and we can just send an empty response, no need to go looking for the allocated task + 
listener.onResponse(new Response(Collections.emptyList())); + } + + } else { + // Delegates GetJobs to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // on the master, which makes testing difficult. + if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, Response::new)); + } + } + } + + /** + * Check to see if the PersistentTask's cluster state contains the job(s) we are interested in + */ + static boolean stateHasDataFrameJobs(Request request, ClusterState state) { + boolean hasJobs = false; + PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + + if (pTasksMeta != null) { + // If the request was for _all jobs, we need to look through the list of + // persistent tasks and see if at least once has a DataFrameJob param + if (request.getId().equals(MetaData.ALL)) { + hasJobs = pTasksMeta.tasks() + .stream() + .anyMatch(persistentTask -> persistentTask.getTaskName().equals(FeatureIndexBuilder.TASK_NAME)); + + } else if (pTasksMeta.getTask(request.getId()) != null) { + // If we're looking for a single job, we can just check directly + hasJobs = true; + } + } + return hasJobs; + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java index e3a1035b24fc5..e450b0870fffc 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java @@ -41,7 +41,7 @@ public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentO private final SourceConfig sourceConfig; private final AggregationConfig aggregationConfig; - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, (args, optionalId) -> { String id = args[0] != null ? 
(String) args[0] : optionalId; String indexPattern = (String) args[1]; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java index ab9e0ac12e42d..0edd5ceb0084c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java @@ -31,7 +31,7 @@ public class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTas public FeatureIndexBuilderJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { - super("xpack/feature_index_builder/job", FeatureIndexBuilder.TASK_THREAD_POOL_NAME); + super(FeatureIndexBuilder.TASK_NAME, FeatureIndexBuilder.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index 9db41eb5efb32..b2e8e813a83d0 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -57,6 +57,10 @@ public FeatureIndexBuilderJobConfig getConfig() { return job.getConfig(); } + public FeatureIndexBuilderJobState getState() { + return new FeatureIndexBuilderJobState(indexer.getState(), indexer.getPosition()); + } + public synchronized void start(ActionListener listener) { // TODO: safeguards missing, see rollup code indexer.start(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java index d4f0b542d4896..8fc1375499edd 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java @@ -23,7 +23,7 @@ public class RestDeleteFeatureIndexBuilderJobAction extends BaseRestHandler { public RestDeleteFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.DELETE, FeatureIndexBuilder.BASE_PATH + "job/{id}/", this); + controller.registerHandler(RestRequest.Method.DELETE, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java 
b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java new file mode 100644 index 0000000000000..712a10ae5fbe5 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; + +public class RestGetDataFrameJobsAction extends BaseRestHandler { + public static final ParseField ID = new ParseField("id"); + + public RestGetDataFrameJobsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String id = restRequest.param(ID.getPreferredName()); + GetDataFrameJobsAction.Request request = new GetDataFrameJobsAction.Request(id); + return channel -> client.execute(GetDataFrameJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "data_frame_get_job_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java index afcda67b241d2..1b4bb4311bc40 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java @@ -24,7 +24,7 @@ public class RestPutFeatureIndexBuilderJobAction extends BaseRestHandler { public RestPutFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.PUT, FeatureIndexBuilder.BASE_PATH + "job/{id}/", this); + controller.registerHandler(RestRequest.Method.PUT, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java index 6d0e8e6f3c22e..206e2691670cc 100644 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java @@ -22,7 +22,7 @@ public class RestStartFeatureIndexBuilderJobAction extends BaseRestHandler { public RestStartFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH + "job/{id}/_start", this); + controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_start", this); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java index f20ca65c766bf..24c5b6598d6de 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java @@ -21,7 +21,7 @@ public class RestStopFeatureIndexBuilderJobAction extends BaseRestHandler { public RestStopFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH + "job/{id}/_stop", this); + controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_stop", this); } @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java new file mode 100644 index 0000000000000..63662c81b81b3 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Request; + +public class GetDataFrameJobsActionRequestTests extends AbstractStreamableTestCase { + + @Override + protected Request createTestInstance() { + if (randomBoolean()) { + return new Request(MetaData.ALL); + } + return new Request(randomAlphaOfLengthBetween(1, 20)); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java new file mode 100644 index 0000000000000..cade3ae67563e --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.indexing.IndexerState; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class FeatureIndexBuilderJobStateTests extends AbstractSerializingTestCase { + + public static FeatureIndexBuilderJobState randomFeatureIndexBuilderJobState() { + return new FeatureIndexBuilderJobState(randomFrom(IndexerState.values()), randomPosition()); + } + + @Override + protected FeatureIndexBuilderJobState doParseInstance(XContentParser parser) throws IOException { + return FeatureIndexBuilderJobState.fromXContent(parser); + } + + @Override + protected FeatureIndexBuilderJobState createTestInstance() { + return randomFeatureIndexBuilderJobState(); + } + + @Override + protected Reader instanceReader() { + return FeatureIndexBuilderJobState::new; + } + + private static Map randomPosition() { + if (randomBoolean()) { + return null; + } + int numFields = randomIntBetween(1, 5); + Map position = new HashMap<>(); + for (int i = 0; i < numFields; i++) { + Object value; + if (randomBoolean()) { + value = randomLong(); + } else { + value = randomAlphaOfLengthBetween(1, 10); + } + position.put(randomAlphaOfLengthBetween(3, 10), value); + } + return position; + } +} From 7a78e1f1d03f463c1c2ca502f9646eff9ecdf576 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 28 Nov 2018 15:42:16 +0100 Subject: [PATCH 23/49] [ML-DataFrame] add a stats endpoint (#35911) add a stats endpoint for data frame jobs which returns stats and state of 1 or more jobs. 
--- .../FeatureIndexBuilder.java | 9 +- .../action/DataFrameJobStateAndStats.java | 103 +++++++++ .../action/GetDataFrameJobsStatsAction.java | 198 ++++++++++++++++++ .../TransportGetDataFrameJobsAction.java | 29 +-- .../TransportGetDataFrameJobsStatsAction.java | 104 +++++++++ .../job/AggregationResultUtils.java | 6 +- .../job/DataFrameIndexerJobStats.java | 88 ++++++++ .../job/FeatureIndexBuilderIndexer.java | 6 +- .../job/FeatureIndexBuilderJobStats.java | 67 ------ .../job/FeatureIndexBuilderJobTask.java | 4 + .../DataFramePersistentTaskUtils.java | 41 ++++ .../RestGetDataFrameJobsStatsAction.java | 37 ++++ .../DataFrameJobStateAndStatsTests.java | 37 ++++ ...tDataFrameJobsStatsActionRequestTests.java | 26 +++ .../job/AggregationResultUtilsTests.java | 45 ++-- .../job/DataFrameIndexerJobStatsTests.java | 34 +++ 16 files changed, 718 insertions(+), 116 deletions(-) create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java create mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java index a49d9be8e605c..eb6cc2ecb56be 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java @@ -38,11 +38,13 @@ import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; import 
org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportDeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportGetDataFrameJobsAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportGetDataFrameJobsStatsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStopFeatureIndexBuilderJobAction; @@ -51,6 +53,7 @@ import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestDeleteFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestGetDataFrameJobsAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestGetDataFrameJobsStatsAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStopFeatureIndexBuilderJobAction; @@ -118,7 +121,8 @@ public List getRestHandlers(final Settings settings, final RestCont new RestStartFeatureIndexBuilderJobAction(settings, restController), new RestStopFeatureIndexBuilderJobAction(settings, restController), new RestDeleteFeatureIndexBuilderJobAction(settings, restController), - new RestGetDataFrameJobsAction(settings, restController) + new RestGetDataFrameJobsAction(settings, restController), + new RestGetDataFrameJobsStatsAction(settings, restController) ); } @@ -133,7 +137,8 @@ public List getRestHandlers(final Settings settings, final RestCont new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class), new ActionHandler<>(StopFeatureIndexBuilderJobAction.INSTANCE, TransportStopFeatureIndexBuilderJobAction.class), new ActionHandler<>(DeleteFeatureIndexBuilderJobAction.INSTANCE, TransportDeleteFeatureIndexBuilderJobAction.class), - new ActionHandler<>(GetDataFrameJobsAction.INSTANCE, TransportGetDataFrameJobsAction.class) + new ActionHandler<>(GetDataFrameJobsAction.INSTANCE, TransportGetDataFrameJobsAction.class), + new ActionHandler<>(GetDataFrameJobsStatsAction.INSTANCE, TransportGetDataFrameJobsStatsAction.class) ); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java new file mode 100644 index 0000000000000..2a44aace47fd5 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; +import java.util.Objects; + +public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { + + public static final ParseField STATE_FIELD = new ParseField("state"); + public static final ParseField STATS_FIELD = new ParseField("stats"); + + private final String id; + private final FeatureIndexBuilderJobState jobState; + private final DataFrameIndexerJobStats jobStats; + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + GetDataFrameJobsAction.NAME, + a -> new DataFrameJobStateAndStats((String) a[0], (FeatureIndexBuilderJobState) a[1], (DataFrameIndexerJobStats) a[2])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), FeatureIndexBuilderJob.ID); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), FeatureIndexBuilderJobState.PARSER::apply, STATE_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), STATS_FIELD); + } + + public DataFrameJobStateAndStats(String id, FeatureIndexBuilderJobState state, DataFrameIndexerJobStats stats) { + this.id = Objects.requireNonNull(id); + this.jobState = Objects.requireNonNull(state); + this.jobStats = Objects.requireNonNull(stats); + } + + public DataFrameJobStateAndStats(StreamInput in) throws IOException { + this.id = in.readString(); + this.jobState = new FeatureIndexBuilderJobState(in); + this.jobStats = new DataFrameIndexerJobStats(in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(STATE_FIELD.getPreferredName(), jobState); + builder.field(STATS_FIELD.getPreferredName(), jobStats); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + jobState.writeTo(out); + jobStats.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash(id, jobState, jobStats); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DataFrameJobStateAndStats that = (DataFrameJobStateAndStats) other; + + return Objects.equals(this.id, that.id) && Objects.equals(this.jobState, that.jobState) + && Objects.equals(this.jobStats, that.jobStats); + } + + public String getId() { + return id; + } + + public DataFrameIndexerJobStats getJobStats() { + return jobStats; + } + + public FeatureIndexBuilderJobState getJobState() { + return jobState; + } +} diff --git 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java new file mode 100644 index 0000000000000..be2b70084676e --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java @@ -0,0 +1,198 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +public class GetDataFrameJobsStatsAction extends Action { + + public static final GetDataFrameJobsStatsAction INSTANCE = new GetDataFrameJobsStatsAction(); + public static final String NAME = "cluster:monitor/data_frame_stats/get"; + public static final ParseField COUNT = new ParseField("count"); + public static final ParseField JOBS = new ParseField("jobs"); + + public GetDataFrameJobsStatsAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends BaseTasksRequest implements ToXContent { + private String id; + + public Request(String id) { + if (Strings.isNullOrEmpty(id) || id.equals("*")) { + this.id = MetaData.ALL; + } else { + this.id = id; + } + } + + public Request() {} + + @Override + public boolean match(Task task) { + // If we are retrieving all the jobs, the task description does not contain the id + if (id.equals(MetaData.ALL)) { + return task.getDescription().startsWith(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); + } + // Otherwise find the task by ID + return task.getDescription().equals(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + } + + public String getId() { + return id; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + } + + @Override + public ActionRequestValidationException 
validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(id, other.id); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + protected RequestBuilder(ElasticsearchClient client, GetDataFrameJobsStatsAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + private List jobsStateAndStats; + + public Response(List jobsStateAndStats) { + super(Collections.emptyList(), Collections.emptyList()); + this.jobsStateAndStats = jobsStateAndStats; + } + + public Response(List jobsStateAndStats, List taskFailures, + List nodeFailures) { + super(taskFailures, nodeFailures); + this.jobsStateAndStats = jobsStateAndStats; + } + + public Response() { + super(Collections.emptyList(), Collections.emptyList()); + this.jobsStateAndStats = Collections.emptyList(); + } + + public Response(StreamInput in) throws IOException { + super(Collections.emptyList(), Collections.emptyList()); + readFrom(in); + } + + public List getJobsStateAndStats() { + return jobsStateAndStats; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + jobsStateAndStats = in.readList(DataFrameJobStateAndStats::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeList(jobsStateAndStats); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COUNT.getPreferredName(), jobsStateAndStats.size()); + builder.field(JOBS.getPreferredName(), jobsStateAndStats); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(jobsStateAndStats); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final Response that = (Response) other; + return Objects.equals(this.jobsStateAndStats, that.jobsStateAndStats); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java index 941375dc31331..46b44c70c5fb8 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java @@ -19,15 +19,14 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.discovery.MasterNotDiscoveredException; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Request; import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Response; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataFramePersistentTaskUtils; import java.io.IOException; import java.util.Collection; @@ -80,7 +79,7 @@ protected void doExecute(Task task, Request request, ActionListener li final DiscoveryNodes nodes = state.nodes(); if (nodes.isLocalNodeElectedMaster()) { - if (stateHasDataFrameJobs(request, state)) { + if (DataFramePersistentTaskUtils.stateHasDataFrameJobs(request.getId(), state)) { super.doExecute(task, request, listener); } else { // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET @@ -100,28 +99,4 @@ protected void doExecute(Task task, Request request, ActionListener li } } } - - /** - * Check to see if the PersistentTask's cluster state contains the job(s) we are interested in - */ - static boolean stateHasDataFrameJobs(Request request, ClusterState state) { - boolean hasJobs = false; - PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - - if (pTasksMeta != null) { - // If the request was for _all jobs, we need to look through the list of - // persistent tasks and see if at least once has a DataFrameJob param - if (request.getId().equals(MetaData.ALL)) { - hasJobs = pTasksMeta.tasks() - .stream() - .anyMatch(persistentTask -> persistentTask.getTaskName().equals(FeatureIndexBuilder.TASK_NAME)); - - } else if (pTasksMeta.getTask(request.getId()) != null) { - // If we're looking for a single job, we can just check directly - hasJobs = true; - } - } - return hasJobs; - } - } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java new file mode 100644 index 0000000000000..efb90a601a596 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.tasks.TransportTasksAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Request; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Response; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataFramePersistentTaskUtils; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +public class TransportGetDataFrameJobsStatsAction extends + TransportTasksAction { + + @Inject + public TransportGetDataFrameJobsStatsAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService) { + super(GetDataFrameJobsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + ThreadPool.Names.SAME); + } + + @Override + protected Response newResponse(Request request, List tasks, List taskOperationFailures, + List failedNodeExceptions) { + List responses = tasks.stream().map(GetDataFrameJobsStatsAction.Response::getJobsStateAndStats) + .flatMap(Collection::stream).collect(Collectors.toList()); + return new Response(responses, taskOperationFailures, failedNodeExceptions); + } + + @Override + protected Response readTaskResponse(StreamInput in) throws IOException { + return new Response(in); + } + + @Override + protected void taskOperation(Request request, FeatureIndexBuilderJobTask task, ActionListener listener) { + List jobsStateAndStats = Collections.emptyList(); + + assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); + + // Little extra insurance, make sure we only return jobs that aren't cancelled + if (task.isCancelled() == false) { + DataFrameJobStateAndStats jobStateAndStats = new DataFrameJobStateAndStats(task.getConfig().getId(), task.getState(), + task.getStats()); + jobsStateAndStats = Collections.singletonList(jobStateAndStats); + } + + listener.onResponse(new Response(jobsStateAndStats)); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + + if (nodes.isLocalNodeElectedMaster()) { + if (DataFramePersistentTaskUtils.stateHasDataFrameJobs(request.getId(), state)) { + super.doExecute(task, request, listener); + } else { + // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET + // and we can just send an empty 
response, no need to go looking for the allocated task + listener.onResponse(new Response(Collections.emptyList())); + } + + } else { + // Delegates GetJobs to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // on the master, which makes testing difficult. + if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, Response::new)); + } + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java index ddd4c12d54798..63b06cb369ba1 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java @@ -30,11 +30,15 @@ final class AggregationResultUtils { * @param agg The aggregation result * @param sources The original sources used for querying * @param aggregationBuilders the aggregation used for querying + * @param dataFrameIndexerJobStats stats collector * @return a map containing the results of the aggregation in a consumable way */ public static Stream> extractCompositeAggregationResults(CompositeAggregation agg, - List> sources, Collection aggregationBuilders) { + List> sources, Collection aggregationBuilders, + DataFrameIndexerJobStats dataFrameIndexerJobStats) { return agg.getBuckets().stream().map(bucket -> { + dataFrameIndexerJobStats.incrementNumDocuments(bucket.getDocCount()); + Map document = new HashMap<>(); for (CompositeValuesSourceBuilder source : sources) { String destinationFieldName = source.name(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java new file mode 100644 index 0000000000000..7cf5f430db25d --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.indexing.IndexerJobStats; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class DataFrameIndexerJobStats extends IndexerJobStats { + private static ParseField NUM_PAGES = new ParseField("pages_processed"); + private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); + private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); + private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); + private static ParseField INDEX_TIME_IN_MS = new ParseField("index_time_in_ms"); + private static ParseField SEARCH_TIME_IN_MS = new ParseField("search_time_in_ms"); + private static ParseField INDEX_TOTAL = new ParseField("index_total"); + private static ParseField SEARCH_TOTAL = new ParseField("search_total"); + private static ParseField SEARCH_FAILURES = new ParseField("search_failures"); + private static ParseField INDEX_FAILURES = new ParseField("index_failures"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME.getPreferredName(), + args -> new DataFrameIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], (long) args[4], + (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9])); + + static { + PARSER.declareLong(constructorArg(), NUM_PAGES); + PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); + PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); + PARSER.declareLong(constructorArg(), INDEX_TIME_IN_MS); + PARSER.declareLong(constructorArg(), SEARCH_TIME_IN_MS); + PARSER.declareLong(constructorArg(), INDEX_TOTAL); + PARSER.declareLong(constructorArg(), SEARCH_TOTAL); + PARSER.declareLong(constructorArg(), INDEX_FAILURES); + PARSER.declareLong(constructorArg(), SEARCH_FAILURES); + } + + public DataFrameIndexerJobStats() { + super(); + } + + public DataFrameIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, long indexTime, + long searchTime, long indexTotal, long searchTotal, long indexFailures, long searchFailures) { + super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, indexFailures, + searchFailures); + } + + public DataFrameIndexerJobStats(StreamInput in) throws IOException { + super(in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NUM_PAGES.getPreferredName(), numPages); + builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); + builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); + builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); + builder.field(INDEX_TIME_IN_MS.getPreferredName(), indexTime); + builder.field(INDEX_TOTAL.getPreferredName(), indexTotal); + builder.field(INDEX_FAILURES.getPreferredName(), indexFailures); + builder.field(SEARCH_TIME_IN_MS.getPreferredName(), searchTime); + 
builder.field(SEARCH_TOTAL.getPreferredName(), searchTotal); + builder.field(SEARCH_FAILURES.getPreferredName(), searchFailures); + builder.endObject(); + return builder; + } + + public static DataFrameIndexerJobStats fromXContent(XContentParser parser) { + try { + return PARSER.parse(parser, null); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java index c1d6e987a4691..e3d3de8c432d0 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java @@ -36,7 +36,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataframeIndex.DOC_TYPE; -public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, FeatureIndexBuilderJobStats> { +public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerJobStats> { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderIndexer.class); @@ -44,7 +44,7 @@ public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer initialState, Map initialPosition) { - super(executor, initialState, initialPosition, new FeatureIndexBuilderJobStats()); + super(executor, initialState, initialPosition, new DataFrameIndexerJobStats()); this.job = job; } @@ -78,7 +78,7 @@ private Stream processBucketsToIndexRequests(CompositeAggregation List> sources = job.getConfig().getSourceConfig().getSources(); Collection aggregationBuilders = job.getConfig().getAggregationConfig().getAggregatorFactories(); - return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders).map(document -> { + return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, getStats()).map(document -> { XContentBuilder builder; try { builder = jsonBuilder(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java deleted file mode 100644 index a7c9392800f09..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStats.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.job; - -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.indexing.IndexerJobStats; - -import java.io.IOException; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; - -public class FeatureIndexBuilderJobStats extends IndexerJobStats { - private static ParseField NUM_PAGES = new ParseField("pages_processed"); - private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); - private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); - private static ParseField NUM_INVOCATIONS = new ParseField("trigger_count"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - args -> new FeatureIndexBuilderJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3])); - - static { - PARSER.declareLong(constructorArg(), NUM_PAGES); - PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS); - PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS); - PARSER.declareLong(constructorArg(), NUM_INVOCATIONS); - } - - public FeatureIndexBuilderJobStats() { - super(); - } - - public FeatureIndexBuilderJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) { - super(numPages, numInputDocuments, numOuputDocuments, numInvocations); - } - - public FeatureIndexBuilderJobStats(StreamInput in) throws IOException { - super(in); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NUM_PAGES.getPreferredName(), numPages); - builder.field(NUM_INPUT_DOCUMENTS.getPreferredName(), numInputDocuments); - builder.field(NUM_OUTPUT_DOCUMENTS.getPreferredName(), numOuputDocuments); - builder.field(NUM_INVOCATIONS.getPreferredName(), numInvocations); - builder.endObject(); - return builder; - } - - public static FeatureIndexBuilderJobStats fromXContent(XContentParser parser) { - try { - return PARSER.parse(parser, null); - } catch (IOException e) { - throw new RuntimeException(e); - } - } -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java index b2e8e813a83d0..659cec9fbb8ae 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java @@ -61,6 +61,10 @@ public FeatureIndexBuilderJobState getState() { return new FeatureIndexBuilderJobState(indexer.getState(), indexer.getPosition()); } + public DataFrameIndexerJobStats getStats() { + return indexer.getStats(); + } + public synchronized void start(ActionListener listener) { // TODO: safeguards missing, see rollup code indexer.start(); diff --git 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java new file mode 100644 index 0000000000000..b3b70008f0c04 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.persistence; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; + +public final class DataFramePersistentTaskUtils { + + private DataFramePersistentTaskUtils() { + } + + /** + * Check to see if the PersistentTask's cluster state contains the job(s) we + * are interested in + */ + public static boolean stateHasDataFrameJobs(String id, ClusterState state) { + boolean hasJobs = false; + PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + + if (pTasksMeta != null) { + // If the request was for _all jobs, we need to look through the list of + // persistent tasks and see if at least once has a DataFrameJob param + if (id.equals(MetaData.ALL)) { + hasJobs = pTasksMeta.tasks().stream() + .anyMatch(persistentTask -> persistentTask.getTaskName().equals(FeatureIndexBuilder.TASK_NAME)); + + } else if (pTasksMeta.getTask(id) != null) { + // If we're looking for a single job, we can just check directly + hasJobs = true; + } + } + return hasJobs; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java new file mode 100644 index 0000000000000..ca01ca43caf2a --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; + +public class RestGetDataFrameJobsStatsAction extends BaseRestHandler { + + public RestGetDataFrameJobsStatsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_stats", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); + GetDataFrameJobsStatsAction.Request request = new GetDataFrameJobsStatsAction.Request(id); + return channel -> client.execute(GetDataFrameJobsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "data_frame_get_jobs_stats_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java new file mode 100644 index 0000000000000..d23d23f97c91a --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.AbstractSerializingFeatureIndexBuilderTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.DataFrameIndexerJobStatsTests; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobStateTests; + +import java.io.IOException; + +public class DataFrameJobStateAndStatsTests + extends AbstractSerializingFeatureIndexBuilderTestCase { + + @Override + protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throws IOException { + return DataFrameJobStateAndStats.PARSER.apply(parser, null); + } + + @Override + protected DataFrameJobStateAndStats createTestInstance() { + return new DataFrameJobStateAndStats(randomAlphaOfLengthBetween(1,10), + FeatureIndexBuilderJobStateTests.randomFeatureIndexBuilderJobState(), + DataFrameIndexerJobStatsTests.randomStats()); + } + + @Override + protected Reader instanceReader() { + return DataFrameJobStateAndStats::new; + } + +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java new file mode 100644 index 0000000000000..2d311cd9f6c13 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.action; + +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Request; + +public class GetDataFrameJobsStatsActionRequestTests extends AbstractStreamableTestCase { + @Override + protected Request createTestInstance() { + if (randomBoolean()) { + return new Request(MetaData.ALL); + } + return new Request(randomAlphaOfLengthBetween(1, 20)); + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java index 9e3ef63fd7e8e..4a62c56623ad3 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java @@ -61,6 +61,7 @@ public class AggregationResultUtilsTests extends ESTestCase { private final NamedXContentRegistry namedXContentRegistry = new NamedXContentRegistry(namedXContents); private final String KEY = Aggregation.CommonFields.KEY.getPreferredName(); + private final String DOC_COUNT = Aggregation.CommonFields.DOC_COUNT.getPreferredName(); // aggregations potentially useful for writing tests, to be expanded as necessary private static final List namedXContents; @@ -107,17 +108,20 @@ public void testExtractCompositeAggregationResults() throws IOException { KEY, asMap( targetField, "ID1"), aggTypedName, asMap( - "value", 42.33)), + "value", 42.33), + DOC_COUNT, 8), asMap( KEY, asMap( targetField, "ID2"), aggTypedName, asMap( - "value", 28.99)), + "value", 28.99), + DOC_COUNT, 3), asMap( KEY, asMap( targetField, "ID3"), aggTypedName, asMap( - "value", 12.55)) + "value", 12.55), + DOC_COUNT, 9) )); List> expected = asList( @@ -135,7 +139,7 @@ aggTypedName, asMap( ) ); - executeTest(sources, aggregationBuilders, input, expected); + executeTest(sources, aggregationBuilders, input, expected, 20); } public void testExtractCompositeAggregationResultsMultiSources() throws IOException { @@ -160,28 +164,32 @@ KEY, asMap( targetField2, "ID1_2" ), aggTypedName, asMap( - "value", 42.33)), + "value", 42.33), + DOC_COUNT, 1), asMap( KEY, asMap( targetField, "ID1", targetField2, "ID2_2" ), aggTypedName, asMap( - "value", 8.4)), + "value", 8.4), + DOC_COUNT, 2), asMap( KEY, asMap( targetField, "ID2", targetField2, "ID1_2" ), aggTypedName, asMap( - "value", 28.99)), + "value", 28.99), + DOC_COUNT, 3), asMap( KEY, asMap( targetField, "ID3", targetField2, "ID2_2" ), aggTypedName, asMap( - "value", 12.55)) + "value", 12.55), + DOC_COUNT, 4) )); List> expected = asList( @@ -206,7 +214,7 @@ aggTypedName, asMap( aggName, 12.55 ) ); - executeTest(sources, aggregationBuilders, input, expected); + executeTest(sources, aggregationBuilders, input, expected, 10); } public void testExtractCompositeAggregationResultsMultiAggregations() throws IOException { @@ -232,21 +240,24 @@ KEY, asMap( aggTypedName, asMap( "value", 42.33), aggTypedName2, asMap( - "value", 9.9)), + "value", 9.9), + DOC_COUNT, 111), asMap( KEY, asMap( targetField, "ID2"), aggTypedName, asMap( "value", 
28.99), aggTypedName2, asMap( - "value", 222.33)), + "value", 222.33), + DOC_COUNT, 88), asMap( KEY, asMap( targetField, "ID3"), aggTypedName, asMap( "value", 12.55), aggTypedName2, asMap( - "value", -2.44)) + "value", -2.44), + DOC_COUNT, 1) )); List> expected = asList( @@ -266,20 +277,22 @@ aggTypedName2, asMap( aggName2, -2.44 ) ); - executeTest(sources, aggregationBuilders, input, expected); + executeTest(sources, aggregationBuilders, input, expected, 200); } private void executeTest(List> sources, Collection aggregationBuilders, - Map input, List> expected) throws IOException { + Map input, List> expected, long expectedDocCounts) throws IOException { + DataFrameIndexerJobStats stats = new DataFrameIndexerJobStats(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.map(input); try (XContentParser parser = createParser(builder)) { CompositeAggregation agg = ParsedComposite.fromXContent(parser, "my_feature"); - List> result = AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders) - .collect(Collectors.toList()); + List> result = AggregationResultUtils + .extractCompositeAggregationResults(agg, sources, aggregationBuilders, stats).collect(Collectors.toList()); assertEquals(expected, result); + assertEquals(expectedDocCounts, stats.getNumDocuments()); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java new file mode 100644 index 0000000000000..a71368d3ef031 --- /dev/null +++ b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.ml.featureindexbuilder.job; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +public class DataFrameIndexerJobStatsTests extends AbstractSerializingTestCase{ + @Override + protected DataFrameIndexerJobStats createTestInstance() { + return randomStats(); + } + + @Override + protected Writeable.Reader instanceReader() { + return DataFrameIndexerJobStats::new; + } + + @Override + protected DataFrameIndexerJobStats doParseInstance(XContentParser parser) { + return DataFrameIndexerJobStats.fromXContent(parser); + } + + public static DataFrameIndexerJobStats randomStats() { + return new DataFrameIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + randomNonNegativeLong(), randomNonNegativeLong()); + } +} From c1bcd3621112f1f8250543e5a53831d663dc826c Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 28 Nov 2018 15:42:52 +0100 Subject: [PATCH 24/49] dedup ID fields (#35995) removes some ParseField duplication --- .../action/RestDeleteFeatureIndexBuilderJobAction.java | 5 ++--- .../rest/action/RestGetDataFrameJobsAction.java | 7 +++---- .../rest/action/RestPutFeatureIndexBuilderJobAction.java | 6 ++---- .../rest/action/RestStartFeatureIndexBuilderJobAction.java | 1 - 4 files changed, 7 insertions(+), 12 deletions(-) diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java index 8fc1375499edd..1e504987f51a6 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java @@ -7,7 +7,6 @@ import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -15,11 +14,11 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; import java.io.IOException; public class RestDeleteFeatureIndexBuilderJobAction extends BaseRestHandler { - public static final ParseField ID = new ParseField("id"); public RestDeleteFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); @@ -28,7 +27,7 @@ public RestDeleteFeatureIndexBuilderJobAction(Settings settings, RestController @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(ID.getPreferredName()); + String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); DeleteFeatureIndexBuilderJobAction.Request request = new 
DeleteFeatureIndexBuilderJobAction.Request(id); return channel -> client.execute(DeleteFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java index 712a10ae5fbe5..2d18c63b0b0e6 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; @@ -15,9 +14,9 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; public class RestGetDataFrameJobsAction extends BaseRestHandler { - public static final ParseField ID = new ParseField("id"); public RestGetDataFrameJobsAction(Settings settings, RestController controller) { super(settings); @@ -26,13 +25,13 @@ public RestGetDataFrameJobsAction(Settings settings, RestController controller) @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String id = restRequest.param(ID.getPreferredName()); + String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); GetDataFrameJobsAction.Request request = new GetDataFrameJobsAction.Request(id); return channel -> client.execute(GetDataFrameJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "data_frame_get_job_action"; + return "data_frame_get_jobs_action"; } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java index 1b4bb4311bc40..45d05452c819d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.rest.BaseRestHandler; @@ -16,11 +15,11 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; import 
org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; import java.io.IOException; public class RestPutFeatureIndexBuilderJobAction extends BaseRestHandler { - public static final ParseField ID = new ParseField("id"); public RestPutFeatureIndexBuilderJobAction(Settings settings, RestController controller) { super(settings); @@ -34,12 +33,11 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(ID.getPreferredName()); + String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); PutFeatureIndexBuilderJobAction.Request request = PutFeatureIndexBuilderJobAction.Request.fromXContent(parser, id); return channel -> client.execute(PutFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } - } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java index 206e2691670cc..2a43d4fd04630 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java @@ -37,5 +37,4 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient public String getName() { return "ml_feature_index_builder_start_job_action"; } - } From 12318a5d999c9d677233e42967d0a064715b7efb Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 29 Nov 2018 15:19:45 +0100 Subject: [PATCH 25/49] [ML-DataFrame] Rename plugin and endpoints (#36048) Renames the plugin to data-frame, directory to x-pack/plugin/data-frame, the API prefix to _data_frame and the package to org.elasticsearch.xpack.dataframe --- .../build.gradle | 6 +- .../xpack/dataframe/DataFrame.java} | 86 +++++++++---------- .../xpack/dataframe}/DataFrameFeatureSet.java | 2 +- .../action/DataFrameJobStateAndStats.java | 24 +++--- .../action/DeleteDataFrameJobAction.java} | 18 ++-- .../action/GetDataFrameJobsAction.java | 26 +++--- .../action/GetDataFrameJobsStatsAction.java | 10 +-- .../action/PutDataFrameJobAction.java} | 26 +++--- .../action/StartDataFrameJobAction.java} | 18 ++-- .../action/StopDataFrameJobAction.java} | 22 ++--- .../TransportDeleteDataFrameJobAction.java} | 34 ++++---- .../TransportGetDataFrameJobsAction.java | 20 ++--- .../TransportGetDataFrameJobsStatsAction.java | 14 +-- .../TransportPutDataFrameJobAction.java} | 42 ++++----- .../TransportStartDataFrameJobAction.java} | 52 +++++------ .../TransportStopDataFrameJobAction.java} | 40 ++++----- .../dataframe}/job/AggregationConfig.java | 2 +- .../job/AggregationResultUtils.java | 2 +- .../dataframe/job/DataFrameIndexer.java} | 12 +-- .../job/DataFrameIndexerJobStats.java | 2 +- .../xpack/dataframe/job/DataFrameJob.java} | 29 ++++--- .../dataframe/job/DataFrameJobConfig.java} | 20 ++--- .../DataFrameJobPersistentTasksExecutor.java} | 26 +++--- .../dataframe/job/DataFrameJobState.java} | 18 ++-- 
.../dataframe/job/DataFrameJobTask.java} | 52 +++++------ .../xpack/dataframe}/job/SourceConfig.java | 4 +- .../DataFramePersistentTaskUtils.java | 6 +- .../persistence/DataframeIndex.java | 6 +- .../action/RestDeleteDataFrameJobAction.java | 40 +++++++++ .../action/RestGetDataFrameJobsAction.java | 12 +-- .../RestGetDataFrameJobsStatsAction.java | 12 +-- .../action/RestPutDataFrameJobAction.java | 43 ++++++++++ .../action/RestStartDataFrameJobAction.java} | 18 ++-- .../action/RestStopDataFrameJobAction.java | 39 +++++++++ .../dataframe}/support/Aggregations.java | 2 +- .../dataframe}/support/JobValidator.java | 8 +- .../DataFrameJobStateAndStatsTests.java | 12 +-- ...DeleteDataFrameJobActionRequestTests.java} | 6 +- .../GetDataFrameJobsActionRequestTests.java | 4 +- ...tDataFrameJobsStatsActionRequestTests.java | 4 +- .../PutDataFrameJobActionRequestTests.java} | 12 +-- .../action/StartDataFrameJobActionTests.java} | 4 +- .../StopDataFrameJobActionRequestTests.java} | 7 +- ...AbstractSerializingDataFrameTestCase.java} | 4 +- .../job/AggregationConfigTests.java | 5 +- .../job/AggregationResultUtilsTests.java | 2 +- .../job/DataFrameIndexerJobStatsTests.java | 2 +- .../job/DataFrameJobConfigTests.java | 48 +++++++++++ .../job/DataFrameJobStateTests.java} | 20 ++--- .../dataframe}/job/SourceConfigTests.java | 4 +- .../dataframe}/support/AggregationsTests.java | 2 +- .../dataframe}/support/JobValidatorTests.java | 24 +++--- ...estDeleteFeatureIndexBuilderJobAction.java | 40 --------- .../RestPutFeatureIndexBuilderJobAction.java | 43 ---------- .../RestStopFeatureIndexBuilderJobAction.java | 39 --------- .../FeatureIndexBuilderJobConfigTests.java | 48 ----------- 56 files changed, 563 insertions(+), 560 deletions(-) rename x-pack/plugin/{ml-feature-index-builder => data-frame}/build.gradle (78%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java} (58%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/DataFrameFeatureSet.java (97%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/action/DataFrameJobStateAndStats.java (76%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java} (79%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/action/GetDataFrameJobsAction.java (84%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/action/GetDataFrameJobsStatsAction.java (93%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java} (76%) rename 
x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java} (85%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java} (84%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java} (72%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/action/TransportGetDataFrameJobsAction.java (81%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/action/TransportGetDataFrameJobsStatsAction.java (87%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java} (63%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java} (54%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java} (50%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/job/AggregationConfig.java (97%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/job/AggregationResultUtils.java (98%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java} (89%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/job/DataFrameIndexerJobStats.java (98%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java} (70%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java} (86%) rename 
x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java} (59%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java} (83%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java} (61%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/job/SourceConfig.java (96%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/persistence/DataFramePersistentTaskUtils.java (87%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/persistence/DataframeIndex.java (90%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/rest/action/RestGetDataFrameJobsAction.java (69%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/rest/action/RestGetDataFrameJobsStatsAction.java (69%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java => data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java} (52%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/support/Aggregations.java (96%) rename x-pack/plugin/{ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/main/java/org/elasticsearch/xpack/dataframe}/support/JobValidator.java (97%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/action/DataFrameJobStateAndStatsTests.java (65%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java} (65%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => 
data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/action/GetDataFrameJobsActionRequestTests.java (83%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/action/GetDataFrameJobsStatsActionRequestTests.java (82%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java} (78%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java} (79%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java} (75%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java} (90%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/job/AggregationConfigTests.java (93%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/job/AggregationResultUtilsTests.java (99%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/job/DataFrameIndexerJobStatsTests.java (95%) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java => data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java} (61%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/job/SourceConfigTests.java (92%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/support/AggregationsTests.java (92%) rename x-pack/plugin/{ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder => data-frame/src/test/java/org/elasticsearch/xpack/dataframe}/support/JobValidatorTests.java (86%) delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java delete mode 100644 
x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java delete mode 100644 x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java diff --git a/x-pack/plugin/ml-feature-index-builder/build.gradle b/x-pack/plugin/data-frame/build.gradle similarity index 78% rename from x-pack/plugin/ml-feature-index-builder/build.gradle rename to x-pack/plugin/data-frame/build.gradle index b4706e29487c5..bd047f68e2be9 100644 --- a/x-pack/plugin/ml-feature-index-builder/build.gradle +++ b/x-pack/plugin/data-frame/build.gradle @@ -4,9 +4,9 @@ evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin' esplugin { - name 'ml-feature-index-builder' - description 'A plugin to build feature indexes' - classname 'org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder' + name 'data-frame' + description 'A plugin to build data frames' + classname 'org.elasticsearch.xpack.dataframe.DataFrame' extendedPlugins = ['x-pack-core'] } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java similarity index 58% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index eb6cc2ecb56be..73dfe62960707 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/FeatureIndexBuilder.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder; +package org.elasticsearch.xpack.dataframe; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -36,27 +36,27 @@ import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportDeleteFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportGetDataFrameJobsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportGetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportPutFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStartFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.TransportStopFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobPersistentTasksExecutor; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestDeleteFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestGetDataFrameJobsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestGetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestPutFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStartFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.rest.action.RestStopFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.TransportDeleteDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameJobsAction; +import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.action.TransportPutDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.TransportStartDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.TransportStopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobPersistentTasksExecutor; +import 
org.elasticsearch.xpack.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.dataframe.rest.action.RestDeleteDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestPutDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestStartDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestStopDataFrameJobAction; import java.time.Clock; import java.util.ArrayList; @@ -70,13 +70,13 @@ import static java.util.Collections.emptyList; -public class FeatureIndexBuilder extends Plugin implements ActionPlugin, PersistentTaskPlugin { +public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlugin { - public static final String NAME = "feature_index_builder"; - public static final String TASK_NAME = "xpack/feature_index_builder/job"; - public static final String BASE_PATH = "/_xpack/feature_index_builder/"; + public static final String NAME = "data_frame"; + public static final String TASK_NAME = "data_frame/jobs"; + public static final String BASE_PATH = "/_data_frame/"; public static final String BASE_PATH_JOBS_BY_ID = BASE_PATH + "jobs/{id}/"; - public static final String TASK_THREAD_POOL_NAME = "ml_feature_index_builder_indexing"; + public static final String TASK_THREAD_POOL_NAME = "data_frame_indexing"; // list of headers that will be stored when a job is created public static final Set HEADER_FILTERS = new HashSet<>( @@ -86,7 +86,7 @@ public class FeatureIndexBuilder extends Plugin implements ActionPlugin, Persist private final Settings settings; private final boolean transportClientMode; - public FeatureIndexBuilder(Settings settings) { + public DataFrame(Settings settings) { this.settings = settings; this.enabled = XPackSettings.DATA_FRAME_ENABLED.get(settings); @@ -117,10 +117,10 @@ public List getRestHandlers(final Settings settings, final RestCont } return Arrays.asList( - new RestPutFeatureIndexBuilderJobAction(settings, restController), - new RestStartFeatureIndexBuilderJobAction(settings, restController), - new RestStopFeatureIndexBuilderJobAction(settings, restController), - new RestDeleteFeatureIndexBuilderJobAction(settings, restController), + new RestPutDataFrameJobAction(settings, restController), + new RestStartDataFrameJobAction(settings, restController), + new RestStopDataFrameJobAction(settings, restController), + new RestDeleteDataFrameJobAction(settings, restController), new RestGetDataFrameJobsAction(settings, restController), new RestGetDataFrameJobsStatsAction(settings, restController) ); @@ -133,10 +133,10 @@ public List getRestHandlers(final Settings settings, final RestCont } return Arrays.asList( - new ActionHandler<>(PutFeatureIndexBuilderJobAction.INSTANCE, TransportPutFeatureIndexBuilderJobAction.class), - new ActionHandler<>(StartFeatureIndexBuilderJobAction.INSTANCE, TransportStartFeatureIndexBuilderJobAction.class), - new ActionHandler<>(StopFeatureIndexBuilderJobAction.INSTANCE, TransportStopFeatureIndexBuilderJobAction.class), - new ActionHandler<>(DeleteFeatureIndexBuilderJobAction.INSTANCE, TransportDeleteFeatureIndexBuilderJobAction.class), + new ActionHandler<>(PutDataFrameJobAction.INSTANCE, TransportPutDataFrameJobAction.class), + new ActionHandler<>(StartDataFrameJobAction.INSTANCE, TransportStartDataFrameJobAction.class), + new ActionHandler<>(StopDataFrameJobAction.INSTANCE, 
TransportStopDataFrameJobAction.class), + new ActionHandler<>(DeleteDataFrameJobAction.INSTANCE, TransportDeleteDataFrameJobAction.class), new ActionHandler<>(GetDataFrameJobsAction.INSTANCE, TransportGetDataFrameJobsAction.class), new ActionHandler<>(GetDataFrameJobsStatsAction.INSTANCE, TransportGetDataFrameJobsStatsAction.class) ); @@ -149,7 +149,7 @@ public List> getExecutorBuilders(Settings settings) { } FixedExecutorBuilder indexing = new FixedExecutorBuilder(settings, TASK_THREAD_POOL_NAME, 4, 4, - "xpack.feature_index_builder.task_thread_pool"); + "data_frame.task_thread_pool"); return Collections.singletonList(indexing); } @@ -162,7 +162,7 @@ public List> getPersistentTasksExecutor(ClusterServic } SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); - return Collections.singletonList(new FeatureIndexBuilderJobPersistentTasksExecutor(client, + return Collections.singletonList(new DataFrameJobPersistentTasksExecutor(client, schedulerEngine, threadPool)); } @@ -173,11 +173,11 @@ public List getNamedXContent() { } return Arrays.asList( new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(TASK_NAME), - FeatureIndexBuilderJob::fromXContent), - new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(FeatureIndexBuilderJobState.NAME), - FeatureIndexBuilderJobState::fromXContent), - new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(FeatureIndexBuilderJobState.NAME), - FeatureIndexBuilderJobState::fromXContent) + DataFrameJob::fromXContent), + new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(DataFrameJobState.NAME), + DataFrameJobState::fromXContent), + new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(DataFrameJobState.NAME), + DataFrameJobState::fromXContent) ); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java similarity index 97% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java index b28d5908dd38c..7150a0a8eb836 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/DataFrameFeatureSet.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder; +package org.elasticsearch.xpack.dataframe; import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Nullable; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java similarity index 76% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java index 2a44aace47fd5..442433cc56d4d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStats.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -13,9 +13,9 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobState; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobState; import java.io.IOException; import java.util.Objects; @@ -26,20 +26,20 @@ public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { public static final ParseField STATS_FIELD = new ParseField("stats"); private final String id; - private final FeatureIndexBuilderJobState jobState; + private final DataFrameJobState jobState; private final DataFrameIndexerJobStats jobStats; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( GetDataFrameJobsAction.NAME, - a -> new DataFrameJobStateAndStats((String) a[0], (FeatureIndexBuilderJobState) a[1], (DataFrameIndexerJobStats) a[2])); + a -> new DataFrameJobStateAndStats((String) a[0], (DataFrameJobState) a[1], (DataFrameIndexerJobStats) a[2])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FeatureIndexBuilderJob.ID); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), FeatureIndexBuilderJobState.PARSER::apply, STATE_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameJob.ID); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameJobState.PARSER::apply, STATE_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), STATS_FIELD); } - public DataFrameJobStateAndStats(String id, FeatureIndexBuilderJobState state, DataFrameIndexerJobStats stats) { + public DataFrameJobStateAndStats(String id, DataFrameJobState state, DataFrameIndexerJobStats stats) { this.id = 
Objects.requireNonNull(id); this.jobState = Objects.requireNonNull(state); this.jobStats = Objects.requireNonNull(stats); @@ -47,14 +47,14 @@ public DataFrameJobStateAndStats(String id, FeatureIndexBuilderJobState state, D public DataFrameJobStateAndStats(StreamInput in) throws IOException { this.id = in.readString(); - this.jobState = new FeatureIndexBuilderJobState(in); + this.jobState = new DataFrameJobState(in); this.jobStats = new DataFrameIndexerJobStats(in); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); builder.field(STATE_FIELD.getPreferredName(), jobState); builder.field(STATS_FIELD.getPreferredName(), jobStats); builder.endObject(); @@ -97,7 +97,7 @@ public DataFrameIndexerJobStats getJobStats() { return jobStats; } - public FeatureIndexBuilderJobState getJobState() { + public DataFrameJobState getJobState() { return jobState; } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java similarity index 79% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java index bdb4ef32a5d2c..7ccf5c6f95f89 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; @@ -16,17 +16,17 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Objects; -public class DeleteFeatureIndexBuilderJobAction extends Action { +public class DeleteDataFrameJobAction extends Action { - public static final DeleteFeatureIndexBuilderJobAction INSTANCE = new DeleteFeatureIndexBuilderJobAction(); - public static final String NAME = "cluster:admin/xpack/feature_index_builder/delete"; + public static final DeleteDataFrameJobAction INSTANCE = new DeleteDataFrameJobAction(); + public static final String NAME = "cluster:admin/data_frame/delete"; - private DeleteFeatureIndexBuilderJobAction() { + private DeleteDataFrameJobAction() { super(NAME); } @@ -39,7 +39,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private String id; public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); } public Request() { @@ -68,7 +68,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); return builder; } @@ -93,7 +93,7 @@ public boolean equals(Object obj) { public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, DeleteFeatureIndexBuilderJobAction action) { + protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameJobAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java similarity index 84% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java index 9e87008c6d2cc..75bf6ace6c0ea 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; @@ -24,8 +24,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import java.io.IOException; import java.util.Collections; @@ -65,10 +65,10 @@ public Request() {} public boolean match(Task task) { // If we are retrieving all the jobs, the task description does not contain the id if (id.equals(MetaData.ALL)) { - return task.getDescription().startsWith(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); + return task.getDescription().startsWith(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); } // Otherwise find the task by ID - return task.getDescription().equals(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + return task.getDescription().equals(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); } public String getId() { @@ -94,7 +94,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); return builder; } @@ -125,17 +125,17 @@ protected RequestBuilder(ElasticsearchClient client, GetDataFrameJobsAction acti public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { - private List jobConfigurations; + private List jobConfigurations; - public Response(List jobConfigs) { + public Response(List jobConfigs) { super(Collections.emptyList(), Collections.emptyList()); this.jobConfigurations = jobConfigs; } - public Response(List jobResponses, List taskFailures, + public Response(List jobConfigs, List taskFailures, List nodeFailures) { super(taskFailures, nodeFailures); - this.jobConfigurations = jobResponses; + this.jobConfigurations = jobConfigs; } public Response() { @@ -147,14 +147,14 @@ public Response(StreamInput in) throws IOException { readFrom(in); } - public List getJobConfigurations() { + public List getJobConfigurations() { return jobConfigurations; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - jobConfigurations = in.readList(FeatureIndexBuilderJobConfig::new); + jobConfigurations = in.readList(DataFrameJobConfig::new); } @Override @@ -170,7 +170,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws // XContentBuilder does not support passing the params object for Iterables builder.field(JOBS.getPreferredName()); builder.startArray(); - for (FeatureIndexBuilderJobConfig jobResponse : jobConfigurations) { + for (DataFrameJobConfig jobResponse : jobConfigurations) { jobResponse.toXContent(builder, params); } builder.endArray(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java similarity index 93% 
rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java index be2b70084676e..8538476262cea 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; @@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Collections; @@ -64,10 +64,10 @@ public Request() {} public boolean match(Task task) { // If we are retrieving all the jobs, the task description does not contain the id if (id.equals(MetaData.ALL)) { - return task.getDescription().startsWith(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); + return task.getDescription().startsWith(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX); } // Otherwise find the task by ID - return task.getDescription().equals(FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + return task.getDescription().equals(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); } public String getId() { @@ -93,7 +93,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); return builder; } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java similarity index 76% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java index a05b526483bec..946522fe9a199 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; @@ -17,17 +17,17 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import java.io.IOException; import java.util.Objects; -public class PutFeatureIndexBuilderJobAction extends Action { +public class PutDataFrameJobAction extends Action { - public static final PutFeatureIndexBuilderJobAction INSTANCE = new PutFeatureIndexBuilderJobAction(); - public static final String NAME = "cluster:admin/xpack/feature_index_builder/put"; + public static final PutDataFrameJobAction INSTANCE = new PutDataFrameJobAction(); + public static final String NAME = "cluster:admin/data_frame/put"; - private PutFeatureIndexBuilderJobAction() { + private PutDataFrameJobAction() { super(NAME); } @@ -38,9 +38,9 @@ public Response newResponse() { public static class Request extends AcknowledgedRequest implements ToXContentObject { - private FeatureIndexBuilderJobConfig config; + private DataFrameJobConfig config; - public Request(FeatureIndexBuilderJobConfig config) { + public Request(DataFrameJobConfig config) { this.setConfig(config); } @@ -49,7 +49,7 @@ public Request() { } public static Request fromXContent(final XContentParser parser, final String id) throws IOException { - return new Request(FeatureIndexBuilderJobConfig.fromXContent(parser, id)); + return new Request(DataFrameJobConfig.fromXContent(parser, id)); } @Override @@ -62,18 +62,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return this.config.toXContent(builder, params); } - public FeatureIndexBuilderJobConfig getConfig() { + public DataFrameJobConfig getConfig() { return config; } - public void setConfig(FeatureIndexBuilderJobConfig config) { + public void setConfig(DataFrameJobConfig config) { this.config = config; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - this.config = new FeatureIndexBuilderJobConfig(in); + this.config = new DataFrameJobConfig(in); } @Override @@ -102,7 +102,7 @@ public boolean equals(Object obj) { public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, PutFeatureIndexBuilderJobAction action) { + protected RequestBuilder(ElasticsearchClient client, PutDataFrameJobAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java similarity index 85% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java index 6fcfdf4e8aa71..f43aaf4f85f5f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobAction.java +++ 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; @@ -19,18 +19,18 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Collections; import java.util.Objects; -public class StartFeatureIndexBuilderJobAction extends Action { +public class StartDataFrameJobAction extends Action { - public static final StartFeatureIndexBuilderJobAction INSTANCE = new StartFeatureIndexBuilderJobAction(); - public static final String NAME = "cluster:admin/xpack/feature_index_builder/start"; + public static final StartDataFrameJobAction INSTANCE = new StartDataFrameJobAction(); + public static final String NAME = "cluster:admin/data_frame/start"; - private StartFeatureIndexBuilderJobAction() { + private StartDataFrameJobAction() { super(NAME); } @@ -43,7 +43,7 @@ public static class Request extends BaseTasksRequest implements ToXCont private String id; public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); } public Request() { @@ -72,7 +72,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); return builder; } @@ -96,7 +96,7 @@ public boolean equals(Object obj) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, StartFeatureIndexBuilderJobAction action) { + protected RequestBuilder(ElasticsearchClient client, StartDataFrameJobAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java similarity index 84% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java index 49f3db0346246..c2f5349fe73f9 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; @@ -20,18 +20,18 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Collections; import java.util.Objects; -public class StopFeatureIndexBuilderJobAction extends Action { +public class StopDataFrameJobAction extends Action { - public static final StopFeatureIndexBuilderJobAction INSTANCE = new StopFeatureIndexBuilderJobAction(); - public static final String NAME = "cluster:admin/xpack/feature_index_builder/stop"; + public static final StopDataFrameJobAction INSTANCE = new StopDataFrameJobAction(); + public static final String NAME = "cluster:admin/data_frame/stop"; - private StopFeatureIndexBuilderJobAction() { + private StopDataFrameJobAction() { super(NAME); } @@ -46,11 +46,11 @@ public static class Request extends BaseTasksRequest implements ToXCont public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); static { - PARSER.declareString(Request::setId, FeatureIndexBuilderJob.ID); + PARSER.declareString(Request::setId, DataFrameJob.ID); } public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, FeatureIndexBuilderJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); } public Request() { @@ -83,7 +83,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(FeatureIndexBuilderJob.ID.getPreferredName(), id); + builder.field(DataFrameJob.ID.getPreferredName(), id); return builder; } @@ -107,7 +107,7 @@ public boolean equals(Object obj) { @Override public boolean match(Task task) { - String expectedDescription = FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id; + String expectedDescription = DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id; return task.getDescription().equals(expectedDescription); } @@ -115,7 +115,7 @@ public boolean match(Task task) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, StopFeatureIndexBuilderJobAction action) { + protected RequestBuilder(ElasticsearchClient client, StopDataFrameJobAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java similarity index 72% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java index 52bc39dbe8ae6..c6a2fe8094b20 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportDeleteFeatureIndexBuilderJobAction.java +++ 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -23,23 +23,23 @@ import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.util.Objects; import java.util.concurrent.TimeUnit; -public class TransportDeleteFeatureIndexBuilderJobAction - extends TransportMasterNodeAction { +public class TransportDeleteDataFrameJobAction + extends TransportMasterNodeAction { private final PersistentTasksService persistentTasksService; - private static final Logger logger = LogManager.getLogger(TransportDeleteFeatureIndexBuilderJobAction.class); + private static final Logger logger = LogManager.getLogger(TransportDeleteDataFrameJobAction.class); @Inject - public TransportDeleteFeatureIndexBuilderJobAction(TransportService transportService, ThreadPool threadPool, + public TransportDeleteDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, ClusterService clusterService) { - super(DeleteFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, DeleteFeatureIndexBuilderJobAction.Request::new); + super(DeleteDataFrameJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, DeleteDataFrameJobAction.Request::new); this.persistentTasksService = persistentTasksService; } @@ -54,7 +54,7 @@ protected AcknowledgedResponse newResponse() { } @Override - protected void masterOperation(DeleteFeatureIndexBuilderJobAction.Request request, ClusterState state, + protected void masterOperation(DeleteDataFrameJobAction.Request request, ClusterState state, ActionListener listener) throws Exception { String jobId = request.getId(); @@ -64,27 +64,27 @@ protected void masterOperation(DeleteFeatureIndexBuilderJobAction.Request reques persistentTasksService.sendRemoveRequest(jobId, new ActionListener>() { @Override public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { - logger.debug("Request to cancel Task for Feature Index Builder job [" + jobId + "] successful."); + logger.debug("Request to cancel Task for data frame job [" + jobId + "] successful."); // Step 2. 
Wait for the task to finish cancellation internally persistentTasksService.waitForPersistentTaskCondition(jobId, Objects::isNull, timeout, - new PersistentTasksService.WaitForPersistentTaskListener() { + new PersistentTasksService.WaitForPersistentTaskListener() { @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - logger.debug("Task for Feature Index Builder job [" + jobId + "] successfully canceled."); + public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { + logger.debug("Task for data frame job [" + jobId + "] successfully canceled."); listener.onResponse(new AcknowledgedResponse(true)); } @Override public void onFailure(Exception e) { - logger.error("Error while cancelling task for Feature Index Builder job [" + jobId + logger.error("Error while cancelling task for data frame job [" + jobId + "]." + e); listener.onFailure(e); } @Override public void onTimeout(TimeValue timeout) { - String msg = "Stopping of Feature Index Builder job [" + jobId + "] timed out after [" + timeout + "]."; + String msg = "Stopping of data frame job [" + jobId + "] timed out after [" + timeout + "]."; logger.warn(msg); listener.onFailure(new ElasticsearchException(msg)); } @@ -93,7 +93,7 @@ public void onTimeout(TimeValue timeout) { @Override public void onFailure(Exception e) { - logger.error("Error while requesting to cancel task for Feature Index Builder job [" + jobId + "]" + e); + logger.error("Error while requesting to cancel task for data frame job [" + jobId + "]" + e); listener.onFailure(e); } }); @@ -101,7 +101,7 @@ public void onFailure(Exception e) { } @Override - protected ClusterBlockException checkBlock(DeleteFeatureIndexBuilderJobAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(DeleteDataFrameJobAction.Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java similarity index 81% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java index 46b44c70c5fb8..aaf1bdab1f139 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -22,11 +22,11 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Request; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Response; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; -import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataFramePersistentTaskUtils; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Response; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; import java.io.IOException; import java.util.Collection; @@ -35,7 +35,7 @@ import java.util.stream.Collectors; public class TransportGetDataFrameJobsAction extends - TransportTasksAction { @@ -49,7 +49,7 @@ public TransportGetDataFrameJobsAction(TransportService transportService, Action @Override protected Response newResponse(Request request, List tasks, List taskOperationFailures, List failedNodeExceptions) { - List configs = tasks.stream().map(GetDataFrameJobsAction.Response::getJobConfigurations) + List configs = tasks.stream().map(GetDataFrameJobsAction.Response::getJobConfigurations) .flatMap(Collection::stream).collect(Collectors.toList()); return new Response(configs, taskOperationFailures, failedNodeExceptions); } @@ -60,8 +60,8 @@ protected Response readTaskResponse(StreamInput in) throws IOException { } @Override - protected void taskOperation(Request request, FeatureIndexBuilderJobTask task, ActionListener listener) { - List configs = Collections.emptyList(); + protected void taskOperation(Request request, DataFrameJobTask task, ActionListener listener) { + List configs = Collections.emptyList(); assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java similarity index 87% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java index efb90a601a596..8f24186156025 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -22,10 +22,10 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Request; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Response; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; -import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataFramePersistentTaskUtils; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Response; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; import java.io.IOException; import java.util.Collection; @@ -34,7 +34,7 @@ import java.util.stream.Collectors; public class TransportGetDataFrameJobsStatsAction extends - TransportTasksAction { @@ -60,7 +60,7 @@ protected Response readTaskResponse(StreamInput in) throws IOException { } @Override - protected void taskOperation(Request request, FeatureIndexBuilderJobTask task, ActionListener listener) { + protected void taskOperation(Request request, DataFrameJobTask task, ActionListener listener) { List jobsStateAndStats = Collections.emptyList(); assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java similarity index 63% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java index 166745e809bd3..25bbc2373ba31 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportPutFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -23,26 +23,26 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Response; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; -import org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataframeIndex; -import org.elasticsearch.xpack.ml.featureindexbuilder.support.JobValidator; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Response; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.persistence.DataframeIndex; +import org.elasticsearch.xpack.dataframe.support.JobValidator; -public class TransportPutFeatureIndexBuilderJobAction - extends TransportMasterNodeAction { +public class TransportPutDataFrameJobAction + extends TransportMasterNodeAction { private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; private final Client client; @Inject - public TransportPutFeatureIndexBuilderJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, + public TransportPutDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, PersistentTasksService persistentTasksService, Client client) { - super(PutFeatureIndexBuilderJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, PutFeatureIndexBuilderJobAction.Request::new); + super(PutDataFrameJobAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, PutDataFrameJobAction.Request::new); this.licenseState = licenseState; this.persistentTasksService = persistentTasksService; this.client = client; @@ -54,8 +54,8 @@ protected String executor() { } @Override - protected PutFeatureIndexBuilderJobAction.Response newResponse() { - return new PutFeatureIndexBuilderJobAction.Response(); + protected PutDataFrameJobAction.Response newResponse() { + return new PutDataFrameJobAction.Response(); } @Override @@ -72,7 +72,7 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio jobCreator.validate(ActionListener.wrap(validationResult -> { jobCreator.deduceMappings(ActionListener.wrap(mappings -> { - FeatureIndexBuilderJob job = createFeatureIndexBuilderJob(request.getConfig(), threadPool); + DataFrameJob job = createDataFrameJob(request.getConfig(), threadPool); DataframeIndex.createDestinationIndex(client, job, mappings, ActionListener.wrap(createIndexResult -> { startPersistentTask(job, listener, persistentTasksService); }, e3 -> { @@ -86,23 +86,23 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio })); } 
- private static FeatureIndexBuilderJob createFeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config, ThreadPool threadPool) { - return new FeatureIndexBuilderJob(config); + private static DataFrameJob createDataFrameJob(DataFrameJobConfig config, ThreadPool threadPool) { + return new DataFrameJob(config); } - static void startPersistentTask(FeatureIndexBuilderJob job, ActionListener listener, + static void startPersistentTask(DataFrameJob job, ActionListener listener, PersistentTasksService persistentTasksService) { - persistentTasksService.sendStartRequest(job.getConfig().getId(), FeatureIndexBuilderJob.NAME, job, + persistentTasksService.sendStartRequest(job.getConfig().getId(), DataFrameJob.NAME, job, ActionListener.wrap(persistentTask -> { - listener.onResponse(new PutFeatureIndexBuilderJobAction.Response(true)); + listener.onResponse(new PutDataFrameJobAction.Response(true)); }, e -> { listener.onFailure(e); })); } @Override - protected ClusterBlockException checkBlock(PutFeatureIndexBuilderJobAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(PutDataFrameJobAction.Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java similarity index 54% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java index 6a4d4e1656397..12a419af5205b 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -21,39 +21,39 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; import java.io.IOException; import java.util.List; import java.util.function.Consumer; -public class TransportStartFeatureIndexBuilderJobAction extends - TransportTasksAction { +public class TransportStartDataFrameJobAction extends + TransportTasksAction { private final XPackLicenseState licenseState; @Inject - public TransportStartFeatureIndexBuilderJobAction(TransportService transportService, ActionFilters actionFilters, + public TransportStartDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, XPackLicenseState licenseState) { - super(StartFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, - StartFeatureIndexBuilderJobAction.Request::new, StartFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); + super(StartDataFrameJobAction.NAME, clusterService, transportService, actionFilters, + StartDataFrameJobAction.Request::new, StartDataFrameJobAction.Response::new, ThreadPool.Names.SAME); this.licenseState = licenseState; } @Override - protected void processTasks(StartFeatureIndexBuilderJobAction.Request request, Consumer operation) { - FeatureIndexBuilderJobTask matchingTask = null; + protected void processTasks(StartDataFrameJobAction.Request request, Consumer operation) { + DataFrameJobTask matchingTask = null; // todo: re-factor, see rollup TransportTaskHelper for (Task task : taskManager.getTasks().values()) { - if (task instanceof FeatureIndexBuilderJobTask - && ((FeatureIndexBuilderJobTask) task).getConfig().getId().equals(request.getId())) { + if (task instanceof DataFrameJobTask + && ((DataFrameJobTask) task).getConfig().getId().equals(request.getId())) { if (matchingTask != null) { - throw new IllegalArgumentException("Found more than one matching task for feature index builder job [" + request.getId() + throw new IllegalArgumentException("Found more than one matching task for data frame job [" + request.getId() + "] when " + "there should only be one."); } - matchingTask = (FeatureIndexBuilderJobTask) task; + matchingTask = (DataFrameJobTask) task; } } @@ -63,8 +63,8 @@ protected void processTasks(StartFeatureIndexBuilderJobAction.Request request, C } @Override - protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request request, - ActionListener listener) { + protected void doExecute(Task task, StartDataFrameJobAction.Request request, + ActionListener listener) { if (!licenseState.isDataFrameAllowed()) { listener.onFailure(LicenseUtils.newComplianceException(XPackField.DATA_FRAME)); @@ -75,19 +75,19 @@ protected void doExecute(Task task, StartFeatureIndexBuilderJobAction.Request re } @Override - protected void taskOperation(StartFeatureIndexBuilderJobAction.Request request, FeatureIndexBuilderJobTask jobTask, - ActionListener listener) { + protected void taskOperation(StartDataFrameJobAction.Request request, DataFrameJobTask jobTask, + ActionListener listener) { if (jobTask.getConfig().getId().equals(request.getId())) { 
jobTask.start(listener); } else { - listener.onFailure(new RuntimeException("ID of FeatureIndexBuilder task [" + jobTask.getConfig().getId() + listener.onFailure(new RuntimeException("ID of data frame job task [" + jobTask.getConfig().getId() + "] does not match request's ID [" + request.getId() + "]")); } } @Override - protected StartFeatureIndexBuilderJobAction.Response newResponse(StartFeatureIndexBuilderJobAction.Request request, - List tasks, List taskOperationFailures, + protected StartDataFrameJobAction.Response newResponse(StartDataFrameJobAction.Request request, + List tasks, List taskOperationFailures, List failedNodeExceptions) { if (taskOperationFailures.isEmpty() == false) { @@ -100,18 +100,18 @@ protected StartFeatureIndexBuilderJobAction.Response newResponse(StartFeatureInd // after the StartAPI executed. // In either case, let the user know if (tasks.size() == 0) { - throw new ResourceNotFoundException("Task for FeatureIndexBuilder Job [" + request.getId() + "] not found"); + throw new ResourceNotFoundException("Task for data frame job [" + request.getId() + "] not found"); } assert tasks.size() == 1; - boolean allStarted = tasks.stream().allMatch(StartFeatureIndexBuilderJobAction.Response::isStarted); - return new StartFeatureIndexBuilderJobAction.Response(allStarted); + boolean allStarted = tasks.stream().allMatch(StartDataFrameJobAction.Response::isStarted); + return new StartDataFrameJobAction.Response(allStarted); } @Override - protected StartFeatureIndexBuilderJobAction.Response readTaskResponse(StreamInput in) throws IOException { - return new StartFeatureIndexBuilderJobAction.Response(in); + protected StartDataFrameJobAction.Response readTaskResponse(StreamInput in) throws IOException { + return new StartDataFrameJobAction.Response(in); } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java similarity index 50% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java index c4fb95cdca945..f981f6872013a 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/TransportStopFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; @@ -18,42 +18,42 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobTask; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; import java.io.IOException; import java.util.List; -public class TransportStopFeatureIndexBuilderJobAction extends - TransportTasksAction { +public class TransportStopDataFrameJobAction extends + TransportTasksAction { @Inject - public TransportStopFeatureIndexBuilderJobAction(TransportService transportService, ActionFilters actionFilters, + public TransportStopDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(StopFeatureIndexBuilderJobAction.NAME, clusterService, transportService, actionFilters, - StopFeatureIndexBuilderJobAction.Request::new, StopFeatureIndexBuilderJobAction.Response::new, ThreadPool.Names.SAME); + super(StopDataFrameJobAction.NAME, clusterService, transportService, actionFilters, + StopDataFrameJobAction.Request::new, StopDataFrameJobAction.Response::new, ThreadPool.Names.SAME); } @Override - protected void doExecute(Task task, StopFeatureIndexBuilderJobAction.Request request, - ActionListener listener) { + protected void doExecute(Task task, StopDataFrameJobAction.Request request, + ActionListener listener) { super.doExecute(task, request, listener); } @Override - protected void taskOperation(StopFeatureIndexBuilderJobAction.Request request, FeatureIndexBuilderJobTask jobTask, - ActionListener listener) { + protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJobTask jobTask, + ActionListener listener) { if (jobTask.getConfig().getId().equals(request.getId())) { jobTask.stop(listener); } else { - listener.onFailure(new RuntimeException("ID of feature index builder task [" + jobTask.getConfig().getId() + listener.onFailure(new RuntimeException("ID of data frame indexer task [" + jobTask.getConfig().getId() + "] does not match request's ID [" + request.getId() + "]")); } } @Override - protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndexBuilderJobAction.Request request, - List tasks, List taskOperationFailures, + protected StopDataFrameJobAction.Response newResponse(StopDataFrameJobAction.Request request, + List tasks, List taskOperationFailures, List failedNodeExceptions) { if (taskOperationFailures.isEmpty() == false) { @@ -66,17 +66,17 @@ protected StopFeatureIndexBuilderJobAction.Response newResponse(StopFeatureIndex // after the Stop API executed. 
// In either case, let the user know if (tasks.size() == 0) { - throw new ResourceNotFoundException("Task for Feature Index Builder Job [" + request.getId() + "] not found"); + throw new ResourceNotFoundException("Task for Data Frame Job [" + request.getId() + "] not found"); } assert tasks.size() == 1; - boolean allStopped = tasks.stream().allMatch(StopFeatureIndexBuilderJobAction.Response::isStopped); - return new StopFeatureIndexBuilderJobAction.Response(allStopped); + boolean allStopped = tasks.stream().allMatch(StopDataFrameJobAction.Response::isStopped); + return new StopDataFrameJobAction.Response(allStopped); } @Override - protected StopFeatureIndexBuilderJobAction.Response readTaskResponse(StreamInput in) throws IOException { - return new StopFeatureIndexBuilderJobAction.Response(in); + protected StopDataFrameJobAction.Response readTaskResponse(StreamInput in) throws IOException { + return new StopDataFrameJobAction.Response(in); } } \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java similarity index 97% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java index 5e20d4d258997..363ec245fec8f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java similarity index 98% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java index 63b06cb369ba1..e70f3503f9b00 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java similarity index 89% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java index e3d3de8c432d0..22dc9facb109c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -34,15 +34,15 @@ import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.ml.featureindexbuilder.persistence.DataframeIndex.DOC_TYPE; +import static org.elasticsearch.xpack.dataframe.persistence.DataframeIndex.DOC_TYPE; -public abstract class FeatureIndexBuilderIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerJobStats> { +public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerJobStats> { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; - private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderIndexer.class); - private FeatureIndexBuilderJob job; + private static final Logger logger = LogManager.getLogger(DataFrameIndexer.class); + private DataFrameJob job; - public FeatureIndexBuilderIndexer(Executor executor, FeatureIndexBuilderJob job, AtomicReference initialState, + public DataFrameIndexer(Executor executor, DataFrameJob job, AtomicReference initialState, Map initialPosition) { super(executor, initialState, initialPosition, new DataFrameIndexerJobStats()); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java similarity index 98% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java index 7cf5f430db25d..82dce41296d59 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStats.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java similarity index 70% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java index 0857bddab9731..7a8c1a0e36b16 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJob.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; @@ -14,38 +14,39 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.dataframe.DataFrame; import java.io.IOException; import java.util.Collections; import java.util.Map; import java.util.Objects; -public class FeatureIndexBuilderJob implements XPackPlugin.XPackPersistentTaskParams { +public class DataFrameJob implements XPackPlugin.XPackPersistentTaskParams { public static final ParseField ID = new ParseField("id"); - public static final String NAME = "xpack/feature_index_builder/job"; + public static final String NAME = DataFrame.TASK_NAME; // note: this is used to match tasks - public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "feature_index_builder-"; + public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; - private FeatureIndexBuilderJobConfig config; + private DataFrameJobConfig config; private static final ParseField CONFIG = new ParseField("config"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - a -> new FeatureIndexBuilderJob((FeatureIndexBuilderJobConfig) a[0])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + a -> new DataFrameJob((DataFrameJobConfig) a[0])); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> FeatureIndexBuilderJobConfig.fromXContent(p, null), + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameJobConfig.fromXContent(p, null), CONFIG); } - public FeatureIndexBuilderJob(FeatureIndexBuilderJobConfig config) { + public DataFrameJob(DataFrameJobConfig config) { this.config = Objects.requireNonNull(config); } - public FeatureIndexBuilderJob(StreamInput in) throws IOException { - this.config = new FeatureIndexBuilderJobConfig(in); + public DataFrameJob(StreamInput in) throws IOException { + this.config = new DataFrameJobConfig(in); } @Override @@ -72,11 +73,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public FeatureIndexBuilderJobConfig getConfig() { + public DataFrameJobConfig getConfig() { return config; 
} - public static FeatureIndexBuilderJob fromXContent(XContentParser parser) throws IOException { + public static DataFrameJob fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } @@ -90,7 +91,7 @@ public boolean equals(Object other) { return false; } - FeatureIndexBuilderJob that = (FeatureIndexBuilderJob) other; + DataFrameJob that = (DataFrameJob) other; return Objects.equals(this.config, that.config); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java similarity index 86% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java index e450b0870fffc..9f58a7305076f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -24,11 +24,11 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** - * This class holds the configuration details of a feature index builder job + * This class holds the configuration details of a data frame job */ -public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentObject { +public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { - private static final String NAME = "xpack/feature_index_builder/jobconfig"; + private static final String NAME = "xpack/data_frame/jobconfig"; private static final ParseField ID = new ParseField("id"); private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); private static final ParseField DESTINATION_INDEX = new ParseField("destination_index"); @@ -41,14 +41,14 @@ public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentO private final SourceConfig sourceConfig; private final AggregationConfig aggregationConfig; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, (args, optionalId) -> { String id = args[0] != null ? 
(String) args[0] : optionalId; String indexPattern = (String) args[1]; String destinationIndex = (String) args[2]; SourceConfig sourceConfig= (SourceConfig) args[3]; AggregationConfig aggregationConfig = (AggregationConfig) args[4]; - return new FeatureIndexBuilderJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); + return new DataFrameJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); }); static { @@ -59,7 +59,7 @@ public class FeatureIndexBuilderJobConfig implements NamedWriteable, ToXContentO PARSER.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); } - public FeatureIndexBuilderJobConfig(final String id, + public DataFrameJobConfig(final String id, final String indexPattern, final String destinationIndex, final SourceConfig sourceConfig, @@ -73,7 +73,7 @@ public FeatureIndexBuilderJobConfig(final String id, this.aggregationConfig = aggregationConfig; } - public FeatureIndexBuilderJobConfig(final StreamInput in) throws IOException { + public DataFrameJobConfig(final StreamInput in) throws IOException { id = in.readString(); indexPattern = in.readString(); destinationIndex = in.readString(); @@ -143,7 +143,7 @@ public boolean equals(Object other) { return false; } - final FeatureIndexBuilderJobConfig that = (FeatureIndexBuilderJobConfig) other; + final DataFrameJobConfig that = (DataFrameJobConfig) other; return Objects.equals(this.id, that.id) && Objects.equals(this.indexPattern, that.indexPattern) @@ -162,7 +162,7 @@ public String toString() { return Strings.toString(this, true, true); } - public static FeatureIndexBuilderJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) + public static DataFrameJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException { return PARSER.parse(parser, optionalJobId); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java similarity index 59% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java index 0edd5ceb0084c..ba9d3344fa015 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
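Purely for orientation, a hedged sketch of how the renamed pieces above fit together: a DataFrameJobConfig is built (constructor shape exactly as in this hunk), wrapped in a DataFrameJob, and handed to the persistent tasks service the same way startPersistentTask does earlier in this patch. The job id, index names, and the null SourceConfig/AggregationConfig arguments are placeholders, since their builders are not part of these hunks.

```java
// Sketch only: mirrors DataFrameJobConfig, DataFrameJob and startPersistentTask as they
// appear in this patch. "my_job", the index names and the null source/aggregation
// configs are made-up placeholders, not values from the original change.
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.persistent.PersistentTasksService;
import org.elasticsearch.xpack.dataframe.job.DataFrameJob;
import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig;

public final class DataFrameJobSketch {

    static void createAndStart(PersistentTasksService persistentTasksService) {
        DataFrameJobConfig config = new DataFrameJobConfig(
                "my_job",        // id
                "src-*",         // index_pattern
                "my_job_dest",   // destination_index
                null,            // SourceConfig: built elsewhere, not shown in these hunks
                null);           // AggregationConfig: optional per the parser

        DataFrameJob job = new DataFrameJob(config);

        // Same call that TransportPutDataFrameJobAction#startPersistentTask issues.
        persistentTasksService.sendStartRequest(job.getConfig().getId(), DataFrameJob.NAME, job,
                ActionListener.wrap(
                        persistentTask -> System.out.println("data frame job task started"),
                        e -> System.err.println("failed to start data frame job task: " + e)));
    }
}
```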
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -17,31 +17,31 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.dataframe.DataFrame; import java.util.Map; -public class FeatureIndexBuilderJobPersistentTasksExecutor extends PersistentTasksExecutor { +public class DataFrameJobPersistentTasksExecutor extends PersistentTasksExecutor { - private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderJobPersistentTasksExecutor.class); + private static final Logger logger = LogManager.getLogger(DataFrameJobPersistentTasksExecutor.class); private final Client client; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public FeatureIndexBuilderJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, + public DataFrameJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { - super(FeatureIndexBuilder.TASK_NAME, FeatureIndexBuilder.TASK_THREAD_POOL_NAME); + super(DataFrame.TASK_NAME, DataFrame.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; } @Override - protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureIndexBuilderJob params, PersistentTaskState state) { - FeatureIndexBuilderJobTask buildTask = (FeatureIndexBuilderJobTask) task; + protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameJob params, PersistentTaskState state) { + DataFrameJobTask buildTask = (DataFrameJobTask) task; SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job( - FeatureIndexBuilderJobTask.SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); + DataFrameJobTask.SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); // Note that while the task is added to the scheduler here, the internal state // will prevent @@ -49,7 +49,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable FeatureInde schedulerEngine.register(buildTask); schedulerEngine.add(schedulerJob); - logger.info("FeatureIndexBuilder job [" + params.getConfig().getId() + "] created."); + logger.info("Data frame job [" + params.getConfig().getId() + "] created."); } static SchedulerEngine.Schedule next() { @@ -60,8 +60,8 @@ static SchedulerEngine.Schedule next() { @Override protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { - return new FeatureIndexBuilderJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (FeatureIndexBuilderJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { + return new DataFrameJobTask(id, type, action, parentTaskId, persistentTask.getParams(), + (DataFrameJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); } } \ No newline at end of file diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java similarity index 83% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java index e4731366d1861..dd7ba01b2aad1 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobState.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -29,8 +29,8 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskState { - public static final String NAME = "xpack/feature_index_builder/job"; +public class DataFrameJobState implements Task.Status, PersistentTaskState { + public static final String NAME = "xpack/data_frame/job_state"; private final IndexerState state; @@ -41,8 +41,8 @@ public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskS private static final ParseField CURRENT_POSITION = new ParseField("current_position"); @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - args -> new FeatureIndexBuilderJobState((IndexerState) args[0], (HashMap) args[1])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> new DataFrameJobState((IndexerState) args[0], (HashMap) args[1])); static { PARSER.declareField(constructorArg(), p -> { @@ -63,12 +63,12 @@ public class FeatureIndexBuilderJobState implements Task.Status, PersistentTaskS }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY); } - public FeatureIndexBuilderJobState(IndexerState state, @Nullable Map position) { + public DataFrameJobState(IndexerState state, @Nullable Map position) { this.state = state; this.currentPosition = position == null ? null : Collections.unmodifiableSortedMap(new TreeMap<>(position)); } - public FeatureIndexBuilderJobState(StreamInput in) throws IOException { + public DataFrameJobState(StreamInput in) throws IOException { state = IndexerState.fromStream(in); currentPosition = in.readBoolean() ? 
Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())) : null; } @@ -81,7 +81,7 @@ public Map getPosition() { return currentPosition; } - public static FeatureIndexBuilderJobState fromXContent(XContentParser parser) { + public static DataFrameJobState fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { @@ -124,7 +124,7 @@ public boolean equals(Object other) { return false; } - FeatureIndexBuilderJobState that = (FeatureIndexBuilderJobState) other; + DataFrameJobState that = (DataFrameJobState) other; return Objects.equals(this.state, that.state) && Objects.equals(this.currentPosition, that.currentPosition); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java similarity index 61% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 659cec9fbb8ae..6e6e519e1fcf7 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -23,42 +23,42 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction.Response; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Response; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -public class FeatureIndexBuilderJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { +public class DataFrameJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { - private static final Logger logger = LogManager.getLogger(FeatureIndexBuilderJobTask.class); + private static final Logger logger = LogManager.getLogger(DataFrameJobTask.class); - private final FeatureIndexBuilderJob job; + private final DataFrameJob job; private final ThreadPool threadPool; - private final FeatureIndexBuilderIndexer indexer; + private final DataFrameIndexer indexer; - static final String SCHEDULE_NAME = "xpack/feature_index_builder/job" + "/schedule"; + static final String SCHEDULE_NAME = "xpack/data_frame/job" + "/schedule"; - public FeatureIndexBuilderJobTask(long id, String type, String action, TaskId parentTask, FeatureIndexBuilderJob job, - FeatureIndexBuilderJobState state, Client client, SchedulerEngine 
schedulerEngine, ThreadPool threadPool, + public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, + DataFrameJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { - super(id, type, action, FeatureIndexBuilderJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); + super(id, type, action, DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); this.job = job; this.threadPool = threadPool; logger.info("construct job task"); // todo: simplistic implementation for now IndexerState initialState = IndexerState.STOPPED; Map initialPosition = null; - this.indexer = new ClientFeatureIndexBuilderIndexer(job, new AtomicReference<>(initialState), initialPosition, client); + this.indexer = new ClientDataFrameIndexer(job, new AtomicReference<>(initialState), initialPosition, client); } - public FeatureIndexBuilderJobConfig getConfig() { + public DataFrameJobConfig getConfig() { return job.getConfig(); } - public FeatureIndexBuilderJobState getState() { - return new FeatureIndexBuilderJobState(indexer.getState(), indexer.getPosition()); + public DataFrameJobState getState() { + return new DataFrameJobState(indexer.getState(), indexer.getPosition()); } public DataFrameIndexerJobStats getStats() { @@ -68,28 +68,28 @@ public DataFrameIndexerJobStats getStats() { public synchronized void start(ActionListener listener) { // TODO: safeguards missing, see rollup code indexer.start(); - listener.onResponse(new StartFeatureIndexBuilderJobAction.Response(true)); + listener.onResponse(new StartDataFrameJobAction.Response(true)); } - public void stop(ActionListener listener) { + public void stop(ActionListener listener) { // TODO: safeguards missing, see rollup code indexer.stop(); - listener.onResponse(new StopFeatureIndexBuilderJobAction.Response(true)); + listener.onResponse(new StopDataFrameJobAction.Response(true)); } @Override public void triggered(Event event) { if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { logger.debug( - "FeatureIndexBuilder indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); + "Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); indexer.maybeTriggerAsyncJob(System.currentTimeMillis()); } } - protected class ClientFeatureIndexBuilderIndexer extends FeatureIndexBuilderIndexer { + protected class ClientDataFrameIndexer extends DataFrameIndexer { private final Client client; - public ClientFeatureIndexBuilderIndexer(FeatureIndexBuilderJob job, AtomicReference initialState, + public ClientDataFrameIndexer(DataFrameJob job, AtomicReference initialState, Map initialPosition, Client client) { super(threadPool.executor(ThreadPool.Names.GENERIC), job, initialState, initialPosition); this.client = client; @@ -112,7 +112,7 @@ protected void doSaveState(IndexerState indexerState, Map positi // If we're aborting, just invoke `next` (which is likely an onFailure handler) next.run(); } else { - final FeatureIndexBuilderJobState state = new FeatureIndexBuilderJobState(indexerState, getPosition()); + final DataFrameJobState state = new DataFrameJobState(indexerState, getPosition()); logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); // TODO: we can not persist the state right now, need to be called from the task @@ -126,17 +126,17 @@ protected 
void doSaveState(IndexerState indexerState, Map positi @Override protected void onFailure(Exception exc) { - logger.warn("FeatureIndexBuilder job [" + job.getConfig().getId() + "] failed with an exception: ", exc); + logger.warn("Data frame job [" + job.getConfig().getId() + "] failed with an exception: ", exc); } @Override protected void onFinish() { - logger.info("Finished indexing for job [" + job.getConfig().getId() + "]"); + logger.info("Finished indexing for data frame job [" + job.getConfig().getId() + "]"); } @Override protected void onAbort() { - logger.info("FeatureIndexBuilder job [" + job.getConfig().getId() + "] received abort request, stopping indexer"); + logger.info("Data frame job [" + job.getConfig().getId() + "] received abort request, stopping indexer"); } } } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java similarity index 96% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java index 65b462ba6c367..0c976dceda783 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -33,7 +33,7 @@ */ public class SourceConfig implements Writeable, ToXContentObject { - private static final String NAME = "feature_index_builder_source"; + private static final String NAME = "data_frame_source"; private final List> sources; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java similarity index 87% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java index b3b70008f0c04..0573794d81398 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataFramePersistentTaskUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java @@ -4,12 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
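A small aside on the state handling above: DataFrameJobTask starts every indexer in IndexerState.STOPPED with a null position, and getState() re-wraps the indexer's live state on demand. A minimal sketch of that initial state object, using only the constructor and getter visible in these hunks:

```java
// Sketch of the initial task state as constructed by DataFrameJobTask in this patch:
// IndexerState.STOPPED and no position yet. Only APIs visible in these hunks are used.
import org.elasticsearch.xpack.core.indexing.IndexerState;
import org.elasticsearch.xpack.dataframe.job.DataFrameJobState;

public final class DataFrameJobStateSketch {

    static DataFrameJobState initialState() {
        // A freshly created job has not indexed anything, so there is no position to resume from.
        DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, null);
        assert state.getPosition() == null;
        return state;
    }
}
```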
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.persistence; +package org.elasticsearch.xpack.dataframe.persistence; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; +import org.elasticsearch.xpack.dataframe.DataFrame; public final class DataFramePersistentTaskUtils { @@ -29,7 +29,7 @@ public static boolean stateHasDataFrameJobs(String id, ClusterState state) { // persistent tasks and see if at least once has a DataFrameJob param if (id.equals(MetaData.ALL)) { hasJobs = pTasksMeta.tasks().stream() - .anyMatch(persistentTask -> persistentTask.getTaskName().equals(FeatureIndexBuilder.TASK_NAME)); + .anyMatch(persistentTask -> persistentTask.getTaskName().equals(DataFrame.TASK_NAME)); } else if (pTasksMeta.getTask(id) != null) { // If we're looking for a single job, we can just check directly diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java similarity index 90% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index b8c353debe07a..d45af546947f4 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.persistence; +package org.elasticsearch.xpack.dataframe.persistence; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -14,7 +14,7 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Map; @@ -32,7 +32,7 @@ public final class DataframeIndex { private DataframeIndex() { } - public static void createDestinationIndex(Client client, FeatureIndexBuilderJob job, Map mappings, + public static void createDestinationIndex(Client client, DataFrameJob job, Map mappings, final ActionListener listener) { CreateIndexRequest request = new CreateIndexRequest(job.getConfig().getDestinationIndex()); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java new file mode 100644 index 0000000000000..b6c441735d0ad --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.dataframe.rest.action; + + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; + +import java.io.IOException; + +public class RestDeleteDataFrameJobAction extends BaseRestHandler { + + public RestDeleteDataFrameJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.DELETE, DataFrame.BASE_PATH_JOBS_BY_ID, this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + DeleteDataFrameJobAction.Request request = new DeleteDataFrameJobAction.Request(id); + + return channel -> client.execute(DeleteDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "data_frame_delete_job_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java similarity index 69% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java index 2d18c63b0b0e6..9f24bacc272d4 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; +package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -12,20 +12,20 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; public class RestGetDataFrameJobsAction extends BaseRestHandler { public RestGetDataFrameJobsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.GET, DataFrame.BASE_PATH_JOBS_BY_ID, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameJob.ID.getPreferredName()); GetDataFrameJobsAction.Request request = new GetDataFrameJobsAction.Request(id); return channel -> client.execute(GetDataFrameJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java similarity index 69% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java index ca01ca43caf2a..dcedb3305b4c6 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; +package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -12,20 +12,20 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; public class RestGetDataFrameJobsStatsAction extends BaseRestHandler { public RestGetDataFrameJobsStatsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_stats", this); + controller.registerHandler(RestRequest.Method.GET, DataFrame.BASE_PATH_JOBS_BY_ID + "_stats", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameJob.ID.getPreferredName()); GetDataFrameJobsStatsAction.Request request = new GetDataFrameJobsStatsAction.Request(id); return channel -> client.execute(GetDataFrameJobsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java new file mode 100644 index 0000000000000..bf7ade8e9aad2 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; + +import java.io.IOException; + +public class RestPutDataFrameJobAction extends BaseRestHandler { + + public RestPutDataFrameJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.PUT, DataFrame.BASE_PATH_JOBS_BY_ID, this); + } + + @Override + public String getName() { + return "data_frame_put_job_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + XContentParser parser = restRequest.contentParser(); + + PutDataFrameJobAction.Request request = PutDataFrameJobAction.Request.fromXContent(parser, id); + + return channel -> client.execute(PutDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java similarity index 52% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java index 2a43d4fd04630..bc17462a8b8a5 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStartFeatureIndexBuilderJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; +package org.elasticsearch.xpack.dataframe.rest.action; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -13,28 +13,28 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.rollup.RollupField; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StartFeatureIndexBuilderJobAction; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; import java.io.IOException; -public class RestStartFeatureIndexBuilderJobAction extends BaseRestHandler { +public class RestStartDataFrameJobAction extends BaseRestHandler { - public RestStartFeatureIndexBuilderJobAction(Settings settings, RestController controller) { + public RestStartDataFrameJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_start", this); + controller.registerHandler(RestRequest.Method.POST, DataFrame.BASE_PATH_JOBS_BY_ID + "_start", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String id = restRequest.param(RollupField.ID.getPreferredName()); - StartFeatureIndexBuilderJobAction.Request request = new StartFeatureIndexBuilderJobAction.Request(id); + StartDataFrameJobAction.Request request = new StartDataFrameJobAction.Request(id); - return channel -> client.execute(StartFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(StartDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "ml_feature_index_builder_start_job_action"; + return "data_frame_start_job_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java new file mode 100644 index 0000000000000..76cace53d90aa --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.dataframe.rest.action; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.job.DataFrameJob; + +import java.io.IOException; + +public class RestStopDataFrameJobAction extends BaseRestHandler { + + public RestStopDataFrameJobAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, DataFrame.BASE_PATH_JOBS_BY_ID + "_stop", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + StopDataFrameJobAction.Request request = new StopDataFrameJobAction.Request(id); + + return channel -> client.execute(StopDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "data_frame_stop_job_action"; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java similarity index 96% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java index 6f2308d1e0a1b..e625f4ac9edea 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/Aggregations.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.support; +package org.elasticsearch.xpack.dataframe.support; import java.util.Locale; import java.util.Set; diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java similarity index 97% rename from x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java index 78c990a989b36..cbbb8c806a228 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidator.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
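To show how the REST handlers above hand off to the transport layer, here is a hedged sketch of driving the start action programmatically; it mirrors the body of RestStartDataFrameJobAction#prepareRequest, with a made-up job id and plain printing in place of a REST channel.

```java
// Sketch only: builds the same Request and issues the same client.execute call that
// RestStartDataFrameJobAction makes in this patch. "my_job" is a hypothetical id; any
// Client (for example the NodeClient handed to the REST handler) would work here.
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction;

public final class StartDataFrameJobSketch {

    static void startJob(Client client, String jobId) {
        StartDataFrameJobAction.Request request = new StartDataFrameJobAction.Request(jobId);

        client.execute(StartDataFrameJobAction.INSTANCE, request, ActionListener.wrap(
                response -> System.out.println("started: " + response.isStarted()),
                e -> System.err.println("start failed: " + e.getMessage())));
    }

    public static void main(String[] args) {
        // Usage: startJob(client, "my_job") from code that already holds a Client instance.
    }
}
```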
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.support; +package org.elasticsearch.xpack.dataframe.support; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import java.util.HashMap; import java.util.List; @@ -36,9 +36,9 @@ public class JobValidator { private static final Logger logger = LogManager.getLogger(JobValidator.class); private final Client client; - private final FeatureIndexBuilderJobConfig config; + private final DataFrameJobConfig config; - public JobValidator(FeatureIndexBuilderJobConfig config, Client client) { + public JobValidator(DataFrameJobConfig config, Client client) { this.client = Objects.requireNonNull(client); this.config = Objects.requireNonNull(config); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java similarity index 65% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java index d23d23f97c91a..17010c4afd9ff 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DataFrameJobStateAndStatsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java @@ -4,18 +4,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.AbstractSerializingFeatureIndexBuilderTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.DataFrameIndexerJobStatsTests; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobStateTests; +import org.elasticsearch.xpack.dataframe.job.AbstractSerializingDataFrameTestCase; +import org.elasticsearch.xpack.dataframe.job.DataFrameIndexerJobStatsTests; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobStateTests; import java.io.IOException; public class DataFrameJobStateAndStatsTests - extends AbstractSerializingFeatureIndexBuilderTestCase { + extends AbstractSerializingDataFrameTestCase { @Override protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throws IOException { @@ -25,7 +25,7 @@ protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throw @Override protected DataFrameJobStateAndStats createTestInstance() { return new DataFrameJobStateAndStats(randomAlphaOfLengthBetween(1,10), - FeatureIndexBuilderJobStateTests.randomFeatureIndexBuilderJobState(), + DataFrameJobStateTests.randomDataFrameJobState(), DataFrameIndexerJobStatsTests.randomStats()); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java similarity index 65% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java index 81eda8351f614..3a1bc395a256d 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/DeleteFeatureIndexBuilderJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java @@ -4,12 +4,12 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; -public class DeleteFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableTestCase { +public class DeleteDataFrameJobActionRequestTests extends AbstractStreamableTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java similarity index 83% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java index 63662c81b81b3..4718e94e1e18f 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java @@ -4,11 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Request; public class GetDataFrameJobsActionRequestTests extends AbstractStreamableTestCase { diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java similarity index 82% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java index 2d311cd9f6c13..8cb3c6943de5e 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/GetDataFrameJobsStatsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java @@ -4,11 +4,11 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.GetDataFrameJobsStatsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Request; public class GetDataFrameJobsStatsActionRequestTests extends AbstractStreamableTestCase { @Override diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java similarity index 78% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java index 6e947cc823dbe..80be5cc77f63c 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/PutFeatureIndexBuilderJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -12,16 +12,16 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction.Request; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfigTests; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Request; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfigTests; import org.junit.Before; import java.io.IOException; import static java.util.Collections.emptyList; -public class PutFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableXContentTestCase { +public class PutDataFrameJobActionRequestTests extends AbstractStreamableXContentTestCase { private String jobId; @@ -68,7 +68,7 @@ protected boolean supportsUnknownFields() { @Override protected Request createTestInstance() { - FeatureIndexBuilderJobConfig config = FeatureIndexBuilderJobConfigTests.randomFeatureIndexBuilderJobConfig(); + DataFrameJobConfig config = DataFrameJobConfigTests.randomDataFrameJobConfig(); return new Request(config); } diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java similarity index 79% rename from 
x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java index cec3458e8445a..43a927c02aa61 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StartFeatureIndexBuilderJobActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java @@ -1,4 +1,4 @@ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; @@ -8,7 +8,7 @@ import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction.Request; -public class StartFeatureIndexBuilderJobActionTests extends AbstractStreamableTestCase { +public class StartDataFrameJobActionTests extends AbstractStreamableTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java similarity index 75% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java index 89c61a021f2b2..4b3bba4e8d8d4 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/action/StopFeatureIndexBuilderJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java @@ -4,14 +4,15 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.action; +package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction.Request; + import java.io.IOException; -public class StopFeatureIndexBuilderJobActionRequestTests extends AbstractStreamableXContentTestCase { +public class StopDataFrameJobActionRequestTests extends AbstractStreamableXContentTestCase { @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java similarity index 90% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java index c2ed9e3d4e281..55253113b3cc9 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AbstractSerializingFeatureIndexBuilderTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -17,7 +17,7 @@ import static java.util.Collections.emptyList; -public abstract class AbstractSerializingFeatureIndexBuilderTestCase +public abstract class AbstractSerializingDataFrameTestCase extends AbstractSerializingTestCase { /** diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java similarity index 93% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java index 96fbb0142e08e..510b42990be52 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java @@ -4,20 +4,21 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; + import java.io.IOException; import java.util.HashSet; import java.util.Set; import static org.hamcrest.Matchers.equalTo; -public class AggregationConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { +public class AggregationConfigTests extends AbstractSerializingDataFrameTestCase { public static AggregationConfig randomAggregationConfig() { AggregatorFactories.Builder builder = new AggregatorFactories.Builder(); diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java similarity index 99% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java index 4a62c56623ad3..b440277cf1471 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ContextParser; diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java similarity index 95% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java index a71368d3ef031..90629b0a406fd 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/DataFrameIndexerJobStatsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java new file mode 100644 index 0000000000000..0c9deb1ccc6c9 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.job; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.junit.Before; + +import java.io.IOException; + +public class DataFrameJobConfigTests extends AbstractSerializingDataFrameTestCase { + + private String jobId; + + public static DataFrameJobConfig randomDataFrameJobConfig() { + return new DataFrameJobConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), + AggregationConfigTests.randomAggregationConfig()); + } + + @Before + public void setUpOptionalId() { + jobId = randomAlphaOfLengthBetween(1, 10); + } + + @Override + protected DataFrameJobConfig doParseInstance(XContentParser parser) throws IOException { + if (randomBoolean()) { + return DataFrameJobConfig.fromXContent(parser, jobId); + } else { + return DataFrameJobConfig.fromXContent(parser, null); + } + } + + @Override + protected DataFrameJobConfig createTestInstance() { + return randomDataFrameJobConfig(); + } + + @Override + protected Reader instanceReader() { + return DataFrameJobConfig::new; + } +} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java similarity index 61% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java index cade3ae67563e..4fad83be27210 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobStateTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; @@ -15,25 +15,25 @@ import java.util.HashMap; import java.util.Map; -public class FeatureIndexBuilderJobStateTests extends AbstractSerializingTestCase { +public class DataFrameJobStateTests extends AbstractSerializingTestCase { - public static FeatureIndexBuilderJobState randomFeatureIndexBuilderJobState() { - return new FeatureIndexBuilderJobState(randomFrom(IndexerState.values()), randomPosition()); + public static DataFrameJobState randomDataFrameJobState() { + return new DataFrameJobState(randomFrom(IndexerState.values()), randomPosition()); } @Override - protected FeatureIndexBuilderJobState doParseInstance(XContentParser parser) throws IOException { - return FeatureIndexBuilderJobState.fromXContent(parser); + protected DataFrameJobState doParseInstance(XContentParser parser) throws IOException { + return DataFrameJobState.fromXContent(parser); } @Override - protected FeatureIndexBuilderJobState createTestInstance() { - return randomFeatureIndexBuilderJobState(); + protected DataFrameJobState createTestInstance() { + return randomDataFrameJobState(); } @Override - protected Reader instanceReader() { - return FeatureIndexBuilderJobState::new; + protected Reader instanceReader() { + return DataFrameJobState::new; } private static Map randomPosition() { diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java similarity index 92% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java index e94754c825d27..3debbb04eb70b 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/SourceConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.job; +package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; @@ -17,7 +17,7 @@ import java.util.ArrayList; import java.util.List; -public class SourceConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { +public class SourceConfigTests extends AbstractSerializingDataFrameTestCase { public static SourceConfig randomSourceConfig() { int numSources = randomIntBetween(1, 10); diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java similarity index 92% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java index 465f9f501401c..644174b8c8009 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/AggregationsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.featureindexbuilder.support; +package org.elasticsearch.xpack.dataframe.support; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java similarity index 86% rename from x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java index 87b49fd755da1..57f4b1d2d8d96 100644 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/support/JobValidatorTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.featureindexbuilder.support; +package org.elasticsearch.xpack.dataframe.support; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; @@ -26,9 +26,9 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.AggregationConfig; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJobConfig; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.SourceConfig; +import org.elasticsearch.xpack.dataframe.job.AggregationConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.job.SourceConfig; import org.junit.After; import org.junit.Before; @@ -78,7 +78,7 @@ public void testValidateExistingIndex() throws Exception { SourceConfig sourceConfig = getValidSourceConfig(); AggregationConfig aggregationConfig = getValidAggregationConfig(); - FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source_index", "non_existing_dest", + DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source_index", "non_existing_dest", sourceConfig, aggregationConfig); assertValidJob(client, config); @@ -88,7 +88,7 @@ public void testValidateNonExistingIndex() throws Exception { SourceConfig sourceConfig = getValidSourceConfig(); AggregationConfig aggregationConfig = getValidAggregationConfig(); - FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "non_existing_source_index", + DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "non_existing_source_index", "non_existing_dest", sourceConfig, aggregationConfig); assertInvalidJob(client, config); @@ -100,7 +100,7 @@ public void testSearchFailure() throws Exception { // test a failure during the search operation, job creation fails if // search has failures although they might just be temporary - FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source_index_with_failing_shards", + DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source_index_with_failing_shards", "non_existing_dest", sourceConfig, aggregationConfig); assertInvalidJob(client, config); @@ -112,7 +112,7 @@ public void testValidateAllSupportedAggregations() throws Exception { for (String agg : supportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source", "non_existing_dest", + DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source", "non_existing_dest", sourceConfig, aggregationConfig); assertValidJob(client, config); @@ -125,7 +125,7 @@ public void testValidateAllUnsupportedAggregations() throws Exception { for (String agg : unsupportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - FeatureIndexBuilderJobConfig config = new FeatureIndexBuilderJobConfig(getTestName(), "existing_source", "non_existing_dest", + DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source", "non_existing_dest", sourceConfig, aggregationConfig); assertInvalidJob(client, config); @@ -199,15 +199,15 @@ private AggregationConfig parseAggregations(String json) throws IOException { return AggregationConfig.fromXContent(parser); } - 
private static void assertValidJob(Client client, FeatureIndexBuilderJobConfig config) throws Exception { + private static void assertValidJob(Client client, DataFrameJobConfig config) throws Exception { validate(client, config, true); } - private static void assertInvalidJob(Client client, FeatureIndexBuilderJobConfig config) throws Exception { + private static void assertInvalidJob(Client client, DataFrameJobConfig config) throws Exception { validate(client, config, false); } - private static void validate(Client client, FeatureIndexBuilderJobConfig config, boolean expectValid) throws Exception { + private static void validate(Client client, DataFrameJobConfig config, boolean expectValid) throws Exception { JobValidator validator = new JobValidator(config, client); CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java deleted file mode 100644 index 1e504987f51a6..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestDeleteFeatureIndexBuilderJobAction.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; - - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.DeleteFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; - -import java.io.IOException; - -public class RestDeleteFeatureIndexBuilderJobAction extends BaseRestHandler { - - public RestDeleteFeatureIndexBuilderJobAction(Settings settings, RestController controller) { - super(settings); - controller.registerHandler(RestRequest.Method.DELETE, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); - DeleteFeatureIndexBuilderJobAction.Request request = new DeleteFeatureIndexBuilderJobAction.Request(id); - - return channel -> client.execute(DeleteFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); - } - - @Override - public String getName() { - return "feature_index_builder_delete_job_action"; - } -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java deleted file mode 100644 index 45d05452c819d..0000000000000 --- 
a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestPutFeatureIndexBuilderJobAction.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.PutFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; - -import java.io.IOException; - -public class RestPutFeatureIndexBuilderJobAction extends BaseRestHandler { - - public RestPutFeatureIndexBuilderJobAction(Settings settings, RestController controller) { - super(settings); - controller.registerHandler(RestRequest.Method.PUT, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID, this); - } - - @Override - public String getName() { - return "feature_index_builder_put_job_action"; - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); - XContentParser parser = restRequest.contentParser(); - - PutFeatureIndexBuilderJobAction.Request request = PutFeatureIndexBuilderJobAction.Request.fromXContent(parser, id); - - return channel -> client.execute(PutFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); - } -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java b/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java deleted file mode 100644 index 24c5b6598d6de..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/main/java/org/elasticsearch/xpack/ml/featureindexbuilder/rest/action/RestStopFeatureIndexBuilderJobAction.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.ml.featureindexbuilder.rest.action; - -import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.featureindexbuilder.FeatureIndexBuilder; -import org.elasticsearch.xpack.ml.featureindexbuilder.action.StopFeatureIndexBuilderJobAction; -import org.elasticsearch.xpack.ml.featureindexbuilder.job.FeatureIndexBuilderJob; - -import java.io.IOException; - -public class RestStopFeatureIndexBuilderJobAction extends BaseRestHandler { - - public RestStopFeatureIndexBuilderJobAction(Settings settings, RestController controller) { - super(settings); - controller.registerHandler(RestRequest.Method.POST, FeatureIndexBuilder.BASE_PATH_JOBS_BY_ID + "_stop", this); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(FeatureIndexBuilderJob.ID.getPreferredName()); - StopFeatureIndexBuilderJobAction.Request request = new StopFeatureIndexBuilderJobAction.Request(id); - - return channel -> client.execute(StopFeatureIndexBuilderJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); - } - - @Override - public String getName() { - return "feature_index_builder_stop_job_action"; - } -} diff --git a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java b/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java deleted file mode 100644 index a16b51aadcf09..0000000000000 --- a/x-pack/plugin/ml-feature-index-builder/src/test/java/org/elasticsearch/xpack/ml/featureindexbuilder/job/FeatureIndexBuilderJobConfigTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.ml.featureindexbuilder.job; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentParser; -import org.junit.Before; - -import java.io.IOException; - -public class FeatureIndexBuilderJobConfigTests extends AbstractSerializingFeatureIndexBuilderTestCase { - - private String jobId; - - public static FeatureIndexBuilderJobConfig randomFeatureIndexBuilderJobConfig() { - return new FeatureIndexBuilderJobConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), - randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), - AggregationConfigTests.randomAggregationConfig()); - } - - @Before - public void setUpOptionalId() { - jobId = randomAlphaOfLengthBetween(1, 10); - } - - @Override - protected FeatureIndexBuilderJobConfig doParseInstance(XContentParser parser) throws IOException { - if (randomBoolean()) { - return FeatureIndexBuilderJobConfig.fromXContent(parser, jobId); - } else { - return FeatureIndexBuilderJobConfig.fromXContent(parser, null); - } - } - - @Override - protected FeatureIndexBuilderJobConfig createTestInstance() { - return randomFeatureIndexBuilderJobConfig(); - } - - @Override - protected Reader instanceReader() { - return FeatureIndexBuilderJobConfig::new; - } -} From 116bfef6ced05f8b991d5c9f74e62752b108e207 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 30 Nov 2018 09:11:57 +0100 Subject: [PATCH 26/49] add a client helper origin for data frame (#36057) add a client helper origin for data frame --- .../main/java/org/elasticsearch/xpack/core/ClientHelper.java | 1 + .../elasticsearch/xpack/dataframe/job/DataFrameJobTask.java | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index c0a7a0b90a4a0..09e35b7496662 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -45,6 +45,7 @@ public final class ClientHelper { public static final String DEPRECATION_ORIGIN = "deprecation"; public static final String PERSISTENT_TASK_ORIGIN = "persistent_tasks"; public static final String ROLLUP_ORIGIN = "rollup"; + public static final String DATA_FRAME_ORIGIN = "data_frame"; private ClientHelper() {} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 6e6e519e1fcf7..8305ec550fb03 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -97,13 +97,14 @@ public ClientDataFrameIndexer(DataFrameJob job, AtomicReference in @Override protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ML_ORIGIN, client, SearchAction.INSTANCE, request, + ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, SearchAction.INSTANCE, request, nextPhase); } @Override protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.ML_ORIGIN, 
client, BulkAction.INSTANCE, request, nextPhase); + ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, BulkAction.INSTANCE, request, + nextPhase); } @Override From dd08f7bef4df073820d6fd75824dad3d43af6cc5 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 4 Dec 2018 13:55:00 +0100 Subject: [PATCH 27/49] adapt to upstream changes --- .../action/DeleteDataFrameJobAction.java | 11 +++++----- .../action/GetDataFrameJobsAction.java | 11 +++++----- .../action/GetDataFrameJobsStatsAction.java | 11 +++++----- .../action/StartDataFrameJobAction.java | 11 +++++----- .../action/StopDataFrameJobAction.java | 11 +++++----- .../DeleteDataFrameJobActionRequestTests.java | 9 ++++---- .../GetDataFrameJobsActionRequestTests.java | 9 ++++---- ...tDataFrameJobsStatsActionRequestTests.java | 9 ++++---- .../action/StartDataFrameJobActionTests.java | 11 +++++----- .../StopDataFrameJobActionRequestTests.java | 22 +++++-------------- 10 files changed, 51 insertions(+), 64 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java index 7ccf5c6f95f89..09df36b90205e 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java @@ -45,14 +45,13 @@ public Request(String id) { public Request() { } - public String getId() { - return id; + public Request(StreamInput in) throws IOException { + super(in); + id = in.readString(); } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); + public String getId() { + return id; } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java index 75bf6ace6c0ea..41c00ffa4ff7c 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java @@ -61,6 +61,11 @@ public Request(String id) { public Request() {} + public Request(StreamInput in) throws IOException { + super(in); + id = in.readString(); + } + @Override public boolean match(Task task) { // If we are retrieving all the jobs, the task description does not contain the id @@ -75,12 +80,6 @@ public String getId() { return id; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java index 8538476262cea..04d14572e2de8 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java @@ -60,6 +60,11 @@ public Request(String id) { public Request() {} + 
public Request(StreamInput in) throws IOException { + super(in); + id = in.readString(); + } + @Override public boolean match(Task task) { // If we are retrieving all the jobs, the task description does not contain the id @@ -74,12 +79,6 @@ public String getId() { return id; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java index f43aaf4f85f5f..19fab23fb45ed 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java @@ -49,14 +49,13 @@ public Request(String id) { public Request() { } - public String getId() { - return id; + public Request(StreamInput in) throws IOException { + super(in); + id = in.readString(); } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); + public String getId() { + return id; } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java index c2f5349fe73f9..6a65151289565 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java @@ -56,6 +56,11 @@ public Request(String id) { public Request() { } + public Request(StreamInput in) throws IOException { + super(in); + id = in.readString(); + } + public String getId() { return id; } @@ -64,12 +69,6 @@ public void setId(String id) { this.id = id; } - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - id = in.readString(); - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java index 3a1bc395a256d..c38ffce4243b2 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java @@ -6,17 +6,18 @@ package org.elasticsearch.xpack.dataframe.action; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; -public class DeleteDataFrameJobActionRequestTests extends AbstractStreamableTestCase { +public class DeleteDataFrameJobActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); } @Override - protected Request 
createBlankInstance() { - return new Request(); + protected Writeable.Reader instanceReader() { + return Request::new; } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java index 4718e94e1e18f..af6a58f42f050 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java @@ -7,10 +7,11 @@ package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Request; -public class GetDataFrameJobsActionRequestTests extends AbstractStreamableTestCase { +public class GetDataFrameJobsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { @@ -21,7 +22,7 @@ protected Request createTestInstance() { } @Override - protected Request createBlankInstance() { - return new Request(); + protected Writeable.Reader instanceReader() { + return Request::new; } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java index 8cb3c6943de5e..5f21648befa22 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java @@ -7,10 +7,11 @@ package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Request; -public class GetDataFrameJobsStatsActionRequestTests extends AbstractStreamableTestCase { +public class GetDataFrameJobsStatsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { if (randomBoolean()) { @@ -20,7 +21,7 @@ protected Request createTestInstance() { } @Override - protected Request createBlankInstance() { - return new Request(); + protected Writeable.Reader instanceReader() { + return Request::new; } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java index 43a927c02aa61..063cb6c0bc675 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java @@ -5,17 +5,18 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction.Request; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Request; -public class StartDataFrameJobActionTests extends AbstractStreamableTestCase { +public class StartDataFrameJobActionTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); } @Override - protected Request createBlankInstance() { - return new Request(); + protected Writeable.Reader instanceReader() { + return Request::new; } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java index 4b3bba4e8d8d4..f7e3faa21a5cd 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java @@ -6,13 +6,11 @@ package org.elasticsearch.xpack.dataframe.action; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction.Request; -import java.io.IOException; - -public class StopDataFrameJobActionRequestTests extends AbstractStreamableXContentTestCase { +public class StopDataFrameJobActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { @@ -20,17 +18,7 @@ protected Request createTestInstance() { } @Override - protected boolean supportsUnknownFields() { - return false; - } - - @Override - protected Request doParseInstance(XContentParser parser) throws IOException { - return Request.PARSER.parse(parser, null); - } - - @Override - protected Request createBlankInstance() { - return new Request(); + protected Writeable.Reader instanceReader() { + return Request::new; } } From 7cd92084507fd5dd552679029ec621b9424a0a06 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 5 Dec 2018 10:14:08 +0100 Subject: [PATCH 28/49] =?UTF-8?q?[ML-DataFrame]=20cancel=20indexer=20on=20?= =?UTF-8?q?job=20deletion=20and=20remove=20task,=20allow=20only=20stopped?= =?UTF-8?q?=20jo=E2=80=A6=20(#36204)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit cancel indexer on job deletion and remove task, allow only stopped jobs to be deleted --- .../action/DeleteDataFrameJobAction.java | 95 ++++++++++-- .../TransportDeleteDataFrameJobAction.java | 136 +++++++++--------- .../xpack/dataframe/job/DataFrameJobTask.java | 38 ++++- .../action/RestDeleteDataFrameJobAction.java | 12 +- 4 files changed, 197 insertions(+), 84 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java index 09df36b90205e..48723b789a7fe 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java @@ -6,22 +6,29 @@ package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.support.tasks.BaseTasksRequest; +import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; +import java.util.Collections; +import java.util.List; import java.util.Objects; -public class DeleteDataFrameJobAction extends Action { +public class DeleteDataFrameJobAction extends Action { public static final DeleteDataFrameJobAction INSTANCE = new DeleteDataFrameJobAction(); public static final String NAME = "cluster:admin/data_frame/delete"; @@ -31,11 +38,11 @@ private DeleteDataFrameJobAction() { } @Override - public AcknowledgedResponse newResponse() { - return new AcknowledgedResponse(); + public Response newResponse() { + return new Response(); } - public static class Request extends AcknowledgedRequest implements ToXContent { + public static class Request extends BaseTasksRequest implements ToXContentFragment { private String id; public Request(String id) { @@ -54,6 +61,11 @@ public String getId() { return id; } + @Override + public boolean match(Task task) { + return task.getDescription().equals(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -90,10 +102,73 @@ public boolean equals(Object obj) { } } - public static class RequestBuilder extends MasterNodeOperationRequestBuilder { + public static class RequestBuilder extends ActionRequestBuilder { protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameJobAction action) { - super(client, action, new Request()); + super(client, action, new DeleteDataFrameJobAction.Request()); + } + } + + public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { + private boolean acknowledged; + public Response(StreamInput in) throws IOException { + super(Collections.emptyList(), Collections.emptyList()); + readFrom(in); + } + + public Response(boolean acknowledged, List taskFailures, List nodeFailures) { + super(taskFailures, nodeFailures); + this.acknowledged = acknowledged; + } + + public Response(boolean acknowledged) { + this(acknowledged, 
Collections.emptyList(), Collections.emptyList()); + } + + public Response() { + this(false, Collections.emptyList(), Collections.emptyList()); + } + + public boolean isDeleted() { + return acknowledged; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + acknowledged = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeBoolean(acknowledged); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + toXContentCommon(builder, params); + builder.field("acknowledged", acknowledged); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + DeleteDataFrameJobAction.Response response = (DeleteDataFrameJobAction.Response) o; + return super.equals(o) && acknowledged == response.acknowledged; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), acknowledged); } } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java index c6a2fe8094b20..bc1385f56b99d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java @@ -5,103 +5,95 @@ */ package org.elasticsearch.xpack.dataframe.action; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; +import 
org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Response; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import java.util.Objects; -import java.util.concurrent.TimeUnit; +import java.io.IOException; +import java.util.List; -public class TransportDeleteDataFrameJobAction - extends TransportMasterNodeAction { - - private final PersistentTasksService persistentTasksService; - private static final Logger logger = LogManager.getLogger(TransportDeleteDataFrameJobAction.class); +public class TransportDeleteDataFrameJobAction extends TransportTasksAction { @Inject - public TransportDeleteDataFrameJobAction(TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - PersistentTasksService persistentTasksService, ClusterService clusterService) { - super(DeleteDataFrameJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, DeleteDataFrameJobAction.Request::new); - this.persistentTasksService = persistentTasksService; + public TransportDeleteDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, + ClusterService clusterService) { + super(DeleteDataFrameJobAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + ThreadPool.Names.SAME); } @Override - protected String executor() { - return ThreadPool.Names.SAME; + protected Response newResponse(Request request, List tasks, List taskOperationFailures, + List failedNodeExceptions) { + assert tasks.size() + taskOperationFailures.size() == 1; + boolean cancelled = tasks.size() > 0 && tasks.stream().allMatch(Response::isDeleted); + + return new Response(cancelled, taskOperationFailures, failedNodeExceptions); } @Override - protected AcknowledgedResponse newResponse() { - return new AcknowledgedResponse(); + protected Response readTaskResponse(StreamInput in) throws IOException { + Response response = new Response(); + response.readFrom(in); + return response; } @Override - protected void masterOperation(DeleteDataFrameJobAction.Request request, ClusterState state, - ActionListener listener) throws Exception { - - String jobId = request.getId(); - TimeValue timeout = new TimeValue(60, TimeUnit.SECONDS); // TODO make this a config option - - // Step 1. Cancel the persistent task - persistentTasksService.sendRemoveRequest(jobId, new ActionListener>() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask persistentTask) { - logger.debug("Request to cancel Task for data frame job [" + jobId + "] successful."); - - // Step 2. Wait for the task to finish cancellation internally - persistentTasksService.waitForPersistentTaskCondition(jobId, Objects::isNull, timeout, - new PersistentTasksService.WaitForPersistentTaskListener() { - @Override - public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - logger.debug("Task for data frame job [" + jobId + "] successfully canceled."); - listener.onResponse(new AcknowledgedResponse(true)); - } - - @Override - public void onFailure(Exception e) { - logger.error("Error while cancelling task for data frame job [" + jobId - + "]." 
+ e); - listener.onFailure(e); - } - - @Override - public void onTimeout(TimeValue timeout) { - String msg = "Stopping of data frame job [" + jobId + "] timed out after [" + timeout + "]."; - logger.warn(msg); - listener.onFailure(new ElasticsearchException(msg)); - } - }); - } - - @Override - public void onFailure(Exception e) { - logger.error("Error while requesting to cancel task for data frame job [" + jobId + "]" + e); - listener.onFailure(e); - } - }); - + protected void taskOperation(Request request, DataFrameJobTask task, ActionListener listener) { + assert task.getConfig().getId().equals(request.getId()); + IndexerState state = task.getState().getJobState(); + if (state.equals(IndexerState.STOPPED)) { + task.onCancelled(); + listener.onResponse(new Response(true)); + } else { + listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " + "indexer state is [" + + state + "]. Job must be [" + IndexerState.STOPPED + "] before deletion.")); + } } @Override - protected ClusterBlockException checkBlock(DeleteDataFrameJobAction.Request request, ClusterState state) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + protected void doExecute(Task task, Request request, ActionListener listener) { + final ClusterState state = clusterService.state(); + final DiscoveryNodes nodes = state.nodes(); + if (nodes.isLocalNodeElectedMaster()) { + PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { + super.doExecute(task, request, listener); + } else { + // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this call, + // no need to go looking for the allocated task + listener.onFailure(new ResourceNotFoundException("the task with id [" + request.getId() + "] doesn't exist")); + } + } else { + // Delegates DeleteJob to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // on the master, which makes testing difficult. 
+ if (nodes.getMasterNode() == null) { + listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); + } else { + transportService.sendRequest(nodes.getMasterNode(), actionName, request, + new ActionListenerResponseHandler<>(listener, Response::new)); + } + } } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 8305ec550fb03..966896cf4ea28 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; +import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Response; @@ -35,16 +36,18 @@ public class DataFrameJobTask extends AllocatedPersistentTask implements Schedul private static final Logger logger = LogManager.getLogger(DataFrameJobTask.class); private final DataFrameJob job; + private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; private final DataFrameIndexer indexer; - static final String SCHEDULE_NAME = "xpack/data_frame/job" + "/schedule"; + static final String SCHEDULE_NAME = DataFrame.TASK_NAME + "/schedule"; public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, DataFrameJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { super(id, type, action, DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); this.job = job; + this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; logger.info("construct job task"); // todo: simplistic implementation for now @@ -86,6 +89,38 @@ public void triggered(Event event) { } } + /** + * Attempt to gracefully cleanup the data frame job so it can be terminated. + * This tries to remove the job from the scheduler, and potentially any other + * cleanup operations in the future + */ + synchronized void shutdown() { + try { + logger.info("Data frame indexer [" + job.getConfig().getId() + "] received abort request, stopping indexer."); + schedulerEngine.remove(SCHEDULE_NAME + "_" + job.getConfig().getId()); + schedulerEngine.unregister(this); + } catch (Exception e) { + markAsFailed(e); + return; + } + markAsCompleted(); + } + + /** + * This is called when the persistent task signals that the allocated task should be terminated. + * Termination in the task framework is essentially voluntary, as the allocated task can only be + * shut down from the inside. 
+ */ + @Override + public synchronized void onCancelled() { + logger.info( + "Received cancellation request for data frame job [" + job.getConfig().getId() + "], state: [" + indexer.getState() + "]"); + if (indexer.abort()) { + // there is no background job running, we can shutdown safely + shutdown(); + } + } + protected class ClientDataFrameIndexer extends DataFrameIndexer { private final Client client; @@ -138,6 +173,7 @@ protected void onFinish() { @Override protected void onAbort() { logger.info("Data frame job [" + job.getConfig().getId() + "] received abort request, stopping indexer"); + shutdown(); } } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java index b6c441735d0ad..8923a27ccbd56 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; @@ -30,7 +31,16 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(DataFrameJob.ID.getPreferredName()); DeleteDataFrameJobAction.Request request = new DeleteDataFrameJobAction.Request(id); - return channel -> client.execute(DeleteDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(DeleteDataFrameJobAction.INSTANCE, request, + new RestToXContentListener(channel) { + @Override + protected RestStatus getStatus(DeleteDataFrameJobAction.Response response) { + if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { + return RestStatus.INTERNAL_SERVER_ERROR; + } + return RestStatus.OK; + } + }); } @Override From 5f03768784b8a6cccdd342ac2d129bb9fa4f217f Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 10 Dec 2018 10:33:05 +0100 Subject: [PATCH 29/49] repair merge conflicts --- .../TransportDeleteDataFrameJobAction.java | 11 +---------- .../action/TransportGetDataFrameJobsAction.java | 7 +------ .../TransportGetDataFrameJobsStatsAction.java | 9 +-------- .../action/TransportStartDataFrameJobAction.java | 16 ++++------------ .../action/TransportStopDataFrameJobAction.java | 12 +++--------- 5 files changed, 10 insertions(+), 45 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java index bc1385f56b99d..e240271b83dea 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; @@ -29,7 +28,6 @@ import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Response; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import java.io.IOException; import java.util.List; public class TransportDeleteDataFrameJobAction extends TransportTasksAction { @@ -38,7 +36,7 @@ public class TransportDeleteDataFrameJobAction extends TransportTasksAction tasks, List listener) { assert task.getConfig().getId().equals(request.getId()); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java index aaf1bdab1f139..90bd0d46d678f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java @@ -43,7 +43,7 @@ public class TransportGetDataFrameJobsAction extends @Inject public TransportGetDataFrameJobsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { super(GetDataFrameJobsAction.NAME, clusterService, transportService, actionFilters, GetDataFrameJobsAction.Request::new, - GetDataFrameJobsAction.Response::new, ThreadPool.Names.SAME); + GetDataFrameJobsAction.Response::new, GetDataFrameJobsAction.Response::new, ThreadPool.Names.SAME); } @Override @@ -54,11 +54,6 @@ protected Response newResponse(Request request, List tasks, List listener) { List configs = Collections.emptyList(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java index 8f24186156025..13b09cb7c969d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -27,7 +26,6 @@ import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -42,7 +40,7 @@ public class TransportGetDataFrameJobsStatsAction extends @Inject public TransportGetDataFrameJobsStatsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(GetDataFrameJobsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + super(GetDataFrameJobsStatsAction.NAME, 
clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, ThreadPool.Names.SAME); } @@ -54,11 +52,6 @@ protected Response newResponse(Request request, List tasks, List listener) { List jobsStateAndStats = Collections.emptyList(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java index 12a419af5205b..390fe2ab16b66 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; @@ -23,7 +22,6 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import java.io.IOException; import java.util.List; import java.util.function.Consumer; @@ -34,10 +32,10 @@ public class TransportStartDataFrameJobAction extends private final XPackLicenseState licenseState; @Inject - public TransportStartDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, - ClusterService clusterService, XPackLicenseState licenseState) { - super(StartDataFrameJobAction.NAME, clusterService, transportService, actionFilters, - StartDataFrameJobAction.Request::new, StartDataFrameJobAction.Response::new, ThreadPool.Names.SAME); + public TransportStartDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, + XPackLicenseState licenseState) { + super(StartDataFrameJobAction.NAME, clusterService, transportService, actionFilters, StartDataFrameJobAction.Request::new, + StartDataFrameJobAction.Response::new, StartDataFrameJobAction.Response::new, ThreadPool.Names.SAME); this.licenseState = licenseState; } @@ -108,10 +106,4 @@ protected StartDataFrameJobAction.Response newResponse(StartDataFrameJobAction.R boolean allStarted = tasks.stream().allMatch(StartDataFrameJobAction.Response::isStarted); return new StartDataFrameJobAction.Response(allStarted); } - - @Override - protected StartDataFrameJobAction.Response readTaskResponse(StreamInput in) throws IOException { - return new StartDataFrameJobAction.Response(in); - } - } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java index f981f6872013a..0c7ea922bfecc 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java @@ -28,10 +28,9 @@ public class TransportStopDataFrameJobAction extends StopDataFrameJobAction.Response, StopDataFrameJobAction.Response> { @Inject - public TransportStopDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, - 
ClusterService clusterService) { - super(StopDataFrameJobAction.NAME, clusterService, transportService, actionFilters, - StopDataFrameJobAction.Request::new, StopDataFrameJobAction.Response::new, ThreadPool.Names.SAME); + public TransportStopDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { + super(StopDataFrameJobAction.NAME, clusterService, transportService, actionFilters, StopDataFrameJobAction.Request::new, + StopDataFrameJobAction.Response::new, StopDataFrameJobAction.Response::new, ThreadPool.Names.SAME); } @Override @@ -74,9 +73,4 @@ protected StopDataFrameJobAction.Response newResponse(StopDataFrameJobAction.Req boolean allStopped = tasks.stream().allMatch(StopDataFrameJobAction.Response::isStopped); return new StopDataFrameJobAction.Response(allStopped); } - - @Override - protected StopDataFrameJobAction.Response readTaskResponse(StreamInput in) throws IOException { - return new StopDataFrameJobAction.Response(in); - } } \ No newline at end of file From cf92a1a318d2d3a244b16b861b7bb2b2de38f5ee Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 10 Dec 2018 13:31:02 +0100 Subject: [PATCH 30/49] [ML-DataFrame] fix state persistence and load on startup (#36375) fix state persistence and load on startup --- .../TransportDeleteDataFrameJobAction.java | 2 +- .../TransportGetDataFrameJobsAction.java | 2 - .../TransportStopDataFrameJobAction.java | 2 - .../xpack/dataframe/job/DataFrameJob.java | 3 +- .../dataframe/job/DataFrameJobState.java | 5 +- .../xpack/dataframe/job/DataFrameJobTask.java | 97 +++++++++++++++++-- .../dataframe/support/JobValidatorTests.java | 6 +- 7 files changed, 97 insertions(+), 20 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java index e240271b83dea..1a5e371003e4c 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java @@ -52,7 +52,7 @@ protected Response newResponse(Request request, List tasks, List listener) { assert task.getConfig().getId().equals(request.getId()); - IndexerState state = task.getState().getJobState(); + IndexerState state = task.getState().getIndexerState(); if (state.equals(IndexerState.STOPPED)) { task.onCancelled(); listener.onResponse(new Response(true)); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java index 90bd0d46d678f..6ae1e08ac95db 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ 
-28,7 +27,6 @@ import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.List; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java index 0c7ea922bfecc..5c0cb444754fe 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java @@ -14,13 +14,11 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import java.io.IOException; import java.util.List; public class TransportStopDataFrameJobAction extends diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java index 7a8c1a0e36b16..52ec6043b6573 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.dataframe.job; import org.elasticsearch.Version; +import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,7 +22,7 @@ import java.util.Map; import java.util.Objects; -public class DataFrameJob implements XPackPlugin.XPackPersistentTaskParams { +public class DataFrameJob extends AbstractDiffable implements XPackPlugin.XPackPersistentTaskParams { public static final ParseField ID = new ParseField("id"); public static final String NAME = DataFrame.TASK_NAME; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java index dd7ba01b2aad1..4fcf15908b08e 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java @@ -17,6 +17,7 @@ import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.indexing.IndexerState; +import org.elasticsearch.xpack.dataframe.DataFrame; import java.io.IOException; import java.util.Collections; @@ -30,7 +31,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class DataFrameJobState implements Task.Status, PersistentTaskState { - public static final String NAME = "xpack/data_frame/job_state"; + public static final String NAME = DataFrame.TASK_NAME; private 
final IndexerState state; @@ -73,7 +74,7 @@ public DataFrameJobState(StreamInput in) throws IOException { currentPosition = in.readBoolean() ? Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())) : null; } - public IndexerState getJobState() { + public IndexerState getIndexerState() { return state; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 966896cf4ea28..4e747e35c1f30 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; @@ -49,10 +50,24 @@ public DataFrameJobTask(long id, String type, String action, TaskId parentTask, this.job = job; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; - logger.info("construct job task"); - // todo: simplistic implementation for now IndexerState initialState = IndexerState.STOPPED; Map initialPosition = null; + logger.info("[{}] init, got state: [{}]", job.getConfig().getId(), state != null); + if (state != null) { + final IndexerState existingState = state.getIndexerState(); + logger.info("[{}] Loading existing state: [{}], position [{}]", job.getConfig().getId(), existingState, state.getPosition()); + if (existingState.equals(IndexerState.INDEXING)) { + // reset to started as no indexer is running + initialState = IndexerState.STARTED; + } else if (existingState.equals(IndexerState.ABORTING) || existingState.equals(IndexerState.STOPPING)) { + // reset to stopped as something bad happened + initialState = IndexerState.STOPPED; + } else { + initialState = existingState; + } + initialPosition = state.getPosition(); + } + this.indexer = new ClientDataFrameIndexer(job, new AtomicReference<>(initialState), initialPosition, client); } @@ -60,6 +75,14 @@ public DataFrameJobConfig getConfig() { return job.getConfig(); } + /** + * Enable Task API to return detailed status information + */ + @Override + public Status getStatus() { + return getState(); + } + public DataFrameJobState getState() { return new DataFrameJobState(indexer.getState(), indexer.getPosition()); } @@ -69,19 +92,73 @@ public DataFrameIndexerJobStats getStats() { } public synchronized void start(ActionListener listener) { - // TODO: safeguards missing, see rollup code - indexer.start(); - listener.onResponse(new StartDataFrameJobAction.Response(true)); + final IndexerState prevState = indexer.getState(); + if (prevState != IndexerState.STOPPED) { + // fails if the task is not STOPPED + listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", + job.getConfig().getId(), prevState)); + return; + } + + final IndexerState newState = indexer.start(); + if (newState != IndexerState.STARTED) { + listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", + job.getConfig().getId(), newState)); + return; + } + + final DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition()); + + logger.debug("Updating 
state for data frame job [{}] to [{}][{}]", job.getConfig().getId(), state.getIndexerState(), + state.getPosition()); + updatePersistentTaskState(state, + ActionListener.wrap( + (task) -> { + logger.debug("Successfully updated state for data frame job [" + job.getConfig().getId() + "] to [" + + state.getIndexerState() + "][" + state.getPosition() + "]"); + listener.onResponse(new StartDataFrameJobAction.Response(true)); + }, (exc) -> { + // We were unable to update the persistent status, so we need to shutdown the indexer too. + indexer.stop(); + listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [" + + job.getConfig().getId() + "] to [" + state.getIndexerState() + "].", exc)); + }) + ); } - public void stop(ActionListener listener) { - // TODO: safeguards missing, see rollup code - indexer.stop(); - listener.onResponse(new StopDataFrameJobAction.Response(true)); + public synchronized void stop(ActionListener listener) { + final IndexerState newState = indexer.stop(); + switch (newState) { + case STOPPED: + listener.onResponse(new StopDataFrameJobAction.Response(true)); + break; + + case STOPPING: + // update the persistent state to STOPPED. There are two scenarios and both are safe: + // 1. we persist STOPPED now, indexer continues a bit then sees the flag and checkpoints another STOPPED with the more recent + // position. + // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up at last checkpoint, + // overwrite some docs and eventually checkpoint. + DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition()); + updatePersistentTaskState(state, ActionListener.wrap((task) -> { + logger.debug("Successfully updated state for data frame job [{}] to [{}]", job.getConfig().getId(), + state.getIndexerState()); + listener.onResponse(new StopDataFrameJobAction.Response(true)); + }, (exc) -> { + listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [{}] to [{}]", exc, + job.getConfig().getId(), state.getIndexerState())); + })); + break; + + default: + listener.onFailure(new ElasticsearchException("Cannot stop task for data frame job [{}], because state was [{}]", + job.getConfig().getId(), newState)); + break; + } } @Override - public void triggered(Event event) { + public synchronized void triggered(Event event) { if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { logger.debug( "Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java index 57f4b1d2d8d96..a9735f68bb99c 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.dataframe.support; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -157,8 +158,9 @@ protected void } } - final SearchResponseSections sections = new SearchResponseSections(new SearchHits(new SearchHit[0], 0, 0), null, null, - false, null, null, 1); + final 
SearchResponseSections sections = new SearchResponseSections( + new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0), null, null, false, null, null, + 1); final SearchResponse response = new SearchResponse(sections, null, 10, searchFailures.size() > 0 ? 0 : 5, 0, 0, searchFailures.toArray(new ShardSearchFailure[searchFailures.size()]), null); From fa85f403b9e13e70165669e2cf4822213cdbc84a Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 11 Dec 2018 08:31:55 +0100 Subject: [PATCH 31/49] [ML-DataFrame] add generation to dataframe state (#36434) add a generation field to the dataframe state, the generation gets increased after every complete data frame indexing (aka builder run) --- .../dataframe/job/DataFrameJobState.java | 20 +++++++-- .../xpack/dataframe/job/DataFrameJobTask.java | 45 ++++++++++++------- .../dataframe/job/DataFrameJobStateTests.java | 2 +- 3 files changed, 46 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java index 4fcf15908b08e..4cb4c7e8e8848 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java @@ -34,16 +34,18 @@ public class DataFrameJobState implements Task.Status, PersistentTaskState { public static final String NAME = DataFrame.TASK_NAME; private final IndexerState state; + private final long generation; @Nullable private final SortedMap currentPosition; private static final ParseField STATE = new ParseField("job_state"); private static final ParseField CURRENT_POSITION = new ParseField("current_position"); + private static final ParseField GENERATION = new ParseField("generation"); @SuppressWarnings("unchecked") public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - args -> new DataFrameJobState((IndexerState) args[0], (HashMap) args[1])); + args -> new DataFrameJobState((IndexerState) args[0], (HashMap) args[1], (long) args[2])); static { PARSER.declareField(constructorArg(), p -> { @@ -62,16 +64,19 @@ public class DataFrameJobState implements Task.Status, PersistentTaskState { } throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), GENERATION); } - public DataFrameJobState(IndexerState state, @Nullable Map position) { + public DataFrameJobState(IndexerState state, @Nullable Map position, long generation) { this.state = state; this.currentPosition = position == null ? null : Collections.unmodifiableSortedMap(new TreeMap<>(position)); + this.generation = generation; } public DataFrameJobState(StreamInput in) throws IOException { state = IndexerState.fromStream(in); currentPosition = in.readBoolean() ? 
Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())) : null; + generation = in.readLong(); } public IndexerState getIndexerState() { @@ -82,6 +87,10 @@ public Map getPosition() { return currentPosition; } + public long getGeneration() { + return generation; + } + public static DataFrameJobState fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); @@ -97,6 +106,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (currentPosition != null) { builder.field(CURRENT_POSITION.getPreferredName(), currentPosition); } + builder.field(GENERATION.getPreferredName(), generation); builder.endObject(); return builder; } @@ -113,6 +123,7 @@ public void writeTo(StreamOutput out) throws IOException { if (currentPosition != null) { out.writeMap(currentPosition); } + out.writeLong(generation); } @Override @@ -127,11 +138,12 @@ public boolean equals(Object other) { DataFrameJobState that = (DataFrameJobState) other; - return Objects.equals(this.state, that.state) && Objects.equals(this.currentPosition, that.currentPosition); + return Objects.equals(this.state, that.state) && Objects.equals(this.currentPosition, that.currentPosition) + && this.generation == that.generation; } @Override public int hashCode() { - return Objects.hash(state, currentPosition); + return Objects.hash(state, currentPosition, generation); } } \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 4e747e35c1f30..a325a374b9fe1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -41,6 +41,11 @@ public class DataFrameJobTask extends AllocatedPersistentTask implements Schedul private final ThreadPool threadPool; private final DataFrameIndexer indexer; + // the generation of this data frame, for v1 there will be only + // 0: data frame not created or still indexing + // 1: data frame complete, all data has been indexed + private final AtomicReference generation; + static final String SCHEDULE_NAME = DataFrame.TASK_NAME + "/schedule"; public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, @@ -69,6 +74,7 @@ public DataFrameJobTask(long id, String type, String action, TaskId parentTask, } this.indexer = new ClientDataFrameIndexer(job, new AtomicReference<>(initialState), initialPosition, client); + this.generation = new AtomicReference<>(0L); } public DataFrameJobConfig getConfig() { @@ -84,13 +90,17 @@ public Status getStatus() { } public DataFrameJobState getState() { - return new DataFrameJobState(indexer.getState(), indexer.getPosition()); + return new DataFrameJobState(indexer.getState(), indexer.getPosition(), generation.get()); } public DataFrameIndexerJobStats getStats() { return indexer.getStats(); } + public long getGeneration() { + return generation.get(); + } + public synchronized void start(ActionListener listener) { final IndexerState prevState = indexer.getState(); if (prevState != IndexerState.STOPPED) { @@ -107,7 +117,7 @@ public synchronized void start(ActionListener listener) { return; } - final DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition()); + final DataFrameJobState state = new 
DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); logger.debug("Updating state for data frame job [{}] to [{}][{}]", job.getConfig().getId(), state.getIndexerState(), state.getPosition()); @@ -139,7 +149,7 @@ public synchronized void stop(ActionListener li // position. // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up at last checkpoint, // overwrite some docs and eventually checkpoint. - DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition()); + DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); updatePersistentTaskState(state, ActionListener.wrap((task) -> { logger.debug("Successfully updated state for data frame job [{}] to [{}]", job.getConfig().getId(), state.getIndexerState()); @@ -159,9 +169,8 @@ public synchronized void stop(ActionListener li @Override public synchronized void triggered(Event event) { - if (event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { - logger.debug( - "Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); + if (generation.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { + logger.debug("Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); indexer.maybeTriggerAsyncJob(System.currentTimeMillis()); } } @@ -224,17 +233,21 @@ protected void doSaveState(IndexerState indexerState, Map positi if (indexerState.equals(IndexerState.ABORTING)) { // If we're aborting, just invoke `next` (which is likely an onFailure handler) next.run(); - } else { - final DataFrameJobState state = new DataFrameJobState(indexerState, getPosition()); - logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); - - // TODO: we can not persist the state right now, need to be called from the task - updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> { - // We failed to update the persistent task for some reason, - // set our flag back to what it was before - next.run(); - })); + return; + } + + if(indexerState.equals(IndexerState.STARTED)) { + // if the indexer resets the state to started, it means it is done, so increment the generation + generation.compareAndSet(0L, 1L); } + + final DataFrameJobState state = new DataFrameJobState(indexerState, getPosition(), generation.get()); + logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); + + updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> { + logger.error("Updating persistent state of job [" + job.getConfig().getId() + "] failed", exc); + next.run(); + })); } @Override diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java index 4fad83be27210..93d2e556147bc 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java @@ -18,7 +18,7 @@ public class DataFrameJobStateTests extends AbstractSerializingTestCase { public static DataFrameJobState randomDataFrameJobState() { - return new 
DataFrameJobState(randomFrom(IndexerState.values()), randomPosition()); + return new DataFrameJobState(randomFrom(IndexerState.values()), randomPosition(), randomLongBetween(0,10)); } @Override From 2f98c1d1418fb80f4d7f28b3824261aa765f349c Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 12 Dec 2018 13:04:54 +0100 Subject: [PATCH 32/49] [ML-Dataframe] add integration tests (#34554) add basic integration testing --- x-pack/plugin/data-frame/build.gradle | 12 + x-pack/plugin/data-frame/qa/build.gradle | 14 + .../qa/single-node-tests/build.gradle | 12 + .../integration/DataframePivotRestIT.java | 252 ++++++++++++++++++ 4 files changed, 290 insertions(+) create mode 100644 x-pack/plugin/data-frame/qa/build.gradle create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/build.gradle create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java diff --git a/x-pack/plugin/data-frame/build.gradle b/x-pack/plugin/data-frame/build.gradle index bd047f68e2be9..caba438ffaf75 100644 --- a/x-pack/plugin/data-frame/build.gradle +++ b/x-pack/plugin/data-frame/build.gradle @@ -24,4 +24,16 @@ run { plugin xpackModule('core') } +// xpack modules are installed in real clusters as the meta plugin, so +// installing them as individual plugins for integ tests doesn't make sense, +// so we disable integ tests integTest.enabled = false + +// add all sub-projects of the qa sub-project +gradle.projectsEvaluated { + project.subprojects + .find { it.path == project.path + ":qa" } + .subprojects + .findAll { it.path.startsWith(project.path + ":qa") } + .each { check.dependsOn it.check } +} diff --git a/x-pack/plugin/data-frame/qa/build.gradle b/x-pack/plugin/data-frame/qa/build.gradle new file mode 100644 index 0000000000000..4f0103c9c1232 --- /dev/null +++ b/x-pack/plugin/data-frame/qa/build.gradle @@ -0,0 +1,14 @@ +/* Remove assemble on all qa projects because we don't need to publish + * artifacts for them. */ +gradle.projectsEvaluated { + subprojects { + Task assemble = project.tasks.findByName('assemble') + if (assemble) { + assemble.enabled = false + } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + dependenciesInfo.enabled = false + } + } +} diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle new file mode 100644 index 0000000000000..fccd98313dc07 --- /dev/null +++ b/x-pack/plugin/data-frame/qa/single-node-tests/build.gradle @@ -0,0 +1,12 @@ +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile project(path: xpackModule('data-frame'), configuration: 'runtime') +} + +integTestCluster { + setting 'xpack.security.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' +} diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java new file mode 100644 index 0000000000000..5f08322fc5d96 --- /dev/null +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java @@ -0,0 +1,252 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.integration; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.dataframe.DataFrame; +import org.junit.AfterClass; +import org.junit.Before; +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +public class DataframePivotRestIT extends ESRestTestCase { + + private static final String DATAFRAME_ENDPOINT = DataFrame.BASE_PATH + "jobs/"; + private boolean indicesCreated = false; + + // preserve indices in order to reuse source indices in several test cases + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Before + public void createReviewsIndex() throws IOException { + + // it's not possible to run it as @BeforeClass as clients aren't initialized then, so we need this little hack + if (indicesCreated) { + return; + } + + int[] distributionTable = {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 3, 3, 2, 1, 1, 1}; + + final int numDocs = 1000; + + // create mapping + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.startObject("mappings") + .startObject("_doc") + .startObject("properties") + .startObject("user_id") + .field("type", "keyword") + .endObject() + .startObject("business_id") + .field("type", "keyword") + .endObject() + .startObject("stars") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .endObject(); + } + builder.endObject(); + final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + Request req = new Request("PUT", "reviews"); + req.setEntity(entity); + client().performRequest(req); + } + + // create index + final StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < numDocs; i++) { + bulk.append("{\"index\":{\"_index\":\"reviews\",\"_type\":\"_doc\"}}\n"); + long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); + int stars = distributionTable[(i * 33) % distributionTable.length]; + long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); + bulk.append("{\"user_id\":\"") + .append("user_") + .append(user) + .append("\",\"business_id\":\"") + .append("business_") + .append(business) + .append("\",\"stars\":") + .append(stars) + .append("}\n"); + + if (i % 50 == 0) { + bulk.append("\r\n"); + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + // clear the builder + bulk.setLength(0); + } + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", 
"/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + indicesCreated = true; + } + + @AfterClass + public static void removeIndices() throws Exception { + wipeDataFrameJobs(); + waitForPendingDataFrameTasks(); + // we disabled wiping indices, but now its time to get rid of them + // note: can not use super.cleanUpCluster() as this method must be static + wipeIndices(); + } + + public void testSimplePivot() throws Exception { + String jobId = "simplePivot"; + String dataFrameIndex = "pivot_reviews"; + + final Request createDataframeJobRequest = new Request("PUT", DATAFRAME_ENDPOINT + jobId); + createDataframeJobRequest.setJsonEntity("{" + + " \"index_pattern\": \"reviews\"," + + " \"destination_index\": \"" + dataFrameIndex + "\"," + + " \"sources\": {" + + " \"sources\": [ {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } } ] }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } }" + + "}"); + Map createDataframeJobResponse = entityAsMap(client().performRequest(createDataframeJobRequest)); + assertThat(createDataframeJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + // start the job + final Request startJobRequest = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_start"); + Map startJobResponse = entityAsMap(client().performRequest(startJobRequest)); + assertThat(startJobResponse.get("started"), equalTo(Boolean.TRUE)); + + // wait until the dataframe has been created and all data is available + waitForDataFrameGeneration(jobId); + refreshIndex(dataFrameIndex); + + // we expect 27 documents as there shall be 27 user_id's + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(27, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + + // get and check some users + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_0", 3.776978417); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_5", 3.72); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_11", 3.846153846); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_20", 3.769230769); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); + } + + private void waitForDataFrameGeneration(String jobId) throws Exception { + assertBusy(() -> { + long generation = getDataFrameGeneration(jobId); + assertEquals(1, generation); + }, 30, TimeUnit.SECONDS); + } + + private int getDataFrameGeneration(String jobId) throws IOException { + Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); + + Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); + return (int) XContentMapValues.extractValue("state.generation", jobStatsAsMap); + } + + private void refreshIndex(String index) throws IOException { + assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); + } + + private void assertOnePivotValue(String query, double expected) throws IOException { + Map searchResult = getAsMap(query); + + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + double actual = (double) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", searchResult)).get(0); + assertEquals(expected, actual, 0.000001); + } + + private static void wipeDataFrameJobs() throws IOException, 
InterruptedException { + Response response = adminClient().performRequest(new Request("GET", DATAFRAME_ENDPOINT + "_all")); + Map jobs = entityAsMap(response); + @SuppressWarnings("unchecked") + List> jobConfigs = + (List>) XContentMapValues.extractValue("jobs", jobs); + + if (jobConfigs == null) { + return; + } + + for (Map jobConfig : jobConfigs) { + String jobId = (String) jobConfig.get("id"); + Request request = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_stop"); + request.addParameter("ignore", "404"); + adminClient().performRequest(request); + } + + // TODO this is temporary until the StopDataFrameJob API gains the ability to block until stopped + boolean stopped = awaitBusy(() -> { + Request request = new Request("GET", DATAFRAME_ENDPOINT + "_all"); + try { + Response jobsResponse = adminClient().performRequest(request); + String body = EntityUtils.toString(jobsResponse.getEntity()); + // If the body contains any of the non-stopped states, at least one job is not finished yet + return Arrays.stream(new String[]{"started", "aborting", "stopping", "indexing"}).noneMatch(body::contains); + } catch (IOException e) { + return false; + } + }, 10, TimeUnit.SECONDS); + + assertTrue("Timed out waiting for data frame job(s) to stop", stopped); + + for (Map jobConfig : jobConfigs) { + String jobId = (String) jobConfig.get("id"); + Request request = new Request("DELETE", DATAFRAME_ENDPOINT + jobId); + request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this + adminClient().performRequest(request); + } + } + + private static void waitForPendingDataFrameTasks() throws Exception { + waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrame.TASK_NAME) == false); + } + + private static void wipeIndices() throws IOException { + try { + adminClient().performRequest(new Request("DELETE", "*")); + } catch (ResponseException e) { + // 404 here just means we had no indexes + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } +} From 239eb2788f166f50797158ff005b7dfb33a8544f Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 13 Dec 2018 10:10:42 +0100 Subject: [PATCH 33/49] move common fields and string into a utility class (#36557) move common fields and string into a utility class --- .../xpack/core/dataframe/DataFrameField.java | 31 +++++++++++++++++++ .../integration/DataframePivotRestIT.java | 6 ++-- .../xpack/dataframe/DataFrame.java | 6 ++-- .../action/DataFrameJobStateAndStats.java | 6 ++-- .../action/DeleteDataFrameJobAction.java | 8 ++--- .../action/GetDataFrameJobsAction.java | 15 ++++----- .../action/GetDataFrameJobsStatsAction.java | 16 ++++------ .../action/StartDataFrameJobAction.java | 6 ++-- .../action/StopDataFrameJobAction.java | 10 +++--- .../xpack/dataframe/job/DataFrameJob.java | 8 ++--- .../dataframe/job/DataFrameJobConfig.java | 8 ++--- .../DataFrameJobPersistentTasksExecutor.java | 3 +- .../dataframe/job/DataFrameJobState.java | 4 +-- .../xpack/dataframe/job/DataFrameJobTask.java | 6 ++-- .../DataFramePersistentTaskUtils.java | 4 +-- .../action/RestDeleteDataFrameJobAction.java | 7 ++--- .../action/RestGetDataFrameJobsAction.java | 7 ++--- .../RestGetDataFrameJobsStatsAction.java | 7 ++--- .../action/RestPutDataFrameJobAction.java | 7 ++--- .../action/RestStartDataFrameJobAction.java | 4 +-- .../action/RestStopDataFrameJobAction.java | 7 ++--- 21 files changed, 95 insertions(+), 81 deletions(-) create mode 100644 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java new file mode 100644 index 0000000000000..841b686deff54 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe; + +import org.elasticsearch.common.ParseField; + +/* + * Utility class to hold common fields and strings for data frame. + */ +public final class DataFrameField { + + // common parse fields + public static final ParseField ID = new ParseField("id"); + public static final ParseField JOBS = new ParseField("jobs"); + public static final ParseField COUNT = new ParseField("count"); + + // common strings + public static final String TASK_NAME = "data_frame/jobs"; + public static final String REST_BASE_PATH = "/_data_frame/"; + public static final String REST_BASE_PATH_JOBS_BY_ID = REST_BASE_PATH + "jobs/{id}/"; + + // note: this is used to match tasks + public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; + + private DataFrameField() { + } +} diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java index 5f08322fc5d96..7eac6564e34cd 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.junit.AfterClass; import org.junit.Before; import java.io.IOException; @@ -30,7 +30,7 @@ public class DataframePivotRestIT extends ESRestTestCase { - private static final String DATAFRAME_ENDPOINT = DataFrame.BASE_PATH + "jobs/"; + private static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "jobs/"; private boolean indicesCreated = false; // preserve indices in order to reuse source indices in several test cases @@ -236,7 +236,7 @@ private static void wipeDataFrameJobs() throws IOException, InterruptedException } private static void waitForPendingDataFrameTasks() throws Exception { - waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrame.TASK_NAME) == false); + waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrameField.TASK_NAME) == false); } private static void wipeIndices() throws IOException { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index 
73dfe62960707..a7d7f94381e46 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -35,6 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; @@ -73,9 +74,6 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlugin { public static final String NAME = "data_frame"; - public static final String TASK_NAME = "data_frame/jobs"; - public static final String BASE_PATH = "/_data_frame/"; - public static final String BASE_PATH_JOBS_BY_ID = BASE_PATH + "jobs/{id}/"; public static final String TASK_THREAD_POOL_NAME = "data_frame_indexing"; // list of headers that will be stored when a job is created @@ -172,7 +170,7 @@ public List getNamedXContent() { return emptyList(); } return Arrays.asList( - new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(TASK_NAME), + new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(DataFrameField.TASK_NAME), DataFrameJob::fromXContent), new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(DataFrameJobState.NAME), DataFrameJobState::fromXContent), diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java index 442433cc56d4d..b4585dc3f3611 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java @@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import org.elasticsearch.xpack.dataframe.job.DataFrameJobState; import java.io.IOException; @@ -34,7 +34,7 @@ public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { a -> new DataFrameJobStateAndStats((String) a[0], (DataFrameJobState) a[1], (DataFrameIndexerJobStats) a[2])); static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameJob.ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID); PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameJobState.PARSER::apply, STATE_FIELD); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), STATS_FIELD); } @@ -54,7 +54,7 @@ public DataFrameJobStateAndStats(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(DataFrameJob.ID.getPreferredName(), id); + builder.field(DataFrameField.ID.getPreferredName(), id); 
builder.field(STATE_FIELD.getPreferredName(), jobState); builder.field(STATS_FIELD.getPreferredName(), jobStats); builder.endObject(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java index 48723b789a7fe..1aec0746ef2a9 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java @@ -20,8 +20,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Collections; @@ -46,7 +46,7 @@ public static class Request extends BaseTasksRequest implements ToXCont private String id; public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); } public Request() { @@ -63,7 +63,7 @@ public String getId() { @Override public boolean match(Task task) { - return task.getDescription().equals(DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); + return task.getDescription().equals(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id); } @Override @@ -79,7 +79,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(DataFrameJob.ID.getPreferredName(), id); + builder.field(DataFrameField.ID.getPreferredName(), id); return builder; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java index 41c00ffa4ff7c..d783f9b501f14 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -24,7 +23,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import java.io.IOException; @@ -36,8 +35,6 @@ public class GetDataFrameJobsAction extends Action implements ToXCont private String id; public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, 
DataFrameField.ID.getPreferredName()); } public Request() { @@ -71,7 +71,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(DataFrameJob.ID.getPreferredName(), id); + builder.field(DataFrameField.ID.getPreferredName(), id); return builder; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java index 6a65151289565..08c517a380c30 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java @@ -19,8 +19,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; import java.util.Collections; @@ -46,11 +46,11 @@ public static class Request extends BaseTasksRequest implements ToXCont public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); static { - PARSER.declareString(Request::setId, DataFrameJob.ID); + PARSER.declareString(Request::setId, DataFrameField.ID); } public Request(String id) { - this.id = ExceptionsHelper.requireNonNull(id, DataFrameJob.ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); } public Request() { @@ -82,7 +82,7 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(DataFrameJob.ID.getPreferredName(), id); + builder.field(DataFrameField.ID.getPreferredName(), id); return builder; } @@ -106,7 +106,7 @@ public boolean equals(Object obj) { @Override public boolean match(Task task) { - String expectedDescription = DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + id; + String expectedDescription = DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + id; return task.getDescription().equals(expectedDescription); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java index 52ec6043b6573..4118df06df0b4 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import java.io.IOException; import java.util.Collections; @@ -24,11 +24,7 @@ public class DataFrameJob extends AbstractDiffable implements XPackPlugin.XPackPersistentTaskParams { - public static final ParseField ID = new ParseField("id"); - public static final String NAME = DataFrame.TASK_NAME; - - // note: this is used to match tasks - public static final 
String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; + public static final String NAME = DataFrameField.TASK_NAME; private DataFrameJobConfig config; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java index 9f58a7305076f..db23feb8d10ac 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; import java.util.Objects; @@ -29,7 +30,6 @@ public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { private static final String NAME = "xpack/data_frame/jobconfig"; - private static final ParseField ID = new ParseField("id"); private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); private static final ParseField DESTINATION_INDEX = new ParseField("destination_index"); private static final ParseField SOURCES = new ParseField("sources"); @@ -52,7 +52,7 @@ public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { }); static { - PARSER.declareString(optionalConstructorArg(), ID); + PARSER.declareString(optionalConstructorArg(), DataFrameField.ID); PARSER.declareString(constructorArg(), INDEX_PATTERN); PARSER.declareString(constructorArg(), DESTINATION_INDEX); PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p), SOURCES); @@ -64,7 +64,7 @@ public DataFrameJobConfig(final String id, final String destinationIndex, final SourceConfig sourceConfig, final AggregationConfig aggregationConfig) { - this.id = ExceptionsHelper.requireNonNull(id, ID.getPreferredName()); + this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); this.indexPattern = ExceptionsHelper.requireNonNull(indexPattern, INDEX_PATTERN.getPreferredName()); this.destinationIndex = ExceptionsHelper.requireNonNull(destinationIndex, DESTINATION_INDEX.getPreferredName()); @@ -115,7 +115,7 @@ public void writeTo(final StreamOutput out) throws IOException { public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - builder.field(ID.getPreferredName(), id); + builder.field(DataFrameField.ID.getPreferredName(), id); builder.field(INDEX_PATTERN.getPreferredName(), indexPattern); builder.field(DESTINATION_INDEX.getPreferredName(), destinationIndex); if (sourceConfig != null) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java index ba9d3344fa015..99135d8df5a35 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java @@ -16,6 +16,7 @@ import org.elasticsearch.persistent.PersistentTasksExecutor; import 
org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.DataFrame; @@ -31,7 +32,7 @@ public class DataFrameJobPersistentTasksExecutor extends PersistentTasksExecutor public DataFrameJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool) { - super(DataFrame.TASK_NAME, DataFrame.TASK_THREAD_POOL_NAME); + super(DataFrameField.TASK_NAME, DataFrame.TASK_THREAD_POOL_NAME); this.client = client; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java index 4cb4c7e8e8848..fe991b8ed6102 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java @@ -16,8 +16,8 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.persistent.PersistentTaskState; import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.dataframe.DataFrame; import java.io.IOException; import java.util.Collections; @@ -31,7 +31,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class DataFrameJobState implements Task.Status, PersistentTaskState { - public static final String NAME = DataFrame.TASK_NAME; + public static final String NAME = DataFrameField.TASK_NAME; private final IndexerState state; private final long generation; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index a325a374b9fe1..c93f39fc57cb2 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -21,10 +21,10 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; -import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Response; @@ -46,12 +46,12 @@ public class DataFrameJobTask extends AllocatedPersistentTask implements Schedul // 1: data frame complete, all data has been indexed private final AtomicReference generation; - static final String SCHEDULE_NAME = DataFrame.TASK_NAME + "/schedule"; + static final String SCHEDULE_NAME = DataFrameField.TASK_NAME + "/schedule"; public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, 
DataFrameJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { - super(id, type, action, DataFrameJob.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); + super(id, type, action, DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); this.job = job; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java index 0573794d81398..496cf18a3dc6a 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java @@ -9,7 +9,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; public final class DataFramePersistentTaskUtils { @@ -29,7 +29,7 @@ public static boolean stateHasDataFrameJobs(String id, ClusterState state) { // persistent tasks and see if at least once has a DataFrameJob param if (id.equals(MetaData.ALL)) { hasJobs = pTasksMeta.tasks().stream() - .anyMatch(persistentTask -> persistentTask.getTaskName().equals(DataFrame.TASK_NAME)); + .anyMatch(persistentTask -> persistentTask.getTaskName().equals(DataFrameField.TASK_NAME)); } else if (pTasksMeta.getTask(id) != null) { // If we're looking for a single job, we can just check directly diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java index 8923a27ccbd56..d7c88e726949a 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java @@ -13,9 +13,8 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; @@ -23,12 +22,12 @@ public class RestDeleteDataFrameJobAction extends BaseRestHandler { public RestDeleteDataFrameJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.DELETE, DataFrame.BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.DELETE, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameField.ID.getPreferredName()); DeleteDataFrameJobAction.Request request = new 
DeleteDataFrameJobAction.Request(id); return channel -> client.execute(DeleteDataFrameJobAction.INSTANCE, request, diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java index 9f24bacc272d4..c54c48ad6f182 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java @@ -12,20 +12,19 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; public class RestGetDataFrameJobsAction extends BaseRestHandler { public RestGetDataFrameJobsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, DataFrame.BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameField.ID.getPreferredName()); GetDataFrameJobsAction.Request request = new GetDataFrameJobsAction.Request(id); return channel -> client.execute(GetDataFrameJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java index dcedb3305b4c6..7e8fc16b58b3f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java @@ -12,20 +12,19 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; public class RestGetDataFrameJobsStatsAction extends BaseRestHandler { public RestGetDataFrameJobsStatsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, DataFrame.BASE_PATH_JOBS_BY_ID + "_stats", this); + controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_stats", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameField.ID.getPreferredName()); GetDataFrameJobsStatsAction.Request request = new 
GetDataFrameJobsStatsAction.Request(id); return channel -> client.execute(GetDataFrameJobsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java index bf7ade8e9aad2..ac04981586dec 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java @@ -13,9 +13,8 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; @@ -23,7 +22,7 @@ public class RestPutDataFrameJobAction extends BaseRestHandler { public RestPutDataFrameJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.PUT, DataFrame.BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.PUT, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); } @Override @@ -33,7 +32,7 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameField.ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); PutDataFrameJobAction.Request request = PutDataFrameJobAction.Request.fromXContent(parser, id); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java index bc17462a8b8a5..4e984f587bda3 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java @@ -12,8 +12,8 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.rollup.RollupField; -import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; import java.io.IOException; @@ -22,7 +22,7 @@ public class RestStartDataFrameJobAction extends BaseRestHandler { public RestStartDataFrameJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, DataFrame.BASE_PATH_JOBS_BY_ID + "_start", this); + controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_start", this); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java index 76cace53d90aa..7f17a388b5b93 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java @@ -11,9 +11,8 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import java.io.IOException; @@ -21,12 +20,12 @@ public class RestStopDataFrameJobAction extends BaseRestHandler { public RestStopDataFrameJobAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, DataFrame.BASE_PATH_JOBS_BY_ID + "_stop", this); + controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_stop", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String id = restRequest.param(DataFrameJob.ID.getPreferredName()); + String id = restRequest.param(DataFrameField.ID.getPreferredName()); StopDataFrameJobAction.Request request = new StopDataFrameJobAction.Request(id); return channel -> client.execute(StopDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); From 972c063b23bbb30d1027b258acbc6e7da8d41128 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 19 Dec 2018 09:56:35 +0100 Subject: [PATCH 34/49] [ML-DataFrame] add a wait_for_completion option to the stop data frame api (#36701) add a wait_for_completion option to the stop data frame api --- .../xpack/core/dataframe/DataFrameField.java | 2 + .../core/dataframe/DataFrameMessages.java | 39 +++++++++++++++ .../dataframe/DataFrameMessagesTests.java | 18 +++++++ .../integration/DataframePivotRestIT.java | 29 ++++------- .../action/StopDataFrameJobAction.java | 41 +++++++++++---- .../TransportStopDataFrameJobAction.java | 50 ++++++++++++++++++- .../xpack/dataframe/job/DataFrameJobTask.java | 4 ++ .../action/RestStopDataFrameJobAction.java | 6 ++- .../StopDataFrameJobActionRequestTests.java | 15 +++++- 9 files changed, 172 insertions(+), 32 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index 841b686deff54..db14947c8d3af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -17,6 +17,8 @@ public final class DataFrameField { public static final ParseField ID = new ParseField("id"); public static final ParseField JOBS = new ParseField("jobs"); public static final ParseField COUNT = new ParseField("count"); + public static final ParseField TIMEOUT = new ParseField("timeout"); + 
public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); // common strings public static final String TASK_NAME = "data_frame/jobs"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java new file mode 100644 index 0000000000000..f7d3514e1a442 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe; + +import java.text.MessageFormat; +import java.util.Locale; + +public class DataFrameMessages { + + public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT = + "Timed out after [{0}] while waiting for data frame job [{1}] to stop"; + public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_INTERRUPT = "Interrupted while waiting for data frame job [{0}] to stop"; + + private DataFrameMessages() { + } + + /** + * Returns the message parameter + * + * @param message Should be one of the statics defined in this class + */ + public static String getMessage(String message) { + return message; + } + + /** + * Format the message with the supplied arguments + * + * @param message Should be one of the statics defined in this class + * @param args MessageFormat arguments. See {@linkplain MessageFormat#format(Object)}] + */ + public static String getMessage(String message, Object... args) { + return new MessageFormat(message, Locale.ROOT).format(args); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java new file mode 100644 index 0000000000000..ffe49918e97a3 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe; + +import org.elasticsearch.test.ESTestCase; + +public class DataFrameMessagesTests extends ESTestCase { + + public void testGetMessage_WithFormatStrings() { + String formattedMessage = DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT, "30s", + "my_job"); + assertEquals("Timed out after [30s] while waiting for data frame job [my_job] to stop", formattedMessage); + } +} diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java index 7eac6564e34cd..d764737811f0d 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java @@ -8,7 +8,6 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -20,7 +19,6 @@ import org.junit.AfterClass; import org.junit.Before; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -175,13 +173,20 @@ private void waitForDataFrameGeneration(String jobId) throws Exception { }, 30, TimeUnit.SECONDS); } - private int getDataFrameGeneration(String jobId) throws IOException { + private static int getDataFrameGeneration(String jobId) throws IOException { Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); return (int) XContentMapValues.extractValue("state.generation", jobStatsAsMap); } + private static String getDataFrameIndexerState(String jobId) throws IOException { + Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); + + Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); + return (String) XContentMapValues.extractValue("state.job_state", jobStatsAsMap); + } + private void refreshIndex(String index) throws IOException { assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); } @@ -208,25 +213,13 @@ private static void wipeDataFrameJobs() throws IOException, InterruptedException for (Map jobConfig : jobConfigs) { String jobId = (String) jobConfig.get("id"); Request request = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_stop"); + request.addParameter("wait_for_completion", "true"); + request.addParameter("timeout", "10s"); request.addParameter("ignore", "404"); adminClient().performRequest(request); + assertEquals("stopped", getDataFrameIndexerState(jobId)); } - // TODO this is temporary until the StopDataFrameJob API gains the ability to block until stopped - boolean stopped = awaitBusy(() -> { - Request request = new Request("GET", DATAFRAME_ENDPOINT + "_all"); - try { - Response jobsResponse = adminClient().performRequest(request); - String body = EntityUtils.toString(jobsResponse.getEntity()); - // If the body contains any of the non-stopped states, at least one job is not finished yet - 
return Arrays.stream(new String[]{"started", "aborting", "stopping", "indexing"}).noneMatch(body::contains); - } catch (IOException e) { - return false; - } - }, 10, TimeUnit.SECONDS); - - assertTrue("Timed out waiting for data frame job(s) to stop", stopped); - for (Map jobConfig : jobConfigs) { String jobId = (String) jobConfig.get("id"); Request request = new Request("DELETE", DATAFRAME_ENDPOINT + jobId); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java index 08c517a380c30..b31bde4f843c6 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java @@ -11,10 +11,11 @@ import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -25,12 +26,15 @@ import java.io.IOException; import java.util.Collections; import java.util.Objects; +import java.util.concurrent.TimeUnit; public class StopDataFrameJobAction extends Action { public static final StopDataFrameJobAction INSTANCE = new StopDataFrameJobAction(); public static final String NAME = "cluster:admin/data_frame/stop"; + public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private StopDataFrameJobAction() { super(NAME); } @@ -42,23 +46,24 @@ public Response newResponse() { public static class Request extends BaseTasksRequest implements ToXContent { private String id; + private final boolean waitForCompletion; - public static ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); - - static { - PARSER.declareString(Request::setId, DataFrameField.ID); - } - - public Request(String id) { + public Request(String id, boolean waitForCompletion, @Nullable TimeValue timeout) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); + this.waitForCompletion = waitForCompletion; + + // use the timeout value already present in BaseTasksRequest + this.setTimeout(timeout == null ? 
DEFAULT_TIMEOUT : timeout); } public Request() { + this(null, false, null); } public Request(StreamInput in) throws IOException { super(in); id = in.readString(); + waitForCompletion = in.readBoolean(); } public String getId() { @@ -69,10 +74,15 @@ public void setId(String id) { this.id = id; } + public boolean waitForCompletion() { + return waitForCompletion; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); + out.writeBoolean(waitForCompletion); } @Override @@ -83,12 +93,17 @@ public ActionRequestValidationException validate() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(DataFrameField.ID.getPreferredName(), id); + builder.field(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), waitForCompletion); + if (this.getTimeout() != null) { + builder.field(DataFrameField.TIMEOUT.getPreferredName(), this.getTimeout()); + } return builder; } @Override public int hashCode() { - return Objects.hash(id); + // the base class does not implement hashCode, therefore we need to hash timeout ourselves + return Objects.hash(id, waitForCompletion, this.getTimeout()); } @Override @@ -101,7 +116,13 @@ public boolean equals(Object obj) { return false; } Request other = (Request) obj; - return Objects.equals(id, other.id); + + // the base class does not implement equals, therefore we need to compare timeout ourselves + if (Objects.equals(this.getTimeout(), other.getTimeout()) == false) { + return false; + } + + return Objects.equals(id, other.id) && Objects.equals(waitForCompletion, other.waitForCompletion); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java index 5c0cb444754fe..e76c6dabc3e3c 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.dataframe.action; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; @@ -14,21 +16,30 @@ import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; import java.util.List; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + public class TransportStopDataFrameJobAction extends TransportTasksAction { + private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100); + private final ThreadPool threadPool; + @Inject - public TransportStopDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { + public TransportStopDataFrameJobAction(TransportService transportService, 
ActionFilters actionFilters, ClusterService clusterService, + ThreadPool threadPool) { super(StopDataFrameJobAction.NAME, clusterService, transportService, actionFilters, StopDataFrameJobAction.Request::new, StopDataFrameJobAction.Response::new, StopDataFrameJobAction.Response::new, ThreadPool.Names.SAME); + this.threadPool = threadPool; } @Override @@ -41,7 +52,42 @@ protected void doExecute(Task task, StopDataFrameJobAction.Request request, protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJobTask jobTask, ActionListener listener) { if (jobTask.getConfig().getId().equals(request.getId())) { - jobTask.stop(listener); + if (request.waitForCompletion() == false) { + jobTask.stop(listener); + } else { + ActionListener blockingListener = ActionListener.wrap(response -> { + if (response.isStopped()) { + // The Task acknowledged that it is stopped/stopping... wait until the status actually + // changes over before returning. Switch over to Generic threadpool so + // we don't block the network thread + threadPool.generic().execute(() -> { + try { + long untilInNanos = System.nanoTime() + request.getTimeout().getNanos(); + + while (System.nanoTime() - untilInNanos < 0) { + if (jobTask.isStopped()) { + listener.onResponse(response); + return; + } + Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis()); + } + // ran out of time + listener.onFailure(new ElasticsearchTimeoutException( + DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT, + request.getTimeout().getStringRep(), request.getId()))); + } catch (InterruptedException e) { + listener.onFailure(new ElasticsearchException(DataFrameMessages + .getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_INTERRUPT, request.getId()), e)); + } + }); + } else { + // Did not acknowledge stop, just return the response + listener.onResponse(response); + } + }, listener::onFailure); + + jobTask.stop(blockingListener); + } } else { listener.onFailure(new RuntimeException("ID of data frame indexer task [" + jobTask.getConfig().getId() + "] does not match request's ID [" + request.getId() + "]")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index c93f39fc57cb2..9059d863aa913 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -101,6 +101,10 @@ public long getGeneration() { return generation.get(); } + public boolean isStopped() { + return indexer.getState().equals(IndexerState.STOPPED); + } + public synchronized void start(ActionListener listener) { final IndexerState prevState = indexer.getState(); if (prevState != IndexerState.STOPPED) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java index 7f17a388b5b93..f5979264d07fd 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java @@ -7,6 +7,7 @@ import org.elasticsearch.client.node.NodeClient; import 
org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -26,7 +27,10 @@ public RestStopDataFrameJobAction(Settings settings, RestController controller) @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String id = restRequest.param(DataFrameField.ID.getPreferredName()); - StopDataFrameJobAction.Request request = new StopDataFrameJobAction.Request(id); + TimeValue timeout = restRequest.paramAsTime(DataFrameField.TIMEOUT.getPreferredName(), StopDataFrameJobAction.DEFAULT_TIMEOUT); + boolean waitForCompletion = restRequest.paramAsBoolean(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), false); + + StopDataFrameJobAction.Request request = new StopDataFrameJobAction.Request(id, waitForCompletion, timeout); return channel -> client.execute(StopDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java index f7e3faa21a5cd..d5a292fe71aac 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.dataframe.action; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction.Request; @@ -14,11 +15,23 @@ public class StopDataFrameJobActionRequestTests extends AbstractWireSerializingT @Override protected Request createTestInstance() { - return new Request(randomAlphaOfLengthBetween(1, 10)); + TimeValue timeout = randomBoolean() ? 
TimeValue.timeValueMinutes(randomIntBetween(1, 10)) : null; + return new Request(randomAlphaOfLengthBetween(1, 10), randomBoolean(), timeout); } @Override protected Writeable.Reader instanceReader() { return Request::new; } + + public void testSameButDifferentTimeout() { + String id = randomAlphaOfLengthBetween(1, 10); + boolean waitForCompletion = randomBoolean(); + + Request r1 = new Request(id, waitForCompletion, TimeValue.timeValueSeconds(10)); + Request r2 = new Request(id, waitForCompletion, TimeValue.timeValueSeconds(20)); + + assertNotEquals(r1,r2); + assertNotEquals(r1.hashCode(),r2.hashCode()); + } } From c7e6480ce5138c55110fa16d5c9380e37bc64869 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 8 Jan 2019 16:08:33 +0100 Subject: [PATCH 35/49] fix merge conflict --- .../java/org/elasticsearch/xpack/core/XPackClientPlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index fe66abf0c8ee7..5907b0ae8f131 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -426,7 +426,7 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), // Data Frame new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_FRAME, DataFrameFeatureSetUsage::new)); } From ce822562e576aa28c39ed741611f68815ab33a1a Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 10 Jan 2019 12:46:23 +0100 Subject: [PATCH 36/49] [ML-DataFrame] Feature/fib jobconfigmanager (#37068) store data frame configurations in an index rather than the cluster state --- .../elasticsearch/client/IndicesClientIT.java | 2 +- .../core/dataframe/DataFrameMessages.java | 14 ++ .../dataframe/DataFrameMessagesTests.java | 56 +++++++ .../DataFrameConfigurationIndexIT.java | 57 +++++++ ...tRestIT.java => DataFramePivotRestIT.java} | 57 +------ .../integration/DataFrameRestTestCase.java | 88 +++++++++++ .../xpack/dataframe/DataFrame.java | 49 +++++- .../TransportDeleteDataFrameJobAction.java | 23 ++- .../TransportGetDataFrameJobsAction.java | 25 +-- .../TransportGetDataFrameJobsStatsAction.java | 4 +- .../TransportPutDataFrameJobAction.java | 85 +++++++--- .../TransportStartDataFrameJobAction.java | 6 +- .../TransportStopDataFrameJobAction.java | 4 +- .../xpack/dataframe/job/DataFrameIndexer.java | 28 ++-- .../xpack/dataframe/job/DataFrameJob.java | 28 ++-- .../dataframe/job/DataFrameJobConfig.java | 41 +++-- .../DataFrameJobPersistentTasksExecutor.java | 11 +- .../xpack/dataframe/job/DataFrameJobTask.java | 104 +++++++++---- .../persistence/DataFrameInternalIndex.java | 105 +++++++++++++ .../DataFrameJobConfigManager.java | 147 ++++++++++++++++++ .../dataframe/persistence/DataframeIndex.java | 11 +- .../xpack/dataframe/LocalStateDataFrame.java | 27 ++++ .../DataFrameJobConfigManagerTests.java | 91 +++++++++++ 
.../DataFrameSingleNodeTestCase.java | 75 +++++++++ .../security/authz/AuthorizationUtils.java | 2 + 25 files changed, 945 insertions(+), 195 deletions(-) create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java rename x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/{DataframePivotRestIT.java => DataFramePivotRestIT.java} (75%) create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/LocalStateDataFrame.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameSingleNodeTestCase.java diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 92d7e94394594..93235e332787f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -1352,7 +1352,7 @@ public void testCRUDIndexTemplate() throws Exception { assertThat(template2.settings().get("index.number_of_replicas"), equalTo("0")); List names = randomBoolean() - ? Arrays.asList("*-1", "template-2") + ? 
Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); GetIndexTemplatesRequest getBothRequest = new GetIndexTemplatesRequest(names); GetIndexTemplatesResponse getBoth = execute(getBothRequest, client.indices()::getTemplate, client.indices()::getTemplateAsync); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index f7d3514e1a442..d95c6355fcad7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -14,6 +14,20 @@ public class DataFrameMessages { public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT = "Timed out after [{0}] while waiting for data frame job [{1}] to stop"; public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_INTERRUPT = "Interrupted while waiting for data frame job [{0}] to stop"; + public static final String REST_PUT_DATA_FRAME_JOB_EXISTS = "Job with id [{0}] already exists"; + public static final String REST_DATA_FRAME_UNKNOWN_JOB = "Job with id [{0}] could not be found"; + public static final String REST_PUT_DATA_FRAME_FAILED_TO_VALIDATE_DATA_FRAME_CONFIGURATION = + "Failed to validate data frame configuration"; + public static final String REST_PUT_DATA_FRAME_FAILED_PERSIST_JOB_CONFIGURATION = "Failed to persist data frame configuration"; + public static final String REST_PUT_DATA_FRAME_FAILED_TO_DEDUCE_TARGET_MAPPINGS = "Failed to deduce target mappings"; + public static final String REST_PUT_DATA_FRAME_FAILED_TO_CREATE_TARGET_INDEX = "Failed to create target index"; + public static final String REST_PUT_DATA_FRAME_FAILED_TO_START_PERSISTENT_TASK = + "Failed to start persistent task, configuration has been cleaned up: [{0}]"; + public static final String REST_DATA_FRAME_FAILED_TO_SERIALIZE_JOB = "Failed to serialise job [{0}]"; + + public static final String FAILED_TO_CREATE_DESTINATION_INDEX = "Could not create destination index [{0}] for job[{1}]"; + public static final String FAILED_TO_LOAD_JOB_CONFIGURATION = "Failed to load data frame job configuration for job [{0}]"; + public static final String FAILED_TO_PARSE_JOB_CONFIGURATION = "Failed to parse job configuration for data frame job [{0}]"; private DataFrameMessages() { } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java index ffe49918e97a3..8cb4a49639d3b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java @@ -8,6 +8,12 @@ import org.elasticsearch.test.ESTestCase; +import java.lang.reflect.Field; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + public class DataFrameMessagesTests extends ESTestCase { public void testGetMessage_WithFormatStrings() { @@ -15,4 +21,54 @@ public void testGetMessage_WithFormatStrings() { "my_job"); assertEquals("Timed out after [30s] while waiting for data frame job [my_job] to stop", formattedMessage); } + + public void testMessageProperFormat() throws IllegalArgumentException, IllegalAccessException { + Field[] declaredFields = 
DataFrameMessages.class.getFields(); + int checkedMessages = 0; + + for (Field field : declaredFields) { + int modifiers = field.getModifiers(); + if (java.lang.reflect.Modifier.isStatic(modifiers) && java.lang.reflect.Modifier.isFinal(modifiers) + && field.getType().isAssignableFrom(String.class)) { + + assertSingleMessage((String) field.get(DataFrameMessages.class)); + ++checkedMessages; + } + } + assertTrue(checkedMessages > 0); + logger.info("Checked {} messages", checkedMessages); + } + + public void testAssertSingleMessage() { + expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("missing zero position {1} {1}")); + expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("incomplete {}")); + expectThrows(RuntimeException.class, () -> innerAssertSingleMessage("count from 1 {1}")); + } + + private void assertSingleMessage(String message) { + // for testing the test method, we can not assert directly, but wrap it with an exception, which also + // nicely encapsulate parsing errors thrown by MessageFormat itself + try { + innerAssertSingleMessage(message); + } catch (Exception e) { + fail(e.getMessage()); + } + } + + private void innerAssertSingleMessage(String message) { + MessageFormat messageWithNoArguments = new MessageFormat(message, Locale.ROOT); + int numberOfArguments = messageWithNoArguments.getFormats().length; + + List args = new ArrayList<>(); + for (int i = 0; i < numberOfArguments; ++i) { + args.add(randomAlphaOfLength(5)); + } + + String properFormatedMessage = new MessageFormat(message, Locale.ROOT).format(args.toArray(new String[0])); + for (String arg : args) { + if (properFormatedMessage.contains(arg) == false) { + throw new RuntimeException("Message check: [" + message + "] failed, missing argument"); + } + } + } } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java new file mode 100644 index 0000000000000..08c797f63cdcf --- /dev/null +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.integration; + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; + +import java.io.IOException; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +public class DataFrameConfigurationIndexIT extends DataFrameRestTestCase { + + /** + * Tests the corner case that for some reason a job configuration still exists in the index but + * the persistent task disappeared + * + * test note: {@link DataFrameRestTestCase} checks for an empty index as part of the test case cleanup, + * so we do not need to check that the document has been deleted in this place + */ + public void testDeleteConfigurationLeftOver() throws IOException { + String fakeJobName = randomAlphaOfLengthBetween(5, 20); + + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.field(DataFrameField.ID.getPreferredName(), fakeJobName); + } + builder.endObject(); + final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + Request req = new Request("PUT", DataFrameInternalIndex.INDEX_NAME + "/_doc/" + DataFrameJobConfig.documentId(fakeJobName)); + req.setEntity(entity); + client().performRequest(req); + } + + Request deleteRequest = new Request("DELETE", DATAFRAME_ENDPOINT + fakeJobName); + Response deleteResponse = client().performRequest(deleteRequest); + assertOK(deleteResponse); + assertTrue((boolean)XContentMapValues.extractValue("acknowledged", entityAsMap(deleteResponse))); + + // delete again, should fail + expectThrows(ResponseException.class,() -> client().performRequest(deleteRequest)); + } +} diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java similarity index 75% rename from x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java rename to x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index d764737811f0d..63b97fafddb21 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataframePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -10,12 +10,9 @@ import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; -import 
org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.junit.AfterClass; import org.junit.Before; import java.io.IOException; @@ -26,9 +23,8 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -public class DataframePivotRestIT extends ESRestTestCase { +public class DataFramePivotRestIT extends DataFrameRestTestCase { - private static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "jobs/"; private boolean indicesCreated = false; // preserve indices in order to reuse source indices in several test cases @@ -180,13 +176,6 @@ private static int getDataFrameGeneration(String jobId) throws IOException { return (int) XContentMapValues.extractValue("state.generation", jobStatsAsMap); } - private static String getDataFrameIndexerState(String jobId) throws IOException { - Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); - - Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); - return (String) XContentMapValues.extractValue("state.job_state", jobStatsAsMap); - } - private void refreshIndex(String index) throws IOException { assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); } @@ -198,48 +187,4 @@ private void assertOnePivotValue(String query, double expected) throws IOExcepti double actual = (double) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", searchResult)).get(0); assertEquals(expected, actual, 0.000001); } - - private static void wipeDataFrameJobs() throws IOException, InterruptedException { - Response response = adminClient().performRequest(new Request("GET", DATAFRAME_ENDPOINT + "_all")); - Map jobs = entityAsMap(response); - @SuppressWarnings("unchecked") - List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", jobs); - - if (jobConfigs == null) { - return; - } - - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("id"); - Request request = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_stop"); - request.addParameter("wait_for_completion", "true"); - request.addParameter("timeout", "10s"); - request.addParameter("ignore", "404"); - adminClient().performRequest(request); - assertEquals("stopped", getDataFrameIndexerState(jobId)); - } - - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("id"); - Request request = new Request("DELETE", DATAFRAME_ENDPOINT + jobId); - request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this - adminClient().performRequest(request); - } - } - - private static void waitForPendingDataFrameTasks() throws Exception { - waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrameField.TASK_NAME) == false); - } - - private static void wipeIndices() throws IOException { - try { - adminClient().performRequest(new Request("DELETE", "*")); - } catch (ResponseException e) { - // 404 here just means we had no indexes - if (e.getResponse().getStatusLine().getStatusCode() != 404) { - throw e; - } - } - } } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java new file mode 100644 index 0000000000000..8ddd63679a4b1 --- /dev/null +++ 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +abstract class DataFrameRestTestCase extends ESRestTestCase { + + protected static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "jobs/"; + + @SuppressWarnings("unchecked") + private static List> getDataFrameJobs() throws IOException { + Response response = adminClient().performRequest(new Request("GET", DATAFRAME_ENDPOINT + "_all")); + Map jobs = entityAsMap(response); + List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); + + return jobConfigs == null ? Collections.emptyList() : jobConfigs; + } + + protected static String getDataFrameIndexerState(String jobId) throws IOException { + Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); + + Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); + return (String) XContentMapValues.extractValue("state.job_state", jobStatsAsMap); + } + + protected static void wipeDataFrameJobs() throws IOException, InterruptedException { + List> jobConfigs = getDataFrameJobs(); + + for (Map jobConfig : jobConfigs) { + String jobId = (String) jobConfig.get("id"); + Request request = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_stop"); + request.addParameter("wait_for_completion", "true"); + request.addParameter("timeout", "10s"); + request.addParameter("ignore", "404"); + adminClient().performRequest(request); + assertEquals("stopped", getDataFrameIndexerState(jobId)); + } + + for (Map jobConfig : jobConfigs) { + String jobId = (String) jobConfig.get("id"); + Request request = new Request("DELETE", DATAFRAME_ENDPOINT + jobId); + request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this + adminClient().performRequest(request); + } + + // jobs should be all gone + jobConfigs = getDataFrameJobs(); + assertTrue(jobConfigs.isEmpty()); + + // the configuration index should be empty + Request request = new Request("GET", DataFrameInternalIndex.INDEX_NAME + "/_search"); + Response searchResponse = adminClient().performRequest(request); + Map searchResult = entityAsMap(searchResponse); + + assertEquals(0, XContentMapValues.extractValue("hits.total.value", searchResult)); + } + + protected static void waitForPendingDataFrameTasks() throws Exception { + waitForPendingTasks(adminClient(), taskName -> taskName.startsWith(DataFrameField.TASK_NAME) == false); + } + + protected static void wipeIndices() throws IOException { + try { + adminClient().performRequest(new Request("DELETE", "*")); + } catch (ResponseException e) { + // 404 here just 
means we had no indexes + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } +} \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index a7d7f94381e46..f6e1620aa13cf 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -6,20 +6,27 @@ package org.elasticsearch.xpack.dataframe; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.inject.Module; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; @@ -29,10 +36,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.dataframe.DataFrameField; @@ -52,6 +61,8 @@ import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import org.elasticsearch.xpack.dataframe.job.DataFrameJobPersistentTasksExecutor; import org.elasticsearch.xpack.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import org.elasticsearch.xpack.dataframe.rest.action.RestDeleteDataFrameJobAction; import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsAction; import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsStatsAction; @@ -59,6 +70,7 @@ import org.elasticsearch.xpack.dataframe.rest.action.RestStartDataFrameJobAction; import org.elasticsearch.xpack.dataframe.rest.action.RestStopDataFrameJobAction; +import java.io.IOException; import java.time.Clock; import java.util.ArrayList; import java.util.Arrays; @@ -66,8 +78,10 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; 
import java.util.Set; import java.util.function.Supplier; +import java.util.function.UnaryOperator; import static java.util.Collections.emptyList; @@ -80,9 +94,12 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu public static final Set HEADER_FILTERS = new HashSet<>( Arrays.asList("es-security-runas-user", "_xpack_security_authentication")); + private static final Logger logger = LogManager.getLogger(XPackPlugin.class); + private final boolean enabled; private final Settings settings; private final boolean transportClientMode; + private final SetOnce dataFrameJobConfigManager = new SetOnce<>(); public DataFrame(Settings settings) { this.settings = settings; @@ -152,6 +169,31 @@ public List> getExecutorBuilders(Settings settings) { return Collections.singletonList(indexing); } + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, + Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + if (enabled == false || transportClientMode) { + return emptyList(); + } + + dataFrameJobConfigManager.set(new DataFrameJobConfigManager(client, xContentRegistry)); + + return Collections.singletonList(dataFrameJobConfigManager.get()); + } + + @Override + public UnaryOperator> getIndexTemplateMetaDataUpgrader() { + return templates -> { + try { + templates.put(DataFrameInternalIndex.INDEX_TEMPLATE_NAME, DataFrameInternalIndex.getIndexTemplateMetaData()); + } catch (IOException e) { + logger.error("Error creating data frame index template", e); + } + return templates; + }; + } + @Override public List> getPersistentTasksExecutor(ClusterService clusterService, ThreadPool threadPool, Client client, SettingsModule settingsModule) { @@ -160,8 +202,11 @@ public List> getPersistentTasksExecutor(ClusterServic } SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); - return Collections.singletonList(new DataFrameJobPersistentTasksExecutor(client, - schedulerEngine, threadPool)); + + // the job config manager should have been created + assert dataFrameJobConfigManager.get() != null; + return Collections.singletonList( + new DataFrameJobPersistentTasksExecutor(client, dataFrameJobConfigManager.get(), schedulerEngine, threadPool)); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java index 1a5e371003e4c..e1658e8b9f690 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.dataframe.action; -import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.FailedNodeException; @@ -27,17 +26,21 @@ import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Response; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import 
org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import java.util.List; public class TransportDeleteDataFrameJobAction extends TransportTasksAction { + private final DataFrameJobConfigManager jobConfigManager; + @Inject public TransportDeleteDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, - ClusterService clusterService) { + ClusterService clusterService, DataFrameJobConfigManager jobConfigManager) { super(DeleteDataFrameJobAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, ThreadPool.Names.SAME); + this.jobConfigManager = jobConfigManager; } @Override @@ -51,11 +54,13 @@ protected Response newResponse(Request request, List tasks, List listener) { - assert task.getConfig().getId().equals(request.getId()); + assert task.getJobId().equals(request.getId()); IndexerState state = task.getState().getIndexerState(); if (state.equals(IndexerState.STOPPED)) { task.onCancelled(); - listener.onResponse(new Response(true)); + jobConfigManager.deleteJobConfiguration(request.getId(), ActionListener.wrap(r -> { + listener.onResponse(new Response(true)); + }, listener::onFailure)); } else { listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " + "indexer state is [" + state + "]. Job must be [" + IndexerState.STOPPED + "] before deletion.")); @@ -71,9 +76,13 @@ protected void doExecute(Task task, Request request, ActionListener li if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { super.doExecute(task, request, listener); } else { - // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this call, - // no need to go looking for the allocated task - listener.onFailure(new ResourceNotFoundException("the task with id [" + request.getId() + "] doesn't exist")); + // we couldn't find the job in the persistent task CS, but maybe the job exists in the configuration index, + // if so delete the orphaned document and do not throw (for the normal case we want to stop the task first, + // than delete the configuration document if and only if the data frame job is in stopped state) + jobConfigManager.deleteJobConfiguration(request.getId(), ActionListener.wrap(r -> { + listener.onResponse(new Response(true)); + return; + }, listener::onFailure)); } } else { // Delegates DeleteJob to elected master node, so it becomes the coordinating node. 
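Note: DataFrameJobConfigManager itself is added by this patch but its source does not appear in the hunks quoted so far, so the delete path used by TransportDeleteDataFrameJobAction above is only visible from the caller side. The following is a minimal, hypothetical sketch of what deleteJobConfiguration could look like, assuming the manager removes a single document from the internal configuration index using the dataframe-<jobId> id convention introduced in DataFrameJobConfig.documentId. The class name DataFrameJobConfigManagerSketch and the method body are illustrative assumptions, not the actual implementation from this patch series.

    /*
     * Sketch only. Assumes the configuration is stored as one document per job in
     * DataFrameInternalIndex.INDEX_NAME, keyed by DataFrameJobConfig.documentId(jobId).
     */
    import org.elasticsearch.ResourceNotFoundException;
    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.delete.DeleteRequest;
    import org.elasticsearch.action.support.WriteRequest;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.rest.RestStatus;
    import org.elasticsearch.xpack.core.dataframe.DataFrameMessages;
    import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig;
    import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex;

    public class DataFrameJobConfigManagerSketch {

        private final Client client;

        public DataFrameJobConfigManagerSketch(Client client) {
            this.client = client;
        }

        /**
         * Deletes the configuration document for the given job id and reports whether a
         * document was actually removed. Callers such as TransportDeleteDataFrameJobAction
         * wrap the listener to turn the result into an acknowledged response.
         */
        public void deleteJobConfiguration(String jobId, ActionListener<Boolean> listener) {
            DeleteRequest request = new DeleteRequest(DataFrameInternalIndex.INDEX_NAME, "_doc",
                    DataFrameJobConfig.documentId(jobId));
            // make the deletion visible immediately, so a follow-up search (for example the
            // empty-index check in DataFrameRestTestCase) does not observe a stale document
            request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

            client.delete(request, ActionListener.wrap(deleteResponse -> {
                if (deleteResponse.status() == RestStatus.NOT_FOUND) {
                    listener.onFailure(new ResourceNotFoundException(
                            DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId)));
                } else {
                    listener.onResponse(true);
                }
            }, listener::onFailure));
        }
    }

On this reading, a NOT_FOUND delete surfaces as a failure to the caller, which would be consistent with the "delete again, should fail" expectation in DataFrameConfigurationIndexIT; whether the real manager maps it to ResourceNotFoundException or another exception is an assumption here.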
diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java index 6ae1e08ac95db..2d85c04163688 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Response; import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; import java.util.Collection; @@ -38,10 +39,14 @@ public class TransportGetDataFrameJobsAction extends GetDataFrameJobsAction.Response, GetDataFrameJobsAction.Response> { + private final DataFrameJobConfigManager jobConfigManager; + @Inject - public TransportGetDataFrameJobsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { + public TransportGetDataFrameJobsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, + DataFrameJobConfigManager jobConfigManager) { super(GetDataFrameJobsAction.NAME, clusterService, transportService, actionFilters, GetDataFrameJobsAction.Request::new, GetDataFrameJobsAction.Response::new, GetDataFrameJobsAction.Response::new, ThreadPool.Names.SAME); + this.jobConfigManager = jobConfigManager; } @Override @@ -54,16 +59,18 @@ protected Response newResponse(Request request, List tasks, List listener) { - List configs = Collections.emptyList(); - - assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); - - // Little extra insurance, make sure we only return jobs that aren't cancelled + assert task.getJobId().equals(request.getId()) || request.getId().equals(MetaData.ALL); + // Little extra insurance, make sure we only return jobs that aren't + // cancelled if (task.isCancelled() == false) { - configs = Collections.singletonList(task.getConfig()); + jobConfigManager.getJobConfiguration(task.getJobId(), ActionListener.wrap(config -> { + listener.onResponse(new Response(Collections.singletonList(config))); + }, e -> { + listener.onFailure(new RuntimeException("failed to retrieve...", e)); + })); + } else { + listener.onResponse(new Response(Collections.emptyList())); } - - listener.onResponse(new Response(configs)); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java index 13b09cb7c969d..6a64514437f9f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java @@ -56,11 +56,11 @@ protected Response newResponse(Request request, List tasks, List listener) { List jobsStateAndStats = Collections.emptyList(); - assert task.getConfig().getId().equals(request.getId()) || request.getId().equals(MetaData.ALL); + 
assert task.getJobId().equals(request.getId()) || request.getId().equals(MetaData.ALL); // Little extra insurance, make sure we only return jobs that aren't cancelled if (task.isCancelled() == false) { - DataFrameJobStateAndStats jobStateAndStats = new DataFrameJobStateAndStats(task.getConfig().getId(), task.getState(), + DataFrameJobStateAndStats jobStateAndStats = new DataFrameJobStateAndStats(task.getJobId(), task.getState(), task.getStats()); jobsStateAndStats = Collections.singletonList(jobStateAndStats); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java index 25bbc2373ba31..485696bf8fe4d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.dataframe.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; @@ -18,34 +21,40 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Request; import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Response; import org.elasticsearch.xpack.dataframe.job.DataFrameJob; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import org.elasticsearch.xpack.dataframe.persistence.DataframeIndex; import org.elasticsearch.xpack.dataframe.support.JobValidator; public class TransportPutDataFrameJobAction extends TransportMasterNodeAction { + private static final Logger logger = LogManager.getLogger(TransportPutDataFrameJobAction.class); + private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; private final Client client; + private final DataFrameJobConfigManager dataFrameJobConfigManager; @Inject public TransportPutDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, - PersistentTasksService persistentTasksService, Client client) { + PersistentTasksService persistentTasksService, DataFrameJobConfigManager dataFrameJobConfigManager, Client client) { super(PutDataFrameJobAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutDataFrameJobAction.Request::new); this.licenseState = licenseState; this.persistentTasksService = 
persistentTasksService; this.client = client; + this.dataFrameJobConfigManager = dataFrameJobConfigManager; } @Override @@ -68,37 +77,63 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); - JobValidator jobCreator = new JobValidator(request.getConfig(), client); + String jobId = request.getConfig().getId(); + // quick check whether a job has already been created under that name + if (PersistentTasksCustomMetaData.getTaskWithId(clusterState, jobId) != null) { + listener.onFailure(new ResourceAlreadyExistsException( + DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobId))); + return; + } + // create the job, note the non-state creating steps are done first, so we minimize the chance to end up with orphaned state + // job validation + JobValidator jobCreator = new JobValidator(request.getConfig(), client); jobCreator.validate(ActionListener.wrap(validationResult -> { + // deduce target mappings jobCreator.deduceMappings(ActionListener.wrap(mappings -> { - DataFrameJob job = createDataFrameJob(request.getConfig(), threadPool); - DataframeIndex.createDestinationIndex(client, job, mappings, ActionListener.wrap(createIndexResult -> { - startPersistentTask(job, listener, persistentTasksService); - }, e3 -> { - listener.onFailure(new RuntimeException("Failed to create index", e3)); + // create the destination index + DataframeIndex.createDestinationIndex(client, request.getConfig(), mappings, ActionListener.wrap(createIndexResult -> { + DataFrameJob job = createDataFrameJob(jobId, threadPool); + // create the job configuration and store it in the internal index + dataFrameJobConfigManager.putJobConfiguration(request.getConfig(), ActionListener.wrap(r -> { + // finally start the persistent task + persistentTasksService.sendStartRequest(job.getId(), DataFrameJob.NAME, job, ActionListener.wrap(persistentTask -> { + listener.onResponse(new PutDataFrameJobAction.Response(true)); + }, startPersistentTaskException -> { + // delete the otherwise orphaned job configuration, for now we do not delete the destination index + dataFrameJobConfigManager.deleteJobConfiguration(jobId, ActionListener.wrap(r2 -> { + logger.debug("Deleted data frame job [{}] configuration from data frame configuration index", jobId); + listener.onFailure( + new RuntimeException( + DataFrameMessages.getMessage( + DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_START_PERSISTENT_TASK, r2), + startPersistentTaskException)); + }, deleteJobFromIndexException -> { + logger.error("Failed to cleanup orphaned data frame job [{}] configuration", jobId); + listener.onFailure( + new RuntimeException( + DataFrameMessages.getMessage( + DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_START_PERSISTENT_TASK, false), + startPersistentTaskException)); + })); + })); + }, listener::onFailure)); + }, createDestinationIndexException -> { + listener.onFailure(new RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_CREATE_TARGET_INDEX, + createDestinationIndexException)); })); - }, e2 -> { - listener.onFailure(new RuntimeException("Failed to deduce targe mappings", e2)); + }, deduceTargetMappingsException -> { + listener.onFailure(new RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_DEDUCE_TARGET_MAPPINGS, + deduceTargetMappingsException)); })); - }, e -> { - listener.onFailure(new RuntimeException("Failed to validate", e)); + }, validationException -> { + listener.onFailure(new 
RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_VALIDATE_DATA_FRAME_CONFIGURATION, + validationException)); })); } - private static DataFrameJob createDataFrameJob(DataFrameJobConfig config, ThreadPool threadPool) { - return new DataFrameJob(config); - } - - static void startPersistentTask(DataFrameJob job, ActionListener listener, - PersistentTasksService persistentTasksService) { - - persistentTasksService.sendStartRequest(job.getConfig().getId(), DataFrameJob.NAME, job, - ActionListener.wrap(persistentTask -> { - listener.onResponse(new PutDataFrameJobAction.Response(true)); - }, e -> { - listener.onFailure(e); - })); + private static DataFrameJob createDataFrameJob(String jobId, ThreadPool threadPool) { + return new DataFrameJob(jobId); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java index 390fe2ab16b66..ab5a01e2ad5e7 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java @@ -46,7 +46,7 @@ protected void processTasks(StartDataFrameJobAction.Request request, Consumer listener) { - if (jobTask.getConfig().getId().equals(request.getId())) { + if (jobTask.getJobId().equals(request.getId())) { jobTask.start(listener); } else { - listener.onFailure(new RuntimeException("ID of data frame job task [" + jobTask.getConfig().getId() + listener.onFailure(new RuntimeException("ID of data frame job task [" + jobTask.getJobId() + "] does not match request's ID [" + request.getId() + "]")); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java index e76c6dabc3e3c..5e52efc4e42d9 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java @@ -51,7 +51,7 @@ protected void doExecute(Task task, StopDataFrameJobAction.Request request, @Override protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJobTask jobTask, ActionListener listener) { - if (jobTask.getConfig().getId().equals(request.getId())) { + if (jobTask.getJobId().equals(request.getId())) { if (request.waitForCompletion() == false) { jobTask.stop(listener); } else { @@ -89,7 +89,7 @@ protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJo jobTask.stop(blockingListener); } } else { - listener.onFailure(new RuntimeException("ID of data frame indexer task [" + jobTask.getConfig().getId() + listener.onFailure(new RuntimeException("ID of data frame indexer task [" + jobTask.getJobId() + "] does not match request's ID [" + request.getId() + "]")); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java index 22dc9facb109c..2effab1f3eb84 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java @@ -34,25 +34,17 @@ import java.util.stream.Stream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.dataframe.persistence.DataframeIndex.DOC_TYPE; public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerJobStats> { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; private static final Logger logger = LogManager.getLogger(DataFrameIndexer.class); - private DataFrameJob job; - public DataFrameIndexer(Executor executor, DataFrameJob job, AtomicReference initialState, - Map initialPosition) { + public DataFrameIndexer(Executor executor, AtomicReference initialState, Map initialPosition) { super(executor, initialState, initialPosition, new DataFrameIndexerJobStats()); - - this.job = job; } - @Override - protected String getJobId() { - return job.getConfig().getId(); - } + protected abstract DataFrameJobConfig getConfig(); @Override protected void onStartJob(long now) { @@ -74,9 +66,10 @@ protected IterationResult> doProcess(SearchResponse searchRe * in later versions, see {@link IngestDocument). */ private Stream processBucketsToIndexRequests(CompositeAggregation agg) { - String indexName = job.getConfig().getDestinationIndex(); - List> sources = job.getConfig().getSourceConfig().getSources(); - Collection aggregationBuilders = job.getConfig().getAggregationConfig().getAggregatorFactories(); + final DataFrameJobConfig jobConfig = getConfig(); + String indexName = jobConfig.getDestinationIndex(); + List> sources = jobConfig.getSourceConfig().getSources(); + Collection aggregationBuilders = jobConfig.getAggregationConfig().getAggregatorFactories(); return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, getStats()).map(document -> { XContentBuilder builder; @@ -87,7 +80,7 @@ private Stream processBucketsToIndexRequests(CompositeAggregation throw new UncheckedIOException(e); } - IndexRequest request = new IndexRequest(indexName, DOC_TYPE).source(builder); + IndexRequest request = new IndexRequest(indexName).source(builder); return request; }); } @@ -95,11 +88,12 @@ private Stream processBucketsToIndexRequests(CompositeAggregation @Override protected SearchRequest buildSearchRequest() { final Map position = getPosition(); + final DataFrameJobConfig jobConfig = getConfig(); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); - SearchRequest searchRequest = new SearchRequest(job.getConfig().getIndexPattern()); + SearchRequest searchRequest = new SearchRequest(jobConfig.getIndexPattern()); - List> sources = job.getConfig().getSourceConfig().getSources(); + List> sources = jobConfig.getSourceConfig().getSources(); CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); compositeAggregation.size(1000); @@ -108,7 +102,7 @@ protected SearchRequest buildSearchRequest() { compositeAggregation.aggregateAfter(position); } - for (AggregationBuilder agg : job.getConfig().getAggregationConfig().getAggregatorFactories()) { + for (AggregationBuilder agg : jobConfig.getAggregationConfig().getAggregatorFactories()) { compositeAggregation.subAggregation(agg); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java index 4118df06df0b4..b56925b7716ea 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java @@ -8,7 +8,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -26,24 +25,21 @@ public class DataFrameJob extends AbstractDiffable implements XPac public static final String NAME = DataFrameField.TASK_NAME; - private DataFrameJobConfig config; - - private static final ParseField CONFIG = new ParseField("config"); + private final String jobId; public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - a -> new DataFrameJob((DataFrameJobConfig) a[0])); + a -> new DataFrameJob((String) a[0])); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameJobConfig.fromXContent(p, null), - CONFIG); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID); } - public DataFrameJob(DataFrameJobConfig config) { - this.config = Objects.requireNonNull(config); + public DataFrameJob(String jobId) { + this.jobId = jobId; } public DataFrameJob(StreamInput in) throws IOException { - this.config = new DataFrameJobConfig(in); + this.jobId = in.readString(); } @Override @@ -59,19 +55,19 @@ public Version getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { - config.writeTo(out); + out.writeString(jobId); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(CONFIG.getPreferredName(), config); + builder.field(DataFrameField.ID.getPreferredName(), jobId); builder.endObject(); return builder; } - public DataFrameJobConfig getConfig() { - return config; + public String getId() { + return jobId; } public static DataFrameJob fromXContent(XContentParser parser) throws IOException { @@ -90,12 +86,12 @@ public boolean equals(Object other) { DataFrameJob that = (DataFrameJob) other; - return Objects.equals(this.config, that.config); + return Objects.equals(this.jobId, that.jobId); } @Override public int hashCode() { - return Objects.hash(config); + return Objects.hash(jobId); } public Map getHeaders() { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java index db23feb8d10ac..4cc0ad004a39e 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java @@ -41,22 +41,31 @@ public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { private final SourceConfig sourceConfig; private final AggregationConfig aggregationConfig; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, - (args, optionalId) -> { - String id = args[0] != null ? 
(String) args[0] : optionalId; - String indexPattern = (String) args[1]; - String destinationIndex = (String) args[2]; - SourceConfig sourceConfig= (SourceConfig) args[3]; - AggregationConfig aggregationConfig = (AggregationConfig) args[4]; - return new DataFrameJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); - }); - - static { - PARSER.declareString(optionalConstructorArg(), DataFrameField.ID); - PARSER.declareString(constructorArg(), INDEX_PATTERN); - PARSER.declareString(constructorArg(), DESTINATION_INDEX); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p), SOURCES); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); + public static final ConstructingObjectParser PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + (args, optionalId) -> { + String id = args[0] != null ? (String) args[0] : optionalId; + String indexPattern = (String) args[1]; + String destinationIndex = (String) args[2]; + SourceConfig sourceConfig = (SourceConfig) args[3]; + AggregationConfig aggregationConfig = (AggregationConfig) args[4]; + return new DataFrameJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); + }); + + parser.declareString(optionalConstructorArg(), DataFrameField.ID); + parser.declareString(constructorArg(), INDEX_PATTERN); + parser.declareString(constructorArg(), DESTINATION_INDEX); + parser.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p), SOURCES); + parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); + + return parser; + } + + public static String documentId(String jobId) { + return "dataframe-" + jobId; } public DataFrameJobConfig(final String id, diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java index 99135d8df5a35..449502e29f774 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.DataFrame; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import java.util.Map; @@ -27,13 +28,15 @@ public class DataFrameJobPersistentTasksExecutor extends PersistentTasksExecutor private static final Logger logger = LogManager.getLogger(DataFrameJobPersistentTasksExecutor.class); private final Client client; + private final DataFrameJobConfigManager jobConfigManager; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public DataFrameJobPersistentTasksExecutor(Client client, SchedulerEngine schedulerEngine, + public DataFrameJobPersistentTasksExecutor(Client client, DataFrameJobConfigManager jobConfigManager, SchedulerEngine schedulerEngine, ThreadPool threadPool) { 
super(DataFrameField.TASK_NAME, DataFrame.TASK_THREAD_POOL_NAME); this.client = client; + this.jobConfigManager = jobConfigManager; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; } @@ -42,7 +45,7 @@ public DataFrameJobPersistentTasksExecutor(Client client, SchedulerEngine schedu protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameJob params, PersistentTaskState state) { DataFrameJobTask buildTask = (DataFrameJobTask) task; SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job( - DataFrameJobTask.SCHEDULE_NAME + "_" + params.getConfig().getId(), next()); + DataFrameJobTask.SCHEDULE_NAME + "_" + params.getId(), next()); // Note that while the task is added to the scheduler here, the internal state // will prevent @@ -50,7 +53,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameJo schedulerEngine.register(buildTask); schedulerEngine.add(schedulerJob); - logger.info("Data frame job [" + params.getConfig().getId() + "] created."); + logger.info("Data frame job [" + params.getId() + "] created."); } static SchedulerEngine.Schedule next() { @@ -63,6 +66,6 @@ static SchedulerEngine.Schedule next() { protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { return new DataFrameJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (DataFrameJobState) persistentTask.getState(), client, schedulerEngine, threadPool, headers); + (DataFrameJobState) persistentTask.getState(), client, jobConfigManager, schedulerEngine, threadPool, headers); } } \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 9059d863aa913..464255e2dd54d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -22,19 +23,24 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Response; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; public class DataFrameJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { private static final 
Logger logger = LogManager.getLogger(DataFrameJobTask.class); + public static final String SCHEDULE_NAME = DataFrameField.TASK_NAME + "/schedule"; private final DataFrameJob job; private final SchedulerEngine schedulerEngine; @@ -46,21 +52,20 @@ public class DataFrameJobTask extends AllocatedPersistentTask implements Schedul // 1: data frame complete, all data has been indexed private final AtomicReference generation; - static final String SCHEDULE_NAME = DataFrameField.TASK_NAME + "/schedule"; - - public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, - DataFrameJobState state, Client client, SchedulerEngine schedulerEngine, ThreadPool threadPool, + public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, DataFrameJobState state, + Client client, DataFrameJobConfigManager jobConfigManager, SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { - super(id, type, action, DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getConfig().getId(), parentTask, headers); + super(id, type, action, DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getId(), parentTask, headers); this.job = job; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; IndexerState initialState = IndexerState.STOPPED; + long initialGeneration = 0; Map initialPosition = null; - logger.info("[{}] init, got state: [{}]", job.getConfig().getId(), state != null); + logger.info("[{}] init, got state: [{}]", job.getId(), state != null); if (state != null) { final IndexerState existingState = state.getIndexerState(); - logger.info("[{}] Loading existing state: [{}], position [{}]", job.getConfig().getId(), existingState, state.getPosition()); + logger.info("[{}] Loading existing state: [{}], position [{}]", job.getId(), existingState, state.getPosition()); if (existingState.equals(IndexerState.INDEXING)) { // reset to started as no indexer is running initialState = IndexerState.STARTED; @@ -71,14 +76,16 @@ public DataFrameJobTask(long id, String type, String action, TaskId parentTask, initialState = existingState; } initialPosition = state.getPosition(); + initialGeneration = state.getGeneration(); } - this.indexer = new ClientDataFrameIndexer(job, new AtomicReference<>(initialState), initialPosition, client); - this.generation = new AtomicReference<>(0L); + this.indexer = new ClientDataFrameIndexer(job.getId(), jobConfigManager, new AtomicReference<>(initialState), initialPosition, + client); + this.generation = new AtomicReference(initialGeneration); } - public DataFrameJobConfig getConfig() { - return job.getConfig(); + public String getJobId() { + return job.getId(); } /** @@ -110,32 +117,32 @@ public synchronized void start(ActionListener listener) { if (prevState != IndexerState.STOPPED) { // fails if the task is not STOPPED listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", - job.getConfig().getId(), prevState)); + job.getId(), prevState)); return; } final IndexerState newState = indexer.start(); if (newState != IndexerState.STARTED) { listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", - job.getConfig().getId(), newState)); + job.getId(), newState)); return; } final DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); - logger.debug("Updating state for data frame job [{}] to [{}][{}]", job.getConfig().getId(), 
state.getIndexerState(), + logger.debug("Updating state for data frame job [{}] to [{}][{}]", job.getId(), state.getIndexerState(), state.getPosition()); updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Successfully updated state for data frame job [" + job.getConfig().getId() + "] to [" + logger.debug("Successfully updated state for data frame job [" + job.getId() + "] to [" + state.getIndexerState() + "][" + state.getPosition() + "]"); listener.onResponse(new StartDataFrameJobAction.Response(true)); }, (exc) -> { // We were unable to update the persistent status, so we need to shutdown the indexer too. indexer.stop(); listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [" - + job.getConfig().getId() + "] to [" + state.getIndexerState() + "].", exc)); + + job.getId() + "] to [" + state.getIndexerState() + "].", exc)); }) ); } @@ -155,25 +162,25 @@ public synchronized void stop(ActionListener li // overwrite some docs and eventually checkpoint. DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); updatePersistentTaskState(state, ActionListener.wrap((task) -> { - logger.debug("Successfully updated state for data frame job [{}] to [{}]", job.getConfig().getId(), + logger.debug("Successfully updated state for data frame job [{}] to [{}]", job.getId(), state.getIndexerState()); listener.onResponse(new StopDataFrameJobAction.Response(true)); }, (exc) -> { listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [{}] to [{}]", exc, - job.getConfig().getId(), state.getIndexerState())); + job.getId(), state.getIndexerState())); })); break; default: listener.onFailure(new ElasticsearchException("Cannot stop task for data frame job [{}], because state was [{}]", - job.getConfig().getId(), newState)); + job.getId(), newState)); break; } } @Override public synchronized void triggered(Event event) { - if (generation.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + job.getConfig().getId())) { + if (generation.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + job.getId())) { logger.debug("Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); indexer.maybeTriggerAsyncJob(System.currentTimeMillis()); } @@ -186,8 +193,8 @@ public synchronized void triggered(Event event) { */ synchronized void shutdown() { try { - logger.info("Data frame indexer [" + job.getConfig().getId() + "] received abort request, stopping indexer."); - schedulerEngine.remove(SCHEDULE_NAME + "_" + job.getConfig().getId()); + logger.info("Data frame indexer [" + job.getId() + "] received abort request, stopping indexer."); + schedulerEngine.remove(SCHEDULE_NAME + "_" + job.getId()); schedulerEngine.unregister(this); } catch (Exception e) { markAsFailed(e); @@ -204,7 +211,7 @@ synchronized void shutdown() { @Override public synchronized void onCancelled() { logger.info( - "Received cancellation request for data frame job [" + job.getConfig().getId() + "], state: [" + indexer.getState() + "]"); + "Received cancellation request for data frame job [" + job.getId() + "], state: [" + indexer.getState() + "]"); if (indexer.abort()) { // there is no background job running, we can shutdown safely shutdown(); @@ -212,14 +219,51 @@ public synchronized void onCancelled() { } protected class ClientDataFrameIndexer extends DataFrameIndexer { + private static final int LOAD_JOB_TIMEOUT_IN_SECONDS = 30; 
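// the indexer no longer receives the full DataFrameJobConfig up front; it resolves the config lazily
// from the internal index on the first trigger (see maybeTriggerAsyncJob below), waiting at most
// LOAD_JOB_TIMEOUT_IN_SECONDS seconds for the lookup to complete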
private final Client client; + private final DataFrameJobConfigManager jobConfigManager; + private final String jobId; + + private DataFrameJobConfig jobConfig = null; - public ClientDataFrameIndexer(DataFrameJob job, AtomicReference initialState, + public ClientDataFrameIndexer(String jobId, DataFrameJobConfigManager jobConfigManager, AtomicReference initialState, Map initialPosition, Client client) { - super(threadPool.executor(ThreadPool.Names.GENERIC), job, initialState, initialPosition); + super(threadPool.executor(ThreadPool.Names.GENERIC), initialState, initialPosition); + this.jobId = jobId; + this.jobConfigManager = jobConfigManager; this.client = client; } + @Override + protected DataFrameJobConfig getConfig() { + return jobConfig; + } + + @Override + protected String getJobId() { + return jobId; + } + + @Override + public synchronized boolean maybeTriggerAsyncJob(long now) { + if (jobConfig == null) { + CountDownLatch latch = new CountDownLatch(1); + + jobConfigManager.getJobConfiguration(jobId, new LatchedActionListener<>(ActionListener.wrap(config -> { + jobConfig = config; + }, e -> { + throw new RuntimeException(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_JOB_CONFIGURATION, jobId), e); + }), latch)); + + try { + latch.await(LOAD_JOB_TIMEOUT_IN_SECONDS, TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_JOB_CONFIGURATION, jobId), e); + } + } + return super.maybeTriggerAsyncJob(now); + } + @Override protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, SearchAction.INSTANCE, request, @@ -246,27 +290,27 @@ protected void doSaveState(IndexerState indexerState, Map positi } final DataFrameJobState state = new DataFrameJobState(indexerState, getPosition(), generation.get()); - logger.info("Updating persistent state of job [" + job.getConfig().getId() + "] to [" + state.toString() + "]"); + logger.info("Updating persistent state of job [" + job.getId() + "] to [" + state.toString() + "]"); updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> { - logger.error("Updating persistent state of job [" + job.getConfig().getId() + "] failed", exc); + logger.error("Updating persistent state of job [" + job.getId() + "] failed", exc); next.run(); })); } @Override protected void onFailure(Exception exc) { - logger.warn("Data frame job [" + job.getConfig().getId() + "] failed with an exception: ", exc); + logger.warn("Data frame job [" + job.getId() + "] failed with an exception: ", exc); } @Override protected void onFinish() { - logger.info("Finished indexing for data frame job [" + job.getConfig().getId() + "]"); + logger.info("Finished indexing for data frame job [" + job.getId() + "]"); } @Override protected void onAbort() { - logger.info("Data frame job [" + job.getConfig().getId() + "] received abort request, stopping indexer"); + logger.info("Data frame job [" + job.getId() + "] received abort request, stopping indexer"); shutdown(); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java new file mode 100644 index 0000000000000..00101db66812a --- /dev/null +++ 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.persistence; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import java.io.IOException; +import java.util.Collections; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +public final class DataFrameInternalIndex { + + // constants for the index + public static final String INDEX_TEMPLATE_VERSION = "1"; + public static final String INDEX_TEMPLATE_PATTERN = ".data-frame-internal-"; + public static final String INDEX_TEMPLATE_NAME = INDEX_TEMPLATE_PATTERN + INDEX_TEMPLATE_VERSION; + public static final String INDEX_NAME = INDEX_TEMPLATE_NAME; + + // constants for mappings + public static final String ENABLED = "enabled"; + public static final String DYNAMIC = "dynamic"; + public static final String PROPERTIES = "properties"; + public static final String TYPE = "type"; + + // data types + public static final String DOUBLE = "double"; + public static final String KEYWORD = "keyword"; + + // internal document types, e.g. "job_config" + public static final String DOC_TYPE = "doc_type"; + + public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException { + IndexTemplateMetaData dataFrameTemplate = IndexTemplateMetaData.builder(INDEX_TEMPLATE_NAME) + .patterns(Collections.singletonList(INDEX_TEMPLATE_NAME)) + .version(Version.CURRENT.id) + .settings(Settings.builder() + // the configurations are expected to be small + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")) + // todo: remove type + .putMapping(MapperService.SINGLE_MAPPING_NAME, Strings.toString(mappings())) + .build(); + return dataFrameTemplate; + } + + private static XContentBuilder mappings() throws IOException { + XContentBuilder builder = jsonBuilder(); + builder.startObject(); + + builder.startObject(MapperService.SINGLE_MAPPING_NAME); + addMetaInformation(builder); + + // no need to analyze anything, we use the config index as key value store, revisit if we decide to search on it + builder.field(ENABLED, false); + // do not allow anything outside of the defined schema + builder.field(DYNAMIC, "strict"); + // the schema definitions + builder.startObject(PROPERTIES); + // overall doc type + builder.startObject(DOC_TYPE).field(TYPE, KEYWORD).endObject(); + // add the schema for job configurations + addDataFrameJobConfigMappings(builder); + + // end type + builder.endObject(); + // end properties + builder.endObject(); + // end mapping + builder.endObject(); + return builder; + } + + private static XContentBuilder addDataFrameJobConfigMappings(XContentBuilder builder) throws IOException { + return builder + .startObject(DataFrameField.ID.getPreferredName()) + .field(TYPE, KEYWORD) + .endObject(); + } + + /** + * Inserts "_meta" 
containing useful information like the version into the mapping + * template. + * + * @param builder The builder for the mappings + * @throws IOException On write error + */ + private static XContentBuilder addMetaInformation(XContentBuilder builder) throws IOException { + return builder.startObject("_meta") + .field("version", Version.CURRENT) + .endObject(); + } + + private DataFrameInternalIndex() { + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java new file mode 100644 index 0000000000000..734e09f53aef5 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.persistence; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteAction; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.xpack.core.ClientHelper.DATA_FRAME_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class DataFrameJobConfigManager { + + private static final Logger logger = LogManager.getLogger(DataFrameJobConfigManager.class); + + public static final Map TO_XCONTENT_PARAMS; + static { + Map modifiable = new HashMap<>(); + modifiable.put("for_internal_storage", "true"); + TO_XCONTENT_PARAMS = Collections.unmodifiableMap(modifiable); + } + + private final Client client; + private final NamedXContentRegistry xContentRegistry; + + public DataFrameJobConfigManager(Client client, NamedXContentRegistry xContentRegistry) 
{ + this.client = client; + this.xContentRegistry = xContentRegistry; + } + + public void putJobConfiguration(DataFrameJobConfig jobConfig, ActionListener listener) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + XContentBuilder source = jobConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); + + IndexRequest indexRequest = new IndexRequest(DataFrameInternalIndex.INDEX_NAME) + .opType(DocWriteRequest.OpType.CREATE) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .id(DataFrameJobConfig.documentId(jobConfig.getId())) + .source(source); + + executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(r -> { + listener.onResponse(true); + }, e -> { + if (e instanceof VersionConflictEngineException) { + // the job already exists + listener.onFailure(new ResourceAlreadyExistsException( + DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobConfig.getId()))); + } else { + listener.onFailure(new RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_PERSIST_JOB_CONFIGURATION, e)); + } + })); + } catch (IOException e) { + // not expected to happen but for the sake of completeness + listener.onFailure(new ElasticsearchParseException( + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_FAILED_TO_SERIALIZE_JOB, jobConfig.getId()), e)); + } + } + + public void getJobConfiguration(String jobId, ActionListener resultListener) { + GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameJobConfig.documentId(jobId)); + executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(getResponse -> { + + if (getResponse.isExists() == false) { + resultListener.onFailure(new ResourceNotFoundException( + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + return; + } + BytesReference source = getResponse.getSourceAsBytesRef(); + parseJobLenientlyFromSource(source, jobId, resultListener); + }, e -> { + if (e.getClass() == IndexNotFoundException.class) { + resultListener.onFailure( + new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + } else { + resultListener.onFailure(e); + } + })); + } + + public void deleteJobConfiguration(String jobId, ActionListener listener) { + DeleteRequest request = new DeleteRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameJobConfig.documentId(jobId)); + request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, DeleteAction.INSTANCE, request, ActionListener.wrap(deleteResponse -> { + + if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { + listener.onFailure( + new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + return; + } + listener.onResponse(true); + }, e -> { + if (e.getClass() == IndexNotFoundException.class) { + listener.onFailure( + new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + } else { + listener.onFailure(e); + } + })); + } + + private void parseJobLenientlyFromSource(BytesReference source, String jobId, ActionListener jobListener) { + try (InputStream stream = source.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)) { + 
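// note: despite the method name, the stored document is parsed with the strict PARSER here, even though
// a LENIENT_PARSER (ignoring unknown fields) is also defined on DataFrameJobConfig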
jobListener.onResponse(DataFrameJobConfig.PARSER.parse(parser, jobId)); + } catch (Exception e) { + logger.error(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_PARSE_JOB_CONFIGURATION, jobId), e); + jobListener.onFailure(e); + } + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index d45af546947f4..5c83926ee1940 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -14,7 +14,8 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import java.io.IOException; import java.util.Map; @@ -32,9 +33,9 @@ public final class DataframeIndex { private DataframeIndex() { } - public static void createDestinationIndex(Client client, DataFrameJob job, Map mappings, + public static void createDestinationIndex(Client client, DataFrameJobConfig jobConfig, Map mappings, final ActionListener listener) { - CreateIndexRequest request = new CreateIndexRequest(job.getConfig().getDestinationIndex()); + CreateIndexRequest request = new CreateIndexRequest(jobConfig.getDestinationIndex()); // TODO: revisit number of shards, number of replicas request.settings(Settings.builder() // <1> @@ -45,8 +46,8 @@ public static void createDestinationIndex(Client client, DataFrameJob job, Map { listener.onResponse(true); }, e -> { - String message = "Could not create destination index [" + job.getConfig().getDestinationIndex() + "] for job[" - + job.getConfig().getId() + "]"; + String message = DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_CREATE_DESTINATION_INDEX, + jobConfig.getDestinationIndex(), jobConfig.getId()); logger.error(message); listener.onFailure(new RuntimeException(message, e)); })); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/LocalStateDataFrame.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/LocalStateDataFrame.java new file mode 100644 index 0000000000000..f4b3221ec9d62 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/LocalStateDataFrame.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.dataframe; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import java.nio.file.Path; + +public class LocalStateDataFrame extends LocalStateCompositeXPackPlugin { + + public LocalStateDataFrame(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + @SuppressWarnings("resource") + LocalStateDataFrame thisVar = this; + + plugins.add(new DataFrame(settings) { + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + }); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java new file mode 100644 index 0000000000000..829efa494f470 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.persistence; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfigTests; +import org.junit.Before; + +public class DataFrameJobConfigManagerTests extends DataFrameSingleNodeTestCase { + + private DataFrameJobConfigManager jobConfigManager; + + @Before + public void createComponents() { + jobConfigManager = new DataFrameJobConfigManager(client(), xContentRegistry()); + } + + public void testGetMissingJob() throws InterruptedException { + // the index does not exist yet + assertAsync(listener -> jobConfigManager.getJobConfiguration("not_there", listener), (DataFrameJobConfig) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); + }); + + // create one job and test with an existing index + assertAsync(listener -> jobConfigManager.putJobConfiguration(DataFrameJobConfigTests.randomDataFrameJobConfig(), listener), true, + null, null); + + // same test, but different code path + assertAsync(listener -> jobConfigManager.getJobConfiguration("not_there", listener), (DataFrameJobConfig) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); + }); + } + + public void testDeleteMissingJob() throws InterruptedException { + // the index does not exist yet + assertAsync(listener -> jobConfigManager.deleteJobConfiguration("not_there", listener), (Boolean) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); + }); + + // create one job and test with an existing index + assertAsync(listener -> 
jobConfigManager.putJobConfiguration(DataFrameJobConfigTests.randomDataFrameJobConfig(), listener), true, + null, null); + + // same test, but different code path + assertAsync(listener -> jobConfigManager.deleteJobConfiguration("not_there", listener), (Boolean) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); + }); + } + + public void testCreateReadDelete() throws InterruptedException { + DataFrameJobConfig jobConfig = DataFrameJobConfigTests.randomDataFrameJobConfig(); + + // create job + assertAsync(listener -> jobConfigManager.putJobConfiguration(jobConfig, listener), true, null, null); + + // read job + assertAsync(listener -> jobConfigManager.getJobConfiguration(jobConfig.getId(), listener), jobConfig, null, null); + + // try to create again + assertAsync(listener -> jobConfigManager.putJobConfiguration(jobConfig, listener), (Boolean) null, null, e -> { + assertEquals(ResourceAlreadyExistsException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobConfig.getId()), e.getMessage()); + }); + + // delete job + assertAsync(listener -> jobConfigManager.deleteJobConfiguration(jobConfig.getId(), listener), true, null, null); + + // delete again + assertAsync(listener -> jobConfigManager.deleteJobConfiguration(jobConfig.getId(), listener), (Boolean) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobConfig.getId()), e.getMessage()); + }); + + // try to get deleted job + assertAsync(listener -> jobConfigManager.getJobConfiguration(jobConfig.getId(), listener), (DataFrameJobConfig) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobConfig.getId()), e.getMessage()); + }); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameSingleNodeTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameSingleNodeTestCase.java new file mode 100644 index 0000000000000..d1691fd094d23 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameSingleNodeTestCase.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.persistence; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.template.TemplateUtils; +import org.elasticsearch.xpack.dataframe.LocalStateDataFrame; +import org.junit.Before; + +import java.util.Collection; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +public abstract class DataFrameSingleNodeTestCase extends ESSingleNodeTestCase { + + @Before + public void waitForTemplates() throws Exception { + assertBusy(() -> { + ClusterState state = client().admin().cluster().prepareState().get().getState(); + assertTrue("Timed out waiting for the data frame templates to be installed", + TemplateUtils.checkTemplateExistsAndVersionIsGTECurrentVersion(DataFrameInternalIndex.INDEX_TEMPLATE_NAME, state)); + }); + } + + @Override + protected Settings nodeSettings() { + Settings.Builder newSettings = Settings.builder(); + newSettings.put(super.nodeSettings()); + + return newSettings.build(); + } + + @Override + protected Collection> getPlugins() { + return pluginList(LocalStateDataFrame.class); + } + + protected void assertAsync(Consumer> function, T expected, CheckedConsumer onAnswer, + Consumer onException) throws InterruptedException { + + CountDownLatch latch = new CountDownLatch(1); + + LatchedActionListener listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + if (expected == null) { + fail("expected an exception but got a response"); + } else { + assertEquals(r, expected); + } + if (onAnswer != null) { + onAnswer.accept(r); + } + }, e -> { + if (onException == null) { + fail("got unexpected exception: " + e.getMessage()); + } else { + onException.accept(e); + } + }), latch); + + function.accept(listener); + latch.await(10, TimeUnit.SECONDS); + } + +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 193e3bdb3bd5c..a8ec65ce59378 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -24,6 +24,7 @@ import java.util.function.Predicate; import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.DATA_FRAME_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.DEPRECATION_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; @@ -110,6 +111,7 @@ public static void switchUserBasedOnActionOriginAndExecute(ThreadContext threadC case WATCHER_ORIGIN: case ML_ORIGIN: case MONITORING_ORIGIN: + case DATA_FRAME_ORIGIN: case DEPRECATION_ORIGIN: case PERSISTENT_TASK_ORIGIN: case ROLLUP_ORIGIN: From 89209c1132460304bfa32ba0935fd1a919d0c36e Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 14 Jan 2019 08:42:06 +0100 Subject: [PATCH 37/49] return job counts and statistics for usage endpoint (#37346) return 
information about job counts by state and accumulated job statistics as part of the _xpack/usage endpoint --- .../dataframe/DataFrameFeatureSetUsage.java | 68 +++++++- .../xpack/core/dataframe/DataFrameField.java | 1 + .../job/DataFrameIndexerJobStats.java | 17 +- .../dataframe/job/DataFrameJobState.java | 2 +- .../DataFrameFeatureSetUsageTests.java | 35 ++++ .../job/DataFrameIndexerJobStatsTests.java | 53 ++++++ .../dataframe/job/DataFrameJobStateTests.java | 2 +- .../integration/DataFramePivotRestIT.java | 103 +----------- .../integration/DataFrameRestTestCase.java | 124 +++++++++++++- .../integration/DataFrameUsageIT.java | 58 +++++++ .../xpack/dataframe/DataFrame.java | 2 +- .../xpack/dataframe/DataFrameFeatureSet.java | 32 +++- .../action/DataFrameJobStateAndStats.java | 10 +- .../dataframe/job/AggregationResultUtils.java | 1 + .../xpack/dataframe/job/DataFrameIndexer.java | 1 + .../DataFrameJobPersistentTasksExecutor.java | 1 + .../xpack/dataframe/job/DataFrameJobTask.java | 2 + .../dataframe/DataFrameFeatureSetTests.java | 152 ++++++++++++++++++ .../DataFrameJobStateAndStatsTests.java | 17 +- .../job/AggregationResultUtilsTests.java | 1 + .../job/DataFrameIndexerJobStatsTests.java | 34 ---- 21 files changed, 556 insertions(+), 160 deletions(-) rename x-pack/plugin/{data-frame/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/dataframe/job/DataFrameIndexerJobStats.java (87%) rename x-pack/plugin/{data-frame/src/main/java/org/elasticsearch/xpack => core/src/main/java/org/elasticsearch/xpack/core}/dataframe/job/DataFrameJobState.java (99%) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java rename x-pack/plugin/{data-frame/src/test/java/org/elasticsearch/xpack => core/src/test/java/org/elasticsearch/xpack/core}/dataframe/job/DataFrameJobStateTests.java (97%) create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java delete mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java index 9bcc47d98a71c..16ba73198d1f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java @@ -6,18 +6,78 @@ package org.elasticsearch.xpack.core.dataframe; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; import java.io.IOException; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; public 
class DataFrameFeatureSetUsage extends Usage { - public DataFrameFeatureSetUsage(StreamInput input) throws IOException { - super(input); + + private final Map jobCountByState; + private final DataFrameIndexerJobStats accumulatedStats; + + public DataFrameFeatureSetUsage(StreamInput in) throws IOException { + super(in); + this.jobCountByState = in.readMap(StreamInput::readString, StreamInput::readLong); + this.accumulatedStats = new DataFrameIndexerJobStats(in); } - public DataFrameFeatureSetUsage(boolean available, boolean enabled) { + public DataFrameFeatureSetUsage(boolean available, boolean enabled, Map jobCountByState, + DataFrameIndexerJobStats accumulatedStats) { super(XPackField.DATA_FRAME, available, enabled); + this.jobCountByState = Objects.requireNonNull(jobCountByState); + this.accumulatedStats = Objects.requireNonNull(accumulatedStats); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(jobCountByState, StreamOutput::writeString, StreamOutput::writeLong); + accumulatedStats.writeTo(out); + } + + @Override + protected void innerXContent(XContentBuilder builder, Params params) throws IOException { + super.innerXContent(builder, params); + if (jobCountByState.isEmpty() == false) { + builder.startObject(DataFrameField.JOBS.getPreferredName()); + long all = 0L; + for (Entry entry : jobCountByState.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + all+=entry.getValue(); + } + builder.field(MetaData.ALL, all); + builder.endObject(); + + // if there are no jobs, do not show any stats + builder.field(DataFrameField.STATS_FIELD.getPreferredName(), accumulatedStats); + } + } + + @Override + public int hashCode() { + return Objects.hash(enabled, available, jobCountByState, accumulatedStats); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + DataFrameFeatureSetUsage other = (DataFrameFeatureSetUsage) obj; + return Objects.equals(name, other.name) && available == other.available && enabled == other.enabled + && Objects.equals(jobCountByState, other.jobCountByState) + && Objects.equals(accumulatedStats, other.accumulatedStats); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index db14947c8d3af..aa93e6cdd30b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -19,6 +19,7 @@ public final class DataFrameField { public static final ParseField COUNT = new ParseField("count"); public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); + public static final ParseField STATS_FIELD = new ParseField("stats"); // common strings public static final String TASK_NAME = "data_frame/jobs"; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java similarity index 87% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java rename to 
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java index 82dce41296d59..b9906c4129198 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.job; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -78,6 +78,21 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public DataFrameIndexerJobStats merge(DataFrameIndexerJobStats other) { + numPages += other.numPages; + numInputDocuments += other.numInputDocuments; + numOuputDocuments += other.numOuputDocuments; + numInvocations += other.numInvocations; + indexTime += other.indexTime; + searchTime += other.searchTime; + indexTotal += other.indexTotal; + searchTotal += other.searchTotal; + indexFailures += other.indexFailures; + searchFailures += other.searchFailures; + + return this; + } + public static DataFrameIndexerJobStats fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java similarity index 99% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java index fe991b8ed6102..12a7d15f0252b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.job; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java new file mode 100644 index 0000000000000..ba3bb3a6808f6 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.dataframe; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStatsTests; +import org.elasticsearch.xpack.core.indexing.IndexerState; + +import java.util.HashMap; +import java.util.Map; + +public class DataFrameFeatureSetUsageTests extends AbstractWireSerializingTestCase { + + @Override + protected DataFrameFeatureSetUsage createTestInstance() { + Map jobCountByState = new HashMap<>(); + + if (randomBoolean()) { + jobCountByState.put(randomFrom(IndexerState.values()).toString(), randomLong()); + } + + return new DataFrameFeatureSetUsage(randomBoolean(), randomBoolean(), jobCountByState, DataFrameIndexerJobStatsTests.randomStats()); + } + + @Override + protected Reader instanceReader() { + return DataFrameFeatureSetUsage::new; + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java new file mode 100644 index 0000000000000..0a2006197136d --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.dataframe.job; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class DataFrameIndexerJobStatsTests extends AbstractSerializingTestCase { + @Override + protected DataFrameIndexerJobStats createTestInstance() { + return randomStats(); + } + + @Override + protected Writeable.Reader instanceReader() { + return DataFrameIndexerJobStats::new; + } + + @Override + protected DataFrameIndexerJobStats doParseInstance(XContentParser parser) { + return DataFrameIndexerJobStats.fromXContent(parser); + } + + public static DataFrameIndexerJobStats randomStats() { + return new DataFrameIndexerJobStats(randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), + randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), + randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L)); + } + + public void testMerge() throws IOException { + DataFrameIndexerJobStats emptyStats = new DataFrameIndexerJobStats(); + DataFrameIndexerJobStats randomStats = randomStats(); + + assertEquals(randomStats, emptyStats.merge(randomStats)); + assertEquals(randomStats, randomStats.merge(emptyStats)); + + DataFrameIndexerJobStats randomStatsClone = copyInstance(randomStats); + + DataFrameIndexerJobStats trippleRandomStats = new DataFrameIndexerJobStats(3 * randomStats.getNumPages(), + 3 * randomStats.getNumDocuments(), 3 * randomStats.getOutputDocuments(), 3 * randomStats.getNumInvocations(), + 3 * randomStats.getIndexTime(), 3 * randomStats.getSearchTime(), 3 * randomStats.getIndexTotal(), + 3 * randomStats.getSearchTotal(), 3 * randomStats.getIndexFailures(), 3 * randomStats.getSearchFailures()); + + 
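// merging the same stats onto the original twice should exactly triple every counter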
assertEquals(trippleRandomStats, randomStats.merge(randomStatsClone).merge(randomStatsClone)); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java similarity index 97% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java index 93d2e556147bc..a6a93e532e927 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.job; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 63b97fafddb21..bb00c541e11d6 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -6,21 +6,16 @@ package org.elasticsearch.xpack.dataframe.integration; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.junit.AfterClass; import org.junit.Before; + import java.io.IOException; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; public class DataFramePivotRestIT extends DataFrameRestTestCase { @@ -34,112 +29,22 @@ protected boolean preserveIndicesUponCompletion() { } @Before - public void createReviewsIndex() throws IOException { + public void createIndexes() throws IOException { // it's not possible to run it as @BeforeClass as clients aren't initialized then, so we need this little hack if (indicesCreated) { return; } - int[] distributionTable = {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 3, 3, 2, 1, 1, 1}; - - final int numDocs = 1000; - - // create mapping - try (XContentBuilder builder = jsonBuilder()) { - builder.startObject(); - { - builder.startObject("mappings") - .startObject("_doc") - .startObject("properties") - .startObject("user_id") - .field("type", "keyword") - .endObject() - .startObject("business_id") - .field("type", "keyword") - .endObject() - .startObject("stars") - .field("type", "integer") - .endObject() - .endObject() - .endObject() - .endObject(); - } - builder.endObject(); - final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - 
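// create the "reviews" index with the mapping built above; the synthetic documents are then bulk indexed in batches below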
Request req = new Request("PUT", "reviews"); - req.setEntity(entity); - client().performRequest(req); - } - - // create index - final StringBuilder bulk = new StringBuilder(); - for (int i = 0; i < numDocs; i++) { - bulk.append("{\"index\":{\"_index\":\"reviews\",\"_type\":\"_doc\"}}\n"); - long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); - int stars = distributionTable[(i * 33) % distributionTable.length]; - long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); - bulk.append("{\"user_id\":\"") - .append("user_") - .append(user) - .append("\",\"business_id\":\"") - .append("business_") - .append(business) - .append("\",\"stars\":") - .append(stars) - .append("}\n"); - - if (i % 50 == 0) { - bulk.append("\r\n"); - final Request bulkRequest = new Request("POST", "/_bulk"); - bulkRequest.addParameter("refresh", "true"); - bulkRequest.setJsonEntity(bulk.toString()); - client().performRequest(bulkRequest); - // clear the builder - bulk.setLength(0); - } - } - bulk.append("\r\n"); - - final Request bulkRequest = new Request("POST", "/_bulk"); - bulkRequest.addParameter("refresh", "true"); - bulkRequest.setJsonEntity(bulk.toString()); - client().performRequest(bulkRequest); + createReviewsIndex(); indicesCreated = true; } - @AfterClass - public static void removeIndices() throws Exception { - wipeDataFrameJobs(); - waitForPendingDataFrameTasks(); - // we disabled wiping indices, but now its time to get rid of them - // note: can not use super.cleanUpCluster() as this method must be static - wipeIndices(); - } - public void testSimplePivot() throws Exception { String jobId = "simplePivot"; String dataFrameIndex = "pivot_reviews"; - final Request createDataframeJobRequest = new Request("PUT", DATAFRAME_ENDPOINT + jobId); - createDataframeJobRequest.setJsonEntity("{" - + " \"index_pattern\": \"reviews\"," - + " \"destination_index\": \"" + dataFrameIndex + "\"," - + " \"sources\": {" - + " \"sources\": [ {" - + " \"reviewer\": {" - + " \"terms\": {" - + " \"field\": \"user_id\"" - + " } } } ] }," - + " \"aggregations\": {" - + " \"avg_rating\": {" - + " \"avg\": {" - + " \"field\": \"stars\"" - + " } } }" - + "}"); - Map createDataframeJobResponse = entityAsMap(client().performRequest(createDataframeJobRequest)); - assertThat(createDataframeJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); - assertTrue(indexExists(dataFrameIndex)); + createPivotReviewsJob(jobId, dataFrameIndex); // start the job final Request startJobRequest = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_start"); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 8ddd63679a4b1..832c3d6917059 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -6,23 +6,123 @@ package org.elasticsearch.xpack.dataframe.integration; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import 
org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; +import org.junit.AfterClass; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Map; -abstract class DataFrameRestTestCase extends ESRestTestCase { +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +public abstract class DataFrameRestTestCase extends ESRestTestCase { protected static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "jobs/"; + /** + * Create a simple dataset for testing with reviewers, ratings and businesses + */ + protected void createReviewsIndex() throws IOException { + int[] distributionTable = {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 3, 3, 2, 1, 1, 1}; + + final int numDocs = 1000; + + // create mapping + try (XContentBuilder builder = jsonBuilder()) { + builder.startObject(); + { + builder.startObject("mappings") + .startObject("_doc") + .startObject("properties") + .startObject("user_id") + .field("type", "keyword") + .endObject() + .startObject("business_id") + .field("type", "keyword") + .endObject() + .startObject("stars") + .field("type", "integer") + .endObject() + .endObject() + .endObject() + .endObject(); + } + builder.endObject(); + final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + Request req = new Request("PUT", "reviews"); + req.setEntity(entity); + client().performRequest(req); + } + + // create index + final StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < numDocs; i++) { + bulk.append("{\"index\":{\"_index\":\"reviews\",\"_type\":\"_doc\"}}\n"); + long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); + int stars = distributionTable[(i * 33) % distributionTable.length]; + long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); + bulk.append("{\"user_id\":\"") + .append("user_") + .append(user) + .append("\",\"business_id\":\"") + .append("business_") + .append(business) + .append("\",\"stars\":") + .append(stars) + .append("}\n"); + + if (i % 50 == 0) { + bulk.append("\r\n"); + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + // clear the builder + bulk.setLength(0); + } + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.addParameter("refresh", "true"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + } + + protected void createPivotReviewsJob(String jobId, String dataFrameIndex) throws IOException { + final Request createDataframeJobRequest = new Request("PUT", DATAFRAME_ENDPOINT + jobId); + createDataframeJobRequest.setJsonEntity("{" + + " \"index_pattern\": \"reviews\"," + + " \"destination_index\": \"" + dataFrameIndex + "\"," + + " \"sources\": {" + + " \"sources\": [ {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } } ] }," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } 
}" + + "}"); + Map createDataframeJobResponse = entityAsMap(client().performRequest(createDataframeJobRequest)); + assertThat(createDataframeJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + } + @SuppressWarnings("unchecked") private static List> getDataFrameJobs() throws IOException { Response response = adminClient().performRequest(new Request("GET", DATAFRAME_ENDPOINT + "_all")); @@ -39,6 +139,15 @@ protected static String getDataFrameIndexerState(String jobId) throws IOExceptio return (String) XContentMapValues.extractValue("state.job_state", jobStatsAsMap); } + @AfterClass + public static void removeIndices() throws Exception { + wipeDataFrameJobs(); + waitForPendingDataFrameTasks(); + // we might have disabled wiping indices, but now its time to get rid of them + // note: can not use super.cleanUpCluster() as this method must be static + wipeIndices(); + } + protected static void wipeDataFrameJobs() throws IOException, InterruptedException { List> jobConfigs = getDataFrameJobs(); @@ -65,10 +174,17 @@ protected static void wipeDataFrameJobs() throws IOException, InterruptedExcepti // the configuration index should be empty Request request = new Request("GET", DataFrameInternalIndex.INDEX_NAME + "/_search"); - Response searchResponse = adminClient().performRequest(request); - Map searchResult = entityAsMap(searchResponse); + try { + Response searchResponse = adminClient().performRequest(request); + Map searchResult = entityAsMap(searchResponse); - assertEquals(0, XContentMapValues.extractValue("hits.total.value", searchResult)); + assertEquals(0, XContentMapValues.extractValue("hits.total.value", searchResult)); + } catch (ResponseException e) { + // 404 here just means we had no data frame jobs, true for some tests + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } } protected static void waitForPendingDataFrameTasks() throws Exception { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java new file mode 100644 index 0000000000000..3f5547dbf63e6 --- /dev/null +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
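(Context for the createPivotReviewsJob helper above: the job configuration groups the reviews index by the user_id terms source, labelled "reviewer", and applies an avg aggregation over stars, so the destination index effectively gets one document per distinct user_id carrying an avg_rating value. The following standalone sketch is illustrative only, with a hypothetical Review class rather than the plugin's classes, and shows the same group-and-average semantics in plain Java.)

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class PivotSketch {
    // Hypothetical stand-in for one document in the "reviews" source index.
    static class Review {
        final String userId;
        final int stars;
        Review(String userId, int stars) { this.userId = userId; this.stars = stars; }
    }

    // Same shape of result the pivot produces: one entry per reviewer with the
    // mean of the stars field (the "avg_rating" aggregation in the config).
    static Map<String, Double> avgRatingByReviewer(List<Review> reviews) {
        return reviews.stream().collect(
            Collectors.groupingBy(r -> r.userId, Collectors.averagingInt(r -> r.stars)));
    }

    public static void main(String[] args) {
        List<Review> reviews = Arrays.asList(
            new Review("user_0", 5), new Review("user_0", 3), new Review("user_1", 4));
        System.out.println(avgRatingByReviewer(reviews)); // {user_0=4.0, user_1=4.0}
    }
}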
+ */ + +package org.elasticsearch.xpack.dataframe.integration; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.junit.Before; + +import java.io.IOException; +import java.util.Map; + +public class DataFrameUsageIT extends DataFrameRestTestCase { + private boolean indicesCreated = false; + + // preserve indices in order to reuse source indices in several test cases + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Before + public void createIndexes() throws IOException { + + // it's not possible to run it as @BeforeClass as clients aren't initialized then, so we need this little hack + if (indicesCreated) { + return; + } + + createReviewsIndex(); + indicesCreated = true; + } + + public void testUsage() throws IOException { + Response usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); + + Map usageAsMap = entityAsMap(usageResponse); + assertTrue((boolean) XContentMapValues.extractValue("data_frame.available", usageAsMap)); + assertTrue((boolean) XContentMapValues.extractValue("data_frame.enabled", usageAsMap)); + // no jobs, no stats + assertEquals(null, XContentMapValues.extractValue("data_frame.jobs", usageAsMap)); + assertEquals(null, XContentMapValues.extractValue("data_frame.stats", usageAsMap)); + + // create a job + createPivotReviewsJob("test_usage", "pivot_reviews"); + + usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); + + usageAsMap = entityAsMap(usageResponse); + // we should see some stats + assertEquals(1, XContentMapValues.extractValue("data_frame.jobs._all", usageAsMap)); + assertEquals(0, XContentMapValues.extractValue("data_frame.stats.index_failures", usageAsMap)); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index f6e1620aa13cf..2d5ea245ff8df 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -45,6 +45,7 @@ import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; @@ -60,7 +61,6 @@ import org.elasticsearch.xpack.dataframe.action.TransportStopDataFrameJobAction; import org.elasticsearch.xpack.dataframe.job.DataFrameJob; import org.elasticsearch.xpack.dataframe.job.DataFrameJobPersistentTasksExecutor; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobState; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; import org.elasticsearch.xpack.dataframe.rest.action.RestDeleteDataFrameJobAction; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java index 7150a0a8eb836..a213e6a4a3d6f 100644 --- 
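(The DataFrameUsageIT test above expects the _xpack/usage output to report nothing for jobs and stats until a job exists, and then to expose per-state job counts plus accumulated indexer stats. The per-state counting that backs this, visible in DataFrameFeatureSet.usage() in the next hunk, is a Map.merge accumulation; a minimal sketch of that pattern with hypothetical names, using only the JDK:)

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class UsageRollupSketch {
    // Count how many jobs are in each indexer state ("started", "stopped", ...).
    static Map<String, Long> countByState(List<String> states) {
        Map<String, Long> counts = new HashMap<>();
        for (String state : states) {
            // merge() inserts 1 for a new key and adds 1 to an existing count
            counts.merge(state, 1L, Long::sum);
        }
        return counts;
    }

    public static void main(String[] args) {
        System.out.println(countByState(Arrays.asList("started", "stopped", "started")));
        // {started=2, stopped=1}
    }
}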
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.dataframe; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -15,17 +17,24 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; +import java.util.Objects; public class DataFrameFeatureSet implements XPackFeatureSet { private final boolean enabled; + private final Client client; private final XPackLicenseState licenseState; @Inject - public DataFrameFeatureSet(Settings settings, @Nullable XPackLicenseState licenseState) { + public DataFrameFeatureSet(Settings settings, Client client, @Nullable XPackLicenseState licenseState) { this.enabled = XPackSettings.DATA_FRAME_ENABLED.get(settings); + this.client = Objects.requireNonNull(client); this.licenseState = licenseState; } @@ -56,7 +65,24 @@ public Map nativeCodeInfo() { @Override public void usage(ActionListener listener) { - // TODO retrieve and send something useful - listener.onResponse(new DataFrameFeatureSetUsage(available(), enabled())); + if (enabled == false) { + listener.onResponse( + new DataFrameFeatureSetUsage(available(), enabled(), Collections.emptyMap(), new DataFrameIndexerJobStats())); + return; + } + + GetDataFrameJobsStatsAction.Request jobStatsRequest = new GetDataFrameJobsStatsAction.Request(MetaData.ALL); + + client.execute(GetDataFrameJobsStatsAction.INSTANCE, jobStatsRequest, ActionListener.wrap(jobStatsResponse -> { + Map jobCountByState = new HashMap<>(); + DataFrameIndexerJobStats accumulatedStats = new DataFrameIndexerJobStats(); + + jobStatsResponse.getJobsStateAndStats().stream().forEach(singleResult -> { + jobCountByState.merge(singleResult.getJobState().getIndexerState().value(), 1L, Long::sum); + accumulatedStats.merge(singleResult.getJobStats()); + }); + + listener.onResponse(new DataFrameFeatureSetUsage(available(), enabled(), jobCountByState, accumulatedStats)); + }, listener::onFailure)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java index b4585dc3f3611..99471ec8fc9ce 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java @@ -14,8 +14,8 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobState; +import 
org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; import java.io.IOException; import java.util.Objects; @@ -23,7 +23,6 @@ public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { public static final ParseField STATE_FIELD = new ParseField("state"); - public static final ParseField STATS_FIELD = new ParseField("stats"); private final String id; private final DataFrameJobState jobState; @@ -36,7 +35,8 @@ public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID); PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameJobState.PARSER::apply, STATE_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), STATS_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), + DataFrameField.STATS_FIELD); } public DataFrameJobStateAndStats(String id, DataFrameJobState state, DataFrameIndexerJobStats stats) { @@ -56,7 +56,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(DataFrameField.ID.getPreferredName(), id); builder.field(STATE_FIELD.getPreferredName(), jobState); - builder.field(STATS_FIELD.getPreferredName(), jobStats); + builder.field(DataFrameField.STATS_FIELD.getPreferredName(), jobStats); builder.endObject(); return builder; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java index e70f3503f9b00..82bc2e00344e3 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; import java.util.Collection; import java.util.HashMap; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java index 2effab1f3eb84..8f9ffb67177a8 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java @@ -19,6 +19,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; diff --git 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java index 449502e29f774..990e2c9ee481c 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java @@ -17,6 +17,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.DataFrame; import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java index 464255e2dd54d..ec86062b84ddb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java @@ -24,6 +24,8 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java new file mode 100644 index 0000000000000..b7ebfbeff5ee0 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.dataframe.action.DataFrameJobStateAndStats; +import org.elasticsearch.xpack.dataframe.action.DataFrameJobStateAndStatsTests; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Response; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static java.lang.Math.toIntExact; +import static org.hamcrest.core.Is.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.same; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DataFrameFeatureSetTests extends ESTestCase { + private XPackLicenseState licenseState; + + @Before + public void init() { + licenseState = mock(XPackLicenseState.class); + } + + public void testAvailable() { + DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, mock(Client.class), licenseState); + boolean available = randomBoolean(); + when(licenseState.isDataFrameAllowed()).thenReturn(available); + assertThat(featureSet.available(), is(available)); + } + + public void testEnabledSetting() { + boolean enabled = randomBoolean(); + Settings.Builder settings = Settings.builder(); + settings.put("xpack.data_frame.enabled", enabled); + DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(), mock(Client.class), licenseState); + assertThat(featureSet.enabled(), is(enabled)); + } + + public void testEnabledDefault() { + DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, mock(Client.class), licenseState); + assertTrue(featureSet.enabled()); + } + + public void testUsage() throws InterruptedException, ExecutionException, IOException { + Client client = mock(Client.class); + when(licenseState.isDataFrameAllowed()).thenReturn(true); + + DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, client, licenseState); + + List jobsStateAndStats = new ArrayList<>(); + for (int i = 0; i < randomIntBetween(0, 10); ++i) { + jobsStateAndStats.add(DataFrameJobStateAndStatsTests.randomDataFrameJobStateAndStats()); + } + + GetDataFrameJobsStatsAction.Response mockResponse = new GetDataFrameJobsStatsAction.Response(jobsStateAndStats); + + doAnswer(invocationOnMock -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(mockResponse); + return Void.TYPE; + 
}).when(client).execute(same(GetDataFrameJobsStatsAction.INSTANCE), any(), any()); + + PlainActionFuture future = new PlainActionFuture<>(); + featureSet.usage(future); + XPackFeatureSet.Usage usage = future.get(); + + assertTrue(usage.enabled()); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + + XContentParser parser = createParser(builder); + Map usageAsMap = parser.map(); + assertTrue((boolean) XContentMapValues.extractValue("available", usageAsMap)); + + if (jobsStateAndStats.isEmpty()) { + // no jobs, no stats + assertEquals(null, XContentMapValues.extractValue("jobs", usageAsMap)); + assertEquals(null, XContentMapValues.extractValue("stats", usageAsMap)); + } else { + assertEquals(jobsStateAndStats.size(), XContentMapValues.extractValue("jobs._all", usageAsMap)); + + Map stateCounts = new HashMap<>(); + jobsStateAndStats.stream().map(x -> x.getJobState().getIndexerState().value()) + .forEach(x -> stateCounts.merge(x, 1, Integer::sum)); + stateCounts.forEach((k, v) -> assertEquals(v, XContentMapValues.extractValue("jobs." + k, usageAsMap))); + + DataFrameIndexerJobStats combinedStats = jobsStateAndStats.stream().map(x -> x.getJobStats()).reduce((l, r) -> l.merge(r)) + .get(); + + assertEquals(toIntExact(combinedStats.getIndexFailures()), + XContentMapValues.extractValue("stats.index_failures", usageAsMap)); + assertEquals(toIntExact(combinedStats.getIndexTotal()), XContentMapValues.extractValue("stats.index_total", usageAsMap)); + assertEquals(toIntExact(combinedStats.getSearchTime()), + XContentMapValues.extractValue("stats.search_time_in_ms", usageAsMap)); + assertEquals(toIntExact(combinedStats.getNumDocuments()), + XContentMapValues.extractValue("stats.documents_processed", usageAsMap)); + } + } + } + + public void testUsageDisabled() throws IOException, InterruptedException, ExecutionException { + when(licenseState.isDataFrameAllowed()).thenReturn(true); + Settings.Builder settings = Settings.builder(); + settings.put("xpack.data_frame.enabled", false); + DataFrameFeatureSet featureSet = new DataFrameFeatureSet(settings.build(), mock(Client.class), licenseState); + PlainActionFuture future = new PlainActionFuture<>(); + featureSet.usage(future); + XPackFeatureSet.Usage usage = future.get(); + + assertFalse(usage.enabled()); + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + usage.toXContent(builder, ToXContent.EMPTY_PARAMS); + + XContentParser parser = createParser(builder); + Map usageAsMap = parser.map(); + assertTrue((boolean) XContentMapValues.extractValue("available", usageAsMap)); + assertFalse((boolean) XContentMapValues.extractValue("enabled", usageAsMap)); + // not enabled -> no jobs, no stats + assertEquals(null, XContentMapValues.extractValue("jobs", usageAsMap)); + assertEquals(null, XContentMapValues.extractValue("stats", usageAsMap)); + } + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java index 17010c4afd9ff..ff615519ac2c9 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java @@ -8,14 +8,19 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import 
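(The doAnswer stubbing in testUsage above is the standard Mockito idiom for faking a callback-style API: the answer pulls the listener argument out of the invocation and completes it synchronously with a canned response. A reduced, self-contained sketch of the idiom, with hypothetical Service and Callback types and only Mockito on the classpath:)

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

public class CallbackStubSketch {
    interface Callback { void onResponse(String response); }
    interface Service { void execute(String request, Callback callback); }

    public static void main(String[] args) {
        Service service = mock(Service.class);
        doAnswer(invocation -> {
            // argument 1 is the callback; complete it immediately with a canned value
            Callback callback = (Callback) invocation.getArguments()[1];
            callback.onResponse("canned response");
            return null;
        }).when(service).execute(any(), any());

        service.execute("ignored request", response -> System.out.println(response));
    }
}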
org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStatsTests; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobStateTests; import org.elasticsearch.xpack.dataframe.job.AbstractSerializingDataFrameTestCase; -import org.elasticsearch.xpack.dataframe.job.DataFrameIndexerJobStatsTests; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobStateTests; import java.io.IOException; -public class DataFrameJobStateAndStatsTests - extends AbstractSerializingDataFrameTestCase { +public class DataFrameJobStateAndStatsTests extends AbstractSerializingDataFrameTestCase { + + public static DataFrameJobStateAndStats randomDataFrameJobStateAndStats() { + return new DataFrameJobStateAndStats(randomAlphaOfLengthBetween(1, 10), + DataFrameJobStateTests.randomDataFrameJobState(), + DataFrameIndexerJobStatsTests.randomStats()); + } @Override protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throws IOException { @@ -24,9 +29,7 @@ protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throw @Override protected DataFrameJobStateAndStats createTestInstance() { - return new DataFrameJobStateAndStats(randomAlphaOfLengthBetween(1,10), - DataFrameJobStateTests.randomDataFrameJobState(), - DataFrameIndexerJobStatsTests.randomStats()); + return randomDataFrameJobStateAndStats(); } @Override diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java index b440277cf1471..0e69197691e01 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java @@ -45,6 +45,7 @@ import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; import java.io.IOException; import java.util.Collection; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java deleted file mode 100644 index 90629b0a406fd..0000000000000 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexerJobStatsTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.dataframe.job; - -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractSerializingTestCase; - -public class DataFrameIndexerJobStatsTests extends AbstractSerializingTestCase{ - @Override - protected DataFrameIndexerJobStats createTestInstance() { - return randomStats(); - } - - @Override - protected Writeable.Reader instanceReader() { - return DataFrameIndexerJobStats::new; - } - - @Override - protected DataFrameIndexerJobStats doParseInstance(XContentParser parser) { - return DataFrameIndexerJobStats.fromXContent(parser); - } - - public static DataFrameIndexerJobStats randomStats() { - return new DataFrameIndexerJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomNonNegativeLong(), randomNonNegativeLong()); - } -} From 13ef708e58cc08c8f705f1e3de8a6d4d0fb62888 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 17 Jan 2019 12:53:47 +0100 Subject: [PATCH 38/49] [ML-DataFrame] rename jobs to transforms (#37518) rename jobs to transforms --- .../dataframe/DataFrameFeatureSetUsage.java | 30 ++-- .../xpack/core/dataframe/DataFrameField.java | 6 +- .../core/dataframe/DataFrameMessages.java | 23 +-- .../DataFrameIndexerTransformStats.java} | 23 ++- .../DataFrameTransformState.java} | 18 +- .../DataFrameFeatureSetUsageTests.java | 9 +- .../dataframe/DataFrameMessagesTests.java | 6 +- .../DataFrameIndexerTransformStatsTests.java} | 28 ++-- .../DataFrameTransformStateTests.java} | 21 +-- .../DataFrameConfigurationIndexIT.java | 13 +- .../integration/DataFramePivotRestIT.java | 26 +-- .../integration/DataFrameRestTestCase.java | 58 +++---- .../integration/DataFrameUsageIT.java | 10 +- .../xpack/dataframe/DataFrame.java | 92 +++++------ .../xpack/dataframe/DataFrameFeatureSet.java | 22 +-- ...a => DataFrameTransformStateAndStats.java} | 56 +++---- ...va => DeleteDataFrameTransformAction.java} | 15 +- ...java => GetDataFrameTransformsAction.java} | 42 ++--- ...=> GetDataFrameTransformsStatsAction.java} | 40 ++--- ....java => PutDataFrameTransformAction.java} | 22 +-- ...ava => StartDataFrameTransformAction.java} | 8 +- ...java => StopDataFrameTransformAction.java} | 8 +- ...nsportDeleteDataFrameTransformAction.java} | 42 ++--- ...ransportGetDataFrameTransformsAction.java} | 51 +++--- ...ortGetDataFrameTransformsStatsAction.java} | 51 +++--- ...TransportPutDataFrameTransformAction.java} | 81 ++++----- ...ansportStartDataFrameTransformAction.java} | 55 ++++--- ...ransportStopDataFrameTransformAction.java} | 54 +++--- .../persistence/DataFrameInternalIndex.java | 10 +- .../DataFramePersistentTaskUtils.java | 18 +- ... => DataFrameTransformsConfigManager.java} | 60 +++---- .../dataframe/persistence/DataframeIndex.java | 8 +- ...> RestDeleteDataFrameTransformAction.java} | 18 +- ... => RestGetDataFrameTransformsAction.java} | 14 +- ...estGetDataFrameTransformsStatsAction.java} | 14 +- ...a => RestPutDataFrameTransformAction.java} | 16 +- ...=> RestStartDataFrameTransformAction.java} | 18 +- ... 
=> RestStopDataFrameTransformAction.java} | 17 +- ...Validator.java => TransformValidator.java} | 10 +- .../{job => transform}/AggregationConfig.java | 2 +- .../AggregationResultUtils.java | 12 +- .../{job => transform}/DataFrameIndexer.java | 26 +-- .../DataFrameTransform.java} | 32 ++-- .../DataFrameTransformConfig.java} | 35 ++-- ...rameTransformPersistentTasksExecutor.java} | 37 ++--- .../DataFrameTransformTask.java} | 154 +++++++++--------- .../{job => transform}/SourceConfig.java | 2 +- .../dataframe/DataFrameFeatureSetTests.java | 38 ++--- .../DataFrameJobStateAndStatsTests.java | 40 ----- .../DataFrameTransformStateAndStatsTests.java | 40 +++++ ...DataFrameTransformActionRequestTests.java} | 7 +- ...ataFrameTransformsActionRequestTests.java} | 4 +- ...ameTransformsStatsActionRequestTests.java} | 4 +- ...DataFrameTransformActionRequestTests.java} | 18 +- ...> StartDataFrameTransformActionTests.java} | 7 +- ...DataFrameTransformActionRequestTests.java} | 4 +- .../job/DataFrameJobConfigTests.java | 48 ------ .../DataFrameJobConfigManagerTests.java | 91 ----------- ...DataFrameTransformsConfigManagerTests.java | 105 ++++++++++++ ...ests.java => TransformValidatorTests.java} | 38 ++--- .../AbstractSerializingDataFrameTestCase.java | 2 +- .../AggregationConfigTests.java | 3 +- .../AggregationResultUtilsTests.java | 7 +- .../DataFrameTransformConfigTests.java | 49 ++++++ .../{job => transform}/SourceConfigTests.java | 3 +- 65 files changed, 980 insertions(+), 941 deletions(-) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/{job/DataFrameIndexerJobStats.java => transform/DataFrameIndexerTransformStats.java} (80%) rename x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/{job/DataFrameJobState.java => transform/DataFrameTransformState.java} (85%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/{job/DataFrameIndexerJobStatsTests.java => transform/DataFrameIndexerTransformStatsTests.java} (54%) rename x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/{job/DataFrameJobStateTests.java => transform/DataFrameTransformStateTests.java} (59%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{DataFrameJobStateAndStats.java => DataFrameTransformStateAndStats.java} (53%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{DeleteDataFrameJobAction.java => DeleteDataFrameTransformAction.java} (89%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{GetDataFrameJobsAction.java => GetDataFrameTransformsAction.java} (78%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{GetDataFrameJobsStatsAction.java => GetDataFrameTransformsStatsAction.java} (78%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{PutDataFrameJobAction.java => PutDataFrameTransformAction.java} (81%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{StartDataFrameJobAction.java => StartDataFrameTransformAction.java} (94%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{StopDataFrameJobAction.java => StopDataFrameTransformAction.java} (95%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportDeleteDataFrameJobAction.java => TransportDeleteDataFrameTransformAction.java} (65%) rename 
x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportGetDataFrameJobsAction.java => TransportGetDataFrameTransformsAction.java} (58%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportGetDataFrameJobsStatsAction.java => TransportGetDataFrameTransformsStatsAction.java} (56%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportPutDataFrameJobAction.java => TransportPutDataFrameTransformAction.java} (59%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportStartDataFrameJobAction.java => TransportStartDataFrameTransformAction.java} (55%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/{TransportStopDataFrameJobAction.java => TransportStopDataFrameTransformAction.java} (64%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/{DataFrameJobConfigManager.java => DataFrameTransformsConfigManager.java} (66%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestDeleteDataFrameJobAction.java => RestDeleteDataFrameTransformAction.java} (68%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestGetDataFrameJobsStatsAction.java => RestGetDataFrameTransformsAction.java} (63%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestGetDataFrameJobsAction.java => RestGetDataFrameTransformsStatsAction.java} (61%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestPutDataFrameJobAction.java => RestPutDataFrameTransformAction.java} (66%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestStartDataFrameJobAction.java => RestStartDataFrameTransformAction.java} (64%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/{RestStopDataFrameJobAction.java => RestStopDataFrameTransformAction.java} (64%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/{JobValidator.java => TransformValidator.java} (96%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job => transform}/AggregationConfig.java (97%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job => transform}/AggregationResultUtils.java (86%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job => transform}/DataFrameIndexer.java (80%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job/DataFrameJob.java => transform/DataFrameTransform.java} (66%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job/DataFrameJobConfig.java => transform/DataFrameTransformConfig.java} (80%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job/DataFrameJobPersistentTasksExecutor.java => transform/DataFrameTransformPersistentTasksExecutor.java} (59%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job/DataFrameJobTask.java => transform/DataFrameTransformTask.java} (58%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{job => transform}/SourceConfig.java (98%) delete mode 100644 
x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{StartDataFrameJobActionTests.java => DeleteDataFrameTransformActionRequestTests.java} (76%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{GetDataFrameJobsActionRequestTests.java => GetDataFrameTransformsActionRequestTests.java} (80%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{GetDataFrameJobsStatsActionRequestTests.java => GetDataFrameTransformsStatsActionRequestTests.java} (79%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{PutDataFrameJobActionRequestTests.java => PutDataFrameTransformActionRequestTests.java} (75%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{DeleteDataFrameJobActionRequestTests.java => StartDataFrameTransformActionTests.java} (77%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/{StopDataFrameJobActionRequestTests.java => StopDataFrameTransformActionRequestTests.java} (86%) delete mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java delete mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/{JobValidatorTests.java => TransformValidatorTests.java} (83%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{job => transform}/AbstractSerializingDataFrameTestCase.java (97%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{job => transform}/AggregationConfigTests.java (95%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{job => transform}/AggregationResultUtilsTests.java (98%) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{job => transform}/SourceConfigTests.java (94%) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java index 16ba73198d1f4..48cdcd37572c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsage.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; import java.io.IOException; import java.util.Map; @@ -21,50 +21,50 @@ public class DataFrameFeatureSetUsage extends Usage 
{ - private final Map jobCountByState; - private final DataFrameIndexerJobStats accumulatedStats; + private final Map transformCountByState; + private final DataFrameIndexerTransformStats accumulatedStats; public DataFrameFeatureSetUsage(StreamInput in) throws IOException { super(in); - this.jobCountByState = in.readMap(StreamInput::readString, StreamInput::readLong); - this.accumulatedStats = new DataFrameIndexerJobStats(in); + this.transformCountByState = in.readMap(StreamInput::readString, StreamInput::readLong); + this.accumulatedStats = new DataFrameIndexerTransformStats(in); } - public DataFrameFeatureSetUsage(boolean available, boolean enabled, Map jobCountByState, - DataFrameIndexerJobStats accumulatedStats) { + public DataFrameFeatureSetUsage(boolean available, boolean enabled, Map transformCountByState, + DataFrameIndexerTransformStats accumulatedStats) { super(XPackField.DATA_FRAME, available, enabled); - this.jobCountByState = Objects.requireNonNull(jobCountByState); + this.transformCountByState = Objects.requireNonNull(transformCountByState); this.accumulatedStats = Objects.requireNonNull(accumulatedStats); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeMap(jobCountByState, StreamOutput::writeString, StreamOutput::writeLong); + out.writeMap(transformCountByState, StreamOutput::writeString, StreamOutput::writeLong); accumulatedStats.writeTo(out); } @Override protected void innerXContent(XContentBuilder builder, Params params) throws IOException { super.innerXContent(builder, params); - if (jobCountByState.isEmpty() == false) { - builder.startObject(DataFrameField.JOBS.getPreferredName()); + if (transformCountByState.isEmpty() == false) { + builder.startObject(DataFrameField.TRANSFORMS.getPreferredName()); long all = 0L; - for (Entry entry : jobCountByState.entrySet()) { + for (Entry entry : transformCountByState.entrySet()) { builder.field(entry.getKey(), entry.getValue()); all+=entry.getValue(); } builder.field(MetaData.ALL, all); builder.endObject(); - // if there are no jobs, do not show any stats + // if there are no transforms, do not show any stats builder.field(DataFrameField.STATS_FIELD.getPreferredName(), accumulatedStats); } } @Override public int hashCode() { - return Objects.hash(enabled, available, jobCountByState, accumulatedStats); + return Objects.hash(enabled, available, transformCountByState, accumulatedStats); } @Override @@ -77,7 +77,7 @@ public boolean equals(Object obj) { } DataFrameFeatureSetUsage other = (DataFrameFeatureSetUsage) obj; return Objects.equals(name, other.name) && available == other.available && enabled == other.enabled - && Objects.equals(jobCountByState, other.jobCountByState) + && Objects.equals(transformCountByState, other.transformCountByState) && Objects.equals(accumulatedStats, other.accumulatedStats); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index aa93e6cdd30b8..41852aa972900 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -15,16 +15,16 @@ public final class DataFrameField { // common parse fields public static final ParseField ID = new ParseField("id"); - public static final ParseField JOBS = new ParseField("jobs"); + public static final ParseField TRANSFORMS = 
new ParseField("transforms"); public static final ParseField COUNT = new ParseField("count"); public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); public static final ParseField STATS_FIELD = new ParseField("stats"); // common strings - public static final String TASK_NAME = "data_frame/jobs"; + public static final String TASK_NAME = "data_frame/transforms"; public static final String REST_BASE_PATH = "/_data_frame/"; - public static final String REST_BASE_PATH_JOBS_BY_ID = REST_BASE_PATH + "jobs/{id}/"; + public static final String REST_BASE_PATH_TRANSFORMS_BY_ID = REST_BASE_PATH + "transforms/{id}/"; // note: this is used to match tasks public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index d95c6355fcad7..8c47be2b579cd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -11,23 +11,26 @@ public class DataFrameMessages { - public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT = - "Timed out after [{0}] while waiting for data frame job [{1}] to stop"; - public static final String REST_STOP_JOB_WAIT_FOR_COMPLETION_INTERRUPT = "Interrupted while waiting for data frame job [{0}] to stop"; - public static final String REST_PUT_DATA_FRAME_JOB_EXISTS = "Job with id [{0}] already exists"; - public static final String REST_DATA_FRAME_UNKNOWN_JOB = "Job with id [{0}] could not be found"; + public static final String REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT = + "Timed out after [{0}] while waiting for data frame transform [{1}] to stop"; + public static final String REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_INTERRUPT = + "Interrupted while waiting for data frame transform [{0}] to stop"; + public static final String REST_PUT_DATA_FRAME_TRANSFORM_EXISTS = "Transform with id [{0}] already exists"; + public static final String REST_DATA_FRAME_UNKNOWN_TRANSFORM = "Transform with id [{0}] could not be found"; public static final String REST_PUT_DATA_FRAME_FAILED_TO_VALIDATE_DATA_FRAME_CONFIGURATION = "Failed to validate data frame configuration"; - public static final String REST_PUT_DATA_FRAME_FAILED_PERSIST_JOB_CONFIGURATION = "Failed to persist data frame configuration"; + public static final String REST_PUT_DATA_FRAME_FAILED_PERSIST_TRANSFORM_CONFIGURATION = "Failed to persist data frame configuration"; public static final String REST_PUT_DATA_FRAME_FAILED_TO_DEDUCE_TARGET_MAPPINGS = "Failed to deduce target mappings"; public static final String REST_PUT_DATA_FRAME_FAILED_TO_CREATE_TARGET_INDEX = "Failed to create target index"; public static final String REST_PUT_DATA_FRAME_FAILED_TO_START_PERSISTENT_TASK = "Failed to start persistent task, configuration has been cleaned up: [{0}]"; - public static final String REST_DATA_FRAME_FAILED_TO_SERIALIZE_JOB = "Failed to serialise job [{0}]"; + public static final String REST_DATA_FRAME_FAILED_TO_SERIALIZE_TRANSFORM = "Failed to serialise transform [{0}]"; - public static final String FAILED_TO_CREATE_DESTINATION_INDEX = "Could not create destination index [{0}] for job[{1}]"; - public static final String FAILED_TO_LOAD_JOB_CONFIGURATION = 
"Failed to load data frame job configuration for job [{0}]"; - public static final String FAILED_TO_PARSE_JOB_CONFIGURATION = "Failed to parse job configuration for data frame job [{0}]"; + public static final String FAILED_TO_CREATE_DESTINATION_INDEX = "Could not create destination index [{0}] for transform[{1}]"; + public static final String FAILED_TO_LOAD_TRANSFORM_CONFIGURATION = + "Failed to load data frame transform configuration for transform [{0}]"; + public static final String FAILED_TO_PARSE_TRANSFORM_CONFIGURATION = + "Failed to parse transform configuration for data frame transform [{0}]"; private DataFrameMessages() { } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStats.java similarity index 80% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStats.java index b9906c4129198..d6778e8656452 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStats.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.transform; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -17,7 +17,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -public class DataFrameIndexerJobStats extends IndexerJobStats { +public class DataFrameIndexerTransformStats extends IndexerJobStats { private static ParseField NUM_PAGES = new ParseField("pages_processed"); private static ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed"); private static ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("documents_indexed"); @@ -29,10 +29,9 @@ public class DataFrameIndexerJobStats extends IndexerJobStats { private static ParseField SEARCH_FAILURES = new ParseField("search_failures"); private static ParseField INDEX_FAILURES = new ParseField("index_failures"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - args -> new DataFrameIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3], (long) args[4], - (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME.getPreferredName(), args -> new DataFrameIndexerTransformStats((long) args[0], (long) args[1], (long) args[2], + (long) args[3], (long) args[4], (long) args[5], (long) args[6], (long) args[7], (long) args[8], (long) args[9])); static { PARSER.declareLong(constructorArg(), NUM_PAGES); @@ -47,17 +46,17 @@ public class DataFrameIndexerJobStats extends IndexerJobStats { PARSER.declareLong(constructorArg(), SEARCH_FAILURES); } - public DataFrameIndexerJobStats() { + public DataFrameIndexerTransformStats() { super(); } - public DataFrameIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, long indexTime, - long searchTime, long 
indexTotal, long searchTotal, long indexFailures, long searchFailures) { + public DataFrameIndexerTransformStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations, + long indexTime, long searchTime, long indexTotal, long searchTotal, long indexFailures, long searchFailures) { super(numPages, numInputDocuments, numOuputDocuments, numInvocations, indexTime, searchTime, indexTotal, searchTotal, indexFailures, searchFailures); } - public DataFrameIndexerJobStats(StreamInput in) throws IOException { + public DataFrameIndexerTransformStats(StreamInput in) throws IOException { super(in); } @@ -78,7 +77,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public DataFrameIndexerJobStats merge(DataFrameIndexerJobStats other) { + public DataFrameIndexerTransformStats merge(DataFrameIndexerTransformStats other) { numPages += other.numPages; numInputDocuments += other.numInputDocuments; numOuputDocuments += other.numOuputDocuments; @@ -93,7 +92,7 @@ public DataFrameIndexerJobStats merge(DataFrameIndexerJobStats other) { return this; } - public static DataFrameIndexerJobStats fromXContent(XContentParser parser) { + public static DataFrameIndexerTransformStats fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformState.java similarity index 85% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformState.java index 12a7d15f0252b..2338ec1f4dafb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformState.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
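(The merge() method carried over in this rename simply adds every counter of the other stats object onto this one and returns this, which is what lets the feature set fold per-transform stats into a single accumulated total and what the merge assertions in the stats tests rely on. A stripped-down sketch of the same pattern with a hypothetical two-field stats class; the real class tracks ten counters:)

public class StatsMergeSketch {
    static class Stats {
        long pagesProcessed;
        long documentsProcessed;

        // field-by-field addition, mirroring the merge() shown in the diff above
        Stats merge(Stats other) {
            pagesProcessed += other.pagesProcessed;
            documentsProcessed += other.documentsProcessed;
            return this;
        }
    }

    public static void main(String[] args) {
        Stats a = new Stats();
        a.pagesProcessed = 2;
        a.documentsProcessed = 100;
        Stats b = new Stats();
        b.pagesProcessed = 3;
        b.documentsProcessed = 50;
        Stats total = new Stats().merge(a).merge(b);
        System.out.println(total.pagesProcessed + " pages, " + total.documentsProcessed + " docs"); // 5 pages, 150 docs
    }
}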
*/ -package org.elasticsearch.xpack.core.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.transform; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -30,7 +30,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DataFrameJobState implements Task.Status, PersistentTaskState { +public class DataFrameTransformState implements Task.Status, PersistentTaskState { public static final String NAME = DataFrameField.TASK_NAME; private final IndexerState state; @@ -39,13 +39,13 @@ public class DataFrameJobState implements Task.Status, PersistentTaskState { @Nullable private final SortedMap currentPosition; - private static final ParseField STATE = new ParseField("job_state"); + private static final ParseField STATE = new ParseField("transform_state"); private static final ParseField CURRENT_POSITION = new ParseField("current_position"); private static final ParseField GENERATION = new ParseField("generation"); @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - args -> new DataFrameJobState((IndexerState) args[0], (HashMap) args[1], (long) args[2])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + args -> new DataFrameTransformState((IndexerState) args[0], (HashMap) args[1], (long) args[2])); static { PARSER.declareField(constructorArg(), p -> { @@ -67,13 +67,13 @@ public class DataFrameJobState implements Task.Status, PersistentTaskState { PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), GENERATION); } - public DataFrameJobState(IndexerState state, @Nullable Map position, long generation) { + public DataFrameTransformState(IndexerState state, @Nullable Map position, long generation) { this.state = state; this.currentPosition = position == null ? null : Collections.unmodifiableSortedMap(new TreeMap<>(position)); this.generation = generation; } - public DataFrameJobState(StreamInput in) throws IOException { + public DataFrameTransformState(StreamInput in) throws IOException { state = IndexerState.fromStream(in); currentPosition = in.readBoolean() ? 
Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())) : null; generation = in.readLong(); @@ -91,7 +91,7 @@ public long getGeneration() { return generation; } - public static DataFrameJobState fromXContent(XContentParser parser) { + public static DataFrameTransformState fromXContent(XContentParser parser) { try { return PARSER.parse(parser, null); } catch (IOException e) { @@ -136,7 +136,7 @@ public boolean equals(Object other) { return false; } - DataFrameJobState that = (DataFrameJobState) other; + DataFrameTransformState that = (DataFrameTransformState) other; return Objects.equals(this.state, that.state) && Objects.equals(this.currentPosition, that.currentPosition) && this.generation == that.generation; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java index ba3bb3a6808f6..f9b741d335587 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameFeatureSetUsageTests.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStatsTests; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStatsTests; import org.elasticsearch.xpack.core.indexing.IndexerState; import java.util.HashMap; @@ -18,13 +18,14 @@ public class DataFrameFeatureSetUsageTests extends AbstractWireSerializingTestCa @Override protected DataFrameFeatureSetUsage createTestInstance() { - Map jobCountByState = new HashMap<>(); + Map transformCountByState = new HashMap<>(); if (randomBoolean()) { - jobCountByState.put(randomFrom(IndexerState.values()).toString(), randomLong()); + transformCountByState.put(randomFrom(IndexerState.values()).toString(), randomLong()); } - return new DataFrameFeatureSetUsage(randomBoolean(), randomBoolean(), jobCountByState, DataFrameIndexerJobStatsTests.randomStats()); + return new DataFrameFeatureSetUsage(randomBoolean(), randomBoolean(), transformCountByState, + DataFrameIndexerTransformStatsTests.randomStats()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java index 8cb4a49639d3b..fc67dc8ce64e8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessagesTests.java @@ -17,9 +17,9 @@ public class DataFrameMessagesTests extends ESTestCase { public void testGetMessage_WithFormatStrings() { - String formattedMessage = DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT, "30s", - "my_job"); - assertEquals("Timed out after [30s] while waiting for data frame job [my_job] to stop", formattedMessage); + String formattedMessage = DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT, "30s", + "my_transform"); + assertEquals("Timed out after [30s] while waiting for data frame transform [my_transform] to stop", formattedMessage); } public void testMessageProperFormat() throws 
IllegalArgumentException, IllegalAccessException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStatsTests.java similarity index 54% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStatsTests.java index 0a2006197136d..2a762584b88d6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameIndexerJobStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameIndexerTransformStatsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.transform; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; @@ -12,38 +12,38 @@ import java.io.IOException; -public class DataFrameIndexerJobStatsTests extends AbstractSerializingTestCase { +public class DataFrameIndexerTransformStatsTests extends AbstractSerializingTestCase { @Override - protected DataFrameIndexerJobStats createTestInstance() { + protected DataFrameIndexerTransformStats createTestInstance() { return randomStats(); } @Override - protected Writeable.Reader instanceReader() { - return DataFrameIndexerJobStats::new; + protected Writeable.Reader instanceReader() { + return DataFrameIndexerTransformStats::new; } @Override - protected DataFrameIndexerJobStats doParseInstance(XContentParser parser) { - return DataFrameIndexerJobStats.fromXContent(parser); + protected DataFrameIndexerTransformStats doParseInstance(XContentParser parser) { + return DataFrameIndexerTransformStats.fromXContent(parser); } - public static DataFrameIndexerJobStats randomStats() { - return new DataFrameIndexerJobStats(randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), + public static DataFrameIndexerTransformStats randomStats() { + return new DataFrameIndexerTransformStats(randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), - randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L)); + randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L), randomLongBetween(0L, 10000L)); } public void testMerge() throws IOException { - DataFrameIndexerJobStats emptyStats = new DataFrameIndexerJobStats(); - DataFrameIndexerJobStats randomStats = randomStats(); + DataFrameIndexerTransformStats emptyStats = new DataFrameIndexerTransformStats(); + DataFrameIndexerTransformStats randomStats = randomStats(); assertEquals(randomStats, emptyStats.merge(randomStats)); assertEquals(randomStats, randomStats.merge(emptyStats)); - DataFrameIndexerJobStats randomStatsClone = copyInstance(randomStats); + DataFrameIndexerTransformStats randomStatsClone = copyInstance(randomStats); - DataFrameIndexerJobStats trippleRandomStats = new DataFrameIndexerJobStats(3 * randomStats.getNumPages(), + DataFrameIndexerTransformStats trippleRandomStats = new DataFrameIndexerTransformStats(3 * 
randomStats.getNumPages(), 3 * randomStats.getNumDocuments(), 3 * randomStats.getOutputDocuments(), 3 * randomStats.getNumInvocations(), 3 * randomStats.getIndexTime(), 3 * randomStats.getSearchTime(), 3 * randomStats.getIndexTotal(), 3 * randomStats.getSearchTotal(), 3 * randomStats.getIndexFailures(), 3 * randomStats.getSearchFailures()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformStateTests.java similarity index 59% rename from x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformStateTests.java index a6a93e532e927..df5a377d57b02 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/job/DataFrameJobStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/dataframe/transform/DataFrameTransformStateTests.java @@ -4,36 +4,37 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.core.dataframe.job; +package org.elasticsearch.xpack.core.dataframe.transform; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformState; import org.elasticsearch.xpack.core.indexing.IndexerState; import java.io.IOException; import java.util.HashMap; import java.util.Map; -public class DataFrameJobStateTests extends AbstractSerializingTestCase { +public class DataFrameTransformStateTests extends AbstractSerializingTestCase { - public static DataFrameJobState randomDataFrameJobState() { - return new DataFrameJobState(randomFrom(IndexerState.values()), randomPosition(), randomLongBetween(0,10)); + public static DataFrameTransformState randomDataFrameTransformState() { + return new DataFrameTransformState(randomFrom(IndexerState.values()), randomPosition(), randomLongBetween(0,10)); } @Override - protected DataFrameJobState doParseInstance(XContentParser parser) throws IOException { - return DataFrameJobState.fromXContent(parser); + protected DataFrameTransformState doParseInstance(XContentParser parser) throws IOException { + return DataFrameTransformState.fromXContent(parser); } @Override - protected DataFrameJobState createTestInstance() { - return randomDataFrameJobState(); + protected DataFrameTransformState createTestInstance() { + return randomDataFrameTransformState(); } @Override - protected Reader instanceReader() { - return DataFrameJobState::new; + protected Reader instanceReader() { + return DataFrameTransformState::new; } private static Map randomPosition() { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java index 08c797f63cdcf..780350f2f5b92 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java +++ 
b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.io.IOException; @@ -25,28 +25,29 @@ public class DataFrameConfigurationIndexIT extends DataFrameRestTestCase { /** - * Tests the corner case that for some reason a job configuration still exists in the index but + * Tests the corner case that for some reason a transform configuration still exists in the index but * the persistent task disappeared * * test note: {@link DataFrameRestTestCase} checks for an empty index as part of the test case cleanup, * so we do not need to check that the document has been deleted in this place */ public void testDeleteConfigurationLeftOver() throws IOException { - String fakeJobName = randomAlphaOfLengthBetween(5, 20); + String fakeTransformName = randomAlphaOfLengthBetween(5, 20); try (XContentBuilder builder = jsonBuilder()) { builder.startObject(); { - builder.field(DataFrameField.ID.getPreferredName(), fakeJobName); + builder.field(DataFrameField.ID.getPreferredName(), fakeTransformName); } builder.endObject(); final StringEntity entity = new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - Request req = new Request("PUT", DataFrameInternalIndex.INDEX_NAME + "/_doc/" + DataFrameJobConfig.documentId(fakeJobName)); + Request req = new Request("PUT", + DataFrameInternalIndex.INDEX_NAME + "/_doc/" + DataFrameTransformConfig.documentId(fakeTransformName)); req.setEntity(entity); client().performRequest(req); } - Request deleteRequest = new Request("DELETE", DATAFRAME_ENDPOINT + fakeJobName); + Request deleteRequest = new Request("DELETE", DATAFRAME_ENDPOINT + fakeTransformName); Response deleteResponse = client().performRequest(deleteRequest); assertOK(deleteResponse); assertTrue((boolean)XContentMapValues.extractValue("acknowledged", entityAsMap(deleteResponse))); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index bb00c541e11d6..adf8e728c99ec 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -41,18 +41,18 @@ public void createIndexes() throws IOException { } public void testSimplePivot() throws Exception { - String jobId = "simplePivot"; + String transformId = "simplePivot"; String dataFrameIndex = "pivot_reviews"; - createPivotReviewsJob(jobId, dataFrameIndex); + createPivotReviewsTransform(transformId, dataFrameIndex); - // start the job - final Request startJobRequest = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_start"); - Map startJobResponse = entityAsMap(client().performRequest(startJobRequest)); - assertThat(startJobResponse.get("started"), equalTo(Boolean.TRUE)); + // start the 
transform + final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); + Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); + assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE)); // wait until the dataframe has been created and all data is available - waitForDataFrameGeneration(jobId); + waitForDataFrameGeneration(transformId); refreshIndex(dataFrameIndex); // we expect 27 documents as there shall be 27 user_id's @@ -67,18 +67,18 @@ public void testSimplePivot() throws Exception { assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); } - private void waitForDataFrameGeneration(String jobId) throws Exception { + private void waitForDataFrameGeneration(String transformId) throws Exception { assertBusy(() -> { - long generation = getDataFrameGeneration(jobId); + long generation = getDataFrameGeneration(transformId); assertEquals(1, generation); }, 30, TimeUnit.SECONDS); } - private static int getDataFrameGeneration(String jobId) throws IOException { - Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); + private static int getDataFrameGeneration(String transformId) throws IOException { + Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + transformId + "/_stats")); - Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); - return (int) XContentMapValues.extractValue("state.generation", jobStatsAsMap); + Map transformStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("transforms")).get(0); + return (int) XContentMapValues.extractValue("state.generation", transformStatsAsMap); } private void refreshIndex(String index) throws IOException { diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 832c3d6917059..a4853289ff11c 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -29,7 +29,7 @@ public abstract class DataFrameRestTestCase extends ESRestTestCase { - protected static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "jobs/"; + protected static final String DATAFRAME_ENDPOINT = DataFrameField.REST_BASE_PATH + "transforms/"; /** * Create a simple dataset for testing with reviewers, ratings and businesses @@ -69,7 +69,7 @@ protected void createReviewsIndex() throws IOException { // create index final StringBuilder bulk = new StringBuilder(); for (int i = 0; i < numDocs; i++) { - bulk.append("{\"index\":{\"_index\":\"reviews\",\"_type\":\"_doc\"}}\n"); + bulk.append("{\"index\":{\"_index\":\"reviews\"}}\n"); long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); int stars = distributionTable[(i * 33) % distributionTable.length]; long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); @@ -101,9 +101,9 @@ protected void createReviewsIndex() throws IOException { client().performRequest(bulkRequest); } - protected void createPivotReviewsJob(String jobId, String 
dataFrameIndex) throws IOException { - final Request createDataframeJobRequest = new Request("PUT", DATAFRAME_ENDPOINT + jobId); - createDataframeJobRequest.setJsonEntity("{" + protected void createPivotReviewsTransform(String transformId, String dataFrameIndex) throws IOException { + final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); + createDataframeTransformRequest.setJsonEntity("{" + " \"index_pattern\": \"reviews\"," + " \"destination_index\": \"" + dataFrameIndex + "\"," + " \"sources\": {" @@ -118,59 +118,59 @@ protected void createPivotReviewsJob(String jobId, String dataFrameIndex) throws + " \"field\": \"stars\"" + " } } }" + "}"); - Map createDataframeJobResponse = entityAsMap(client().performRequest(createDataframeJobRequest)); - assertThat(createDataframeJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertTrue(indexExists(dataFrameIndex)); } @SuppressWarnings("unchecked") - private static List> getDataFrameJobs() throws IOException { + private static List> getDataFrameTransforms() throws IOException { Response response = adminClient().performRequest(new Request("GET", DATAFRAME_ENDPOINT + "_all")); - Map jobs = entityAsMap(response); - List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); + Map transforms = entityAsMap(response); + List> transformConfigs = (List>) XContentMapValues.extractValue("transforms", transforms); - return jobConfigs == null ? Collections.emptyList() : jobConfigs; + return transformConfigs == null ? Collections.emptyList() : transformConfigs; } - protected static String getDataFrameIndexerState(String jobId) throws IOException { - Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + jobId + "/_stats")); + protected static String getDataFrameIndexerState(String transformId) throws IOException { + Response statsResponse = client().performRequest(new Request("GET", DATAFRAME_ENDPOINT + transformId + "/_stats")); - Map jobStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("jobs")).get(0); - return (String) XContentMapValues.extractValue("state.job_state", jobStatsAsMap); + Map transformStatsAsMap = (Map) ((List) entityAsMap(statsResponse).get("transforms")).get(0); + return (String) XContentMapValues.extractValue("state.transform_state", transformStatsAsMap); } @AfterClass public static void removeIndices() throws Exception { - wipeDataFrameJobs(); + wipeDataFrameTransforms(); waitForPendingDataFrameTasks(); // we might have disabled wiping indices, but now its time to get rid of them // note: can not use super.cleanUpCluster() as this method must be static wipeIndices(); } - protected static void wipeDataFrameJobs() throws IOException, InterruptedException { - List> jobConfigs = getDataFrameJobs(); + protected static void wipeDataFrameTransforms() throws IOException, InterruptedException { + List> transformConfigs = getDataFrameTransforms(); - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("id"); - Request request = new Request("POST", DATAFRAME_ENDPOINT + jobId + "/_stop"); + for (Map transformConfig : transformConfigs) { + String transformId = (String) transformConfig.get("id"); + Request request = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_stop"); request.addParameter("wait_for_completion", 
"true"); request.addParameter("timeout", "10s"); request.addParameter("ignore", "404"); adminClient().performRequest(request); - assertEquals("stopped", getDataFrameIndexerState(jobId)); + assertEquals("stopped", getDataFrameIndexerState(transformId)); } - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("id"); - Request request = new Request("DELETE", DATAFRAME_ENDPOINT + jobId); + for (Map transformConfig : transformConfigs) { + String transformId = (String) transformConfig.get("id"); + Request request = new Request("DELETE", DATAFRAME_ENDPOINT + transformId); request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this adminClient().performRequest(request); } - // jobs should be all gone - jobConfigs = getDataFrameJobs(); - assertTrue(jobConfigs.isEmpty()); + // transforms should be all gone + transformConfigs = getDataFrameTransforms(); + assertTrue(transformConfigs.isEmpty()); // the configuration index should be empty Request request = new Request("GET", DataFrameInternalIndex.INDEX_NAME + "/_search"); @@ -180,7 +180,7 @@ protected static void wipeDataFrameJobs() throws IOException, InterruptedExcepti assertEquals(0, XContentMapValues.extractValue("hits.total.value", searchResult)); } catch (ResponseException e) { - // 404 here just means we had no data frame jobs, true for some tests + // 404 here just means we had no data frame transforms, true for some tests if (e.getResponse().getStatusLine().getStatusCode() != 404) { throw e; } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index 3f5547dbf63e6..abaa2091554a4 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -41,18 +41,18 @@ public void testUsage() throws IOException { Map usageAsMap = entityAsMap(usageResponse); assertTrue((boolean) XContentMapValues.extractValue("data_frame.available", usageAsMap)); assertTrue((boolean) XContentMapValues.extractValue("data_frame.enabled", usageAsMap)); - // no jobs, no stats - assertEquals(null, XContentMapValues.extractValue("data_frame.jobs", usageAsMap)); + // no transforms, no stats + assertEquals(null, XContentMapValues.extractValue("data_frame.transforms", usageAsMap)); assertEquals(null, XContentMapValues.extractValue("data_frame.stats", usageAsMap)); - // create a job - createPivotReviewsJob("test_usage", "pivot_reviews"); + // create a transform + createPivotReviewsTransform("test_usage", "pivot_reviews"); usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); usageAsMap = entityAsMap(usageResponse); // we should see some stats - assertEquals(1, XContentMapValues.extractValue("data_frame.jobs._all", usageAsMap)); + assertEquals(1, XContentMapValues.extractValue("data_frame.transforms._all", usageAsMap)); assertEquals(0, XContentMapValues.extractValue("data_frame.stats.index_failures", usageAsMap)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index 2d5ea245ff8df..0a055c00d8098 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -45,30 +45,30 @@ import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; -import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.TransportDeleteDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameJobsAction; -import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.dataframe.action.TransportPutDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.TransportStartDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.TransportStopDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobPersistentTasksExecutor; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.TransportDeleteDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameTransformsAction; +import org.elasticsearch.xpack.dataframe.action.TransportGetDataFrameTransformsStatsAction; +import org.elasticsearch.xpack.dataframe.action.TransportPutDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.TransportStartDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.TransportStopDataFrameTransformAction; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; -import org.elasticsearch.xpack.dataframe.rest.action.RestDeleteDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsAction; -import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.dataframe.rest.action.RestPutDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.rest.action.RestStartDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.rest.action.RestStopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; +import org.elasticsearch.xpack.dataframe.rest.action.RestDeleteDataFrameTransformAction; +import 
org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameTransformsAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestGetDataFrameTransformsStatsAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestPutDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestStartDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.rest.action.RestStopDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransform; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformPersistentTasksExecutor; import java.io.IOException; import java.time.Clock; @@ -90,7 +90,7 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu public static final String NAME = "data_frame"; public static final String TASK_THREAD_POOL_NAME = "data_frame_indexing"; - // list of headers that will be stored when a job is created + // list of headers that will be stored when a transform is created public static final Set HEADER_FILTERS = new HashSet<>( Arrays.asList("es-security-runas-user", "_xpack_security_authentication")); @@ -99,7 +99,7 @@ public class DataFrame extends Plugin implements ActionPlugin, PersistentTaskPlu private final boolean enabled; private final Settings settings; private final boolean transportClientMode; - private final SetOnce dataFrameJobConfigManager = new SetOnce<>(); + private final SetOnce dataFrameTransformsConfigManager = new SetOnce<>(); public DataFrame(Settings settings) { this.settings = settings; @@ -132,12 +132,12 @@ public List getRestHandlers(final Settings settings, final RestCont } return Arrays.asList( - new RestPutDataFrameJobAction(settings, restController), - new RestStartDataFrameJobAction(settings, restController), - new RestStopDataFrameJobAction(settings, restController), - new RestDeleteDataFrameJobAction(settings, restController), - new RestGetDataFrameJobsAction(settings, restController), - new RestGetDataFrameJobsStatsAction(settings, restController) + new RestPutDataFrameTransformAction(settings, restController), + new RestStartDataFrameTransformAction(settings, restController), + new RestStopDataFrameTransformAction(settings, restController), + new RestDeleteDataFrameTransformAction(settings, restController), + new RestGetDataFrameTransformsAction(settings, restController), + new RestGetDataFrameTransformsStatsAction(settings, restController) ); } @@ -148,12 +148,12 @@ public List getRestHandlers(final Settings settings, final RestCont } return Arrays.asList( - new ActionHandler<>(PutDataFrameJobAction.INSTANCE, TransportPutDataFrameJobAction.class), - new ActionHandler<>(StartDataFrameJobAction.INSTANCE, TransportStartDataFrameJobAction.class), - new ActionHandler<>(StopDataFrameJobAction.INSTANCE, TransportStopDataFrameJobAction.class), - new ActionHandler<>(DeleteDataFrameJobAction.INSTANCE, TransportDeleteDataFrameJobAction.class), - new ActionHandler<>(GetDataFrameJobsAction.INSTANCE, TransportGetDataFrameJobsAction.class), - new ActionHandler<>(GetDataFrameJobsStatsAction.INSTANCE, TransportGetDataFrameJobsStatsAction.class) + new ActionHandler<>(PutDataFrameTransformAction.INSTANCE, TransportPutDataFrameTransformAction.class), + new ActionHandler<>(StartDataFrameTransformAction.INSTANCE, TransportStartDataFrameTransformAction.class), + new ActionHandler<>(StopDataFrameTransformAction.INSTANCE, TransportStopDataFrameTransformAction.class), + new ActionHandler<>(DeleteDataFrameTransformAction.INSTANCE, 
TransportDeleteDataFrameTransformAction.class), + new ActionHandler<>(GetDataFrameTransformsAction.INSTANCE, TransportGetDataFrameTransformsAction.class), + new ActionHandler<>(GetDataFrameTransformsStatsAction.INSTANCE, TransportGetDataFrameTransformsStatsAction.class) ); } @@ -177,9 +177,9 @@ public Collection createComponents(Client client, ClusterService cluster return emptyList(); } - dataFrameJobConfigManager.set(new DataFrameJobConfigManager(client, xContentRegistry)); + dataFrameTransformsConfigManager.set(new DataFrameTransformsConfigManager(client, xContentRegistry)); - return Collections.singletonList(dataFrameJobConfigManager.get()); + return Collections.singletonList(dataFrameTransformsConfigManager.get()); } @Override @@ -203,10 +203,10 @@ public List> getPersistentTasksExecutor(ClusterServic SchedulerEngine schedulerEngine = new SchedulerEngine(settings, Clock.systemUTC()); - // the job config manager should have been created - assert dataFrameJobConfigManager.get() != null; + // the transforms config manager should have been created + assert dataFrameTransformsConfigManager.get() != null; return Collections.singletonList( - new DataFrameJobPersistentTasksExecutor(client, dataFrameJobConfigManager.get(), schedulerEngine, threadPool)); + new DataFrameTransformPersistentTasksExecutor(client, dataFrameTransformsConfigManager.get(), schedulerEngine, threadPool)); } @Override @@ -216,11 +216,11 @@ public List getNamedXContent() { } return Arrays.asList( new NamedXContentRegistry.Entry(PersistentTaskParams.class, new ParseField(DataFrameField.TASK_NAME), - DataFrameJob::fromXContent), - new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(DataFrameJobState.NAME), - DataFrameJobState::fromXContent), - new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(DataFrameJobState.NAME), - DataFrameJobState::fromXContent) + DataFrameTransform::fromXContent), + new NamedXContentRegistry.Entry(Task.Status.class, new ParseField(DataFrameTransformState.NAME), + DataFrameTransformState::fromXContent), + new NamedXContentRegistry.Entry(PersistentTaskState.class, new ParseField(DataFrameTransformState.NAME), + DataFrameTransformState::fromXContent) ); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java index a213e6a4a3d6f..9be80024975d8 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSet.java @@ -17,8 +17,8 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.dataframe.DataFrameFeatureSetUsage; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction; import java.util.Collections; import java.util.HashMap; @@ -67,22 +67,22 @@ public Map nativeCodeInfo() { public void usage(ActionListener listener) { if (enabled == false) { listener.onResponse( - new DataFrameFeatureSetUsage(available(), enabled(), Collections.emptyMap(), new DataFrameIndexerJobStats())); + new DataFrameFeatureSetUsage(available(), 
enabled(), Collections.emptyMap(), new DataFrameIndexerTransformStats())); return; } - GetDataFrameJobsStatsAction.Request jobStatsRequest = new GetDataFrameJobsStatsAction.Request(MetaData.ALL); + GetDataFrameTransformsStatsAction.Request transformStatsRequest = new GetDataFrameTransformsStatsAction.Request(MetaData.ALL); - client.execute(GetDataFrameJobsStatsAction.INSTANCE, jobStatsRequest, ActionListener.wrap(jobStatsResponse -> { - Map jobCountByState = new HashMap<>(); - DataFrameIndexerJobStats accumulatedStats = new DataFrameIndexerJobStats(); + client.execute(GetDataFrameTransformsStatsAction.INSTANCE, transformStatsRequest, ActionListener.wrap(transformStatsResponse -> { + Map transformsCountByState = new HashMap<>(); + DataFrameIndexerTransformStats accumulatedStats = new DataFrameIndexerTransformStats(); - jobStatsResponse.getJobsStateAndStats().stream().forEach(singleResult -> { - jobCountByState.merge(singleResult.getJobState().getIndexerState().value(), 1L, Long::sum); - accumulatedStats.merge(singleResult.getJobStats()); + transformStatsResponse.getTransformsStateAndStats().stream().forEach(singleResult -> { + transformsCountByState.merge(singleResult.getTransformState().getIndexerState().value(), 1L, Long::sum); + accumulatedStats.merge(singleResult.getTransformStats()); }); - listener.onResponse(new DataFrameFeatureSetUsage(available(), enabled(), jobCountByState, accumulatedStats)); + listener.onResponse(new DataFrameFeatureSetUsage(available(), enabled(), transformsCountByState, accumulatedStats)); }, listener::onFailure)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStats.java similarity index 53% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStats.java index 99471ec8fc9ce..1b8a7dfbd6805 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStats.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStats.java @@ -14,49 +14,49 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformState; import java.io.IOException; import java.util.Objects; -public class DataFrameJobStateAndStats implements Writeable, ToXContentObject { +public class DataFrameTransformStateAndStats implements Writeable, ToXContentObject { public static final ParseField STATE_FIELD = new ParseField("state"); private final String id; - private final DataFrameJobState jobState; - private final DataFrameIndexerJobStats jobStats; + private final DataFrameTransformState transformState; + private final DataFrameIndexerTransformStats transformStats; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - GetDataFrameJobsAction.NAME, - a -> new 
DataFrameJobStateAndStats((String) a[0], (DataFrameJobState) a[1], (DataFrameIndexerJobStats) a[2])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + GetDataFrameTransformsAction.NAME, + a -> new DataFrameTransformStateAndStats((String) a[0], (DataFrameTransformState) a[1], (DataFrameIndexerTransformStats) a[2])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameJobState.PARSER::apply, STATE_FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerJobStats.fromXContent(p), + PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataFrameTransformState.PARSER::apply, STATE_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> DataFrameIndexerTransformStats.fromXContent(p), DataFrameField.STATS_FIELD); } - public DataFrameJobStateAndStats(String id, DataFrameJobState state, DataFrameIndexerJobStats stats) { + public DataFrameTransformStateAndStats(String id, DataFrameTransformState state, DataFrameIndexerTransformStats stats) { this.id = Objects.requireNonNull(id); - this.jobState = Objects.requireNonNull(state); - this.jobStats = Objects.requireNonNull(stats); + this.transformState = Objects.requireNonNull(state); + this.transformStats = Objects.requireNonNull(stats); } - public DataFrameJobStateAndStats(StreamInput in) throws IOException { + public DataFrameTransformStateAndStats(StreamInput in) throws IOException { this.id = in.readString(); - this.jobState = new DataFrameJobState(in); - this.jobStats = new DataFrameIndexerJobStats(in); + this.transformState = new DataFrameTransformState(in); + this.transformStats = new DataFrameIndexerTransformStats(in); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(DataFrameField.ID.getPreferredName(), id); - builder.field(STATE_FIELD.getPreferredName(), jobState); - builder.field(DataFrameField.STATS_FIELD.getPreferredName(), jobStats); + builder.field(STATE_FIELD.getPreferredName(), transformState); + builder.field(DataFrameField.STATS_FIELD.getPreferredName(), transformStats); builder.endObject(); return builder; } @@ -64,13 +64,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - jobState.writeTo(out); - jobStats.writeTo(out); + transformState.writeTo(out); + transformStats.writeTo(out); } @Override public int hashCode() { - return Objects.hash(id, jobState, jobStats); + return Objects.hash(id, transformState, transformStats); } @Override @@ -83,21 +83,21 @@ public boolean equals(Object other) { return false; } - DataFrameJobStateAndStats that = (DataFrameJobStateAndStats) other; + DataFrameTransformStateAndStats that = (DataFrameTransformStateAndStats) other; - return Objects.equals(this.id, that.id) && Objects.equals(this.jobState, that.jobState) - && Objects.equals(this.jobStats, that.jobStats); + return Objects.equals(this.id, that.id) && Objects.equals(this.transformState, that.transformState) + && Objects.equals(this.transformStats, that.transformStats); } public String getId() { return id; } - public DataFrameIndexerJobStats getJobStats() { - return jobStats; + public DataFrameIndexerTransformStats getTransformStats() { + return transformStats; } - public DataFrameJobState 
getJobState() { - return jobState; + public DataFrameTransformState getTransformState() { + return transformState; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformAction.java similarity index 89% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformAction.java index 1aec0746ef2a9..06a8c5cf47d07 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformAction.java @@ -28,12 +28,12 @@ import java.util.List; import java.util.Objects; -public class DeleteDataFrameJobAction extends Action { +public class DeleteDataFrameTransformAction extends Action { - public static final DeleteDataFrameJobAction INSTANCE = new DeleteDataFrameJobAction(); + public static final DeleteDataFrameTransformAction INSTANCE = new DeleteDataFrameTransformAction(); public static final String NAME = "cluster:admin/data_frame/delete"; - private DeleteDataFrameJobAction() { + private DeleteDataFrameTransformAction() { super(NAME); } @@ -102,10 +102,11 @@ public boolean equals(Object obj) { } } - public static class RequestBuilder extends ActionRequestBuilder { + public static class RequestBuilder + extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameJobAction action) { - super(client, action, new DeleteDataFrameJobAction.Request()); + protected RequestBuilder(ElasticsearchClient client, DeleteDataFrameTransformAction action) { + super(client, action, new DeleteDataFrameTransformAction.Request()); } } @@ -162,7 +163,7 @@ public boolean equals(Object o) { return true; if (o == null || getClass() != o.getClass()) return false; - DeleteDataFrameJobAction.Response response = (DeleteDataFrameJobAction.Response) o; + DeleteDataFrameTransformAction.Response response = (DeleteDataFrameTransformAction.Response) o; return super.equals(o) && acknowledged == response.acknowledged; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java similarity index 78% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java index d783f9b501f14..ce4935fdc6a3f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java @@ -24,19 +24,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.io.IOException; import java.util.Collections; import java.util.List; import 
java.util.Objects; -public class GetDataFrameJobsAction extends Action{ +public class GetDataFrameTransformsAction extends Action{ - public static final GetDataFrameJobsAction INSTANCE = new GetDataFrameJobsAction(); + public static final GetDataFrameTransformsAction INSTANCE = new GetDataFrameTransformsAction(); public static final String NAME = "cluster:monitor/data_frame/get"; - private GetDataFrameJobsAction() { + private GetDataFrameTransformsAction() { super(NAME); } @@ -65,7 +65,7 @@ public Request(StreamInput in) throws IOException { @Override public boolean match(Task task) { - // If we are retrieving all the jobs, the task description does not contain the id + // If we are retrieving all the transforms, the task description does not contain the id if (id.equals(MetaData.ALL)) { return task.getDescription().startsWith(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX); } @@ -114,24 +114,24 @@ public boolean equals(Object obj) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, GetDataFrameJobsAction action) { + protected RequestBuilder(ElasticsearchClient client, GetDataFrameTransformsAction action) { super(client, action, new Request()); } } public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { - private List jobConfigurations; + private List transformConfigurations; - public Response(List jobConfigs) { + public Response(List transformConfigs) { super(Collections.emptyList(), Collections.emptyList()); - this.jobConfigurations = jobConfigs; + this.transformConfigurations = transformConfigs; } - public Response(List jobConfigs, List taskFailures, + public Response(List transformConfigs, List taskFailures, List nodeFailures) { super(taskFailures, nodeFailures); - this.jobConfigurations = jobConfigs; + this.transformConfigurations = transformConfigs; } public Response() { @@ -143,31 +143,31 @@ public Response(StreamInput in) throws IOException { readFrom(in); } - public List getJobConfigurations() { - return jobConfigurations; + public List getTransformConfigurations() { + return transformConfigurations; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - jobConfigurations = in.readList(DataFrameJobConfig::new); + transformConfigurations = in.readList(DataFrameTransformConfig::new); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeList(jobConfigurations); + out.writeList(transformConfigurations); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(DataFrameField.COUNT.getPreferredName(), jobConfigurations.size()); + builder.field(DataFrameField.COUNT.getPreferredName(), transformConfigurations.size()); // XContentBuilder does not support passing the params object for Iterables - builder.field(DataFrameField.JOBS.getPreferredName()); + builder.field(DataFrameField.TRANSFORMS.getPreferredName()); builder.startArray(); - for (DataFrameJobConfig jobResponse : jobConfigurations) { - jobResponse.toXContent(builder, params); + for (DataFrameTransformConfig configResponse : transformConfigurations) { + configResponse.toXContent(builder, params); } builder.endArray(); builder.endObject(); @@ -176,7 +176,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(jobConfigurations); + return 
Objects.hash(transformConfigurations); } @Override @@ -190,7 +190,7 @@ public boolean equals(Object other) { } final Response that = (Response) other; - return Objects.equals(this.jobConfigurations, that.jobConfigurations); + return Objects.equals(this.transformConfigurations, that.transformConfigurations); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsAction.java similarity index 78% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsAction.java index 7a0cac7feafd1..0dff8923dfeaa 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsAction.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.dataframe.DataFrameField; @@ -30,11 +30,11 @@ import java.util.List; import java.util.Objects; -public class GetDataFrameJobsStatsAction extends Action { +public class GetDataFrameTransformsStatsAction extends Action { - public static final GetDataFrameJobsStatsAction INSTANCE = new GetDataFrameJobsStatsAction(); + public static final GetDataFrameTransformsStatsAction INSTANCE = new GetDataFrameTransformsStatsAction(); public static final String NAME = "cluster:monitor/data_frame_stats/get"; - public GetDataFrameJobsStatsAction() { + public GetDataFrameTransformsStatsAction() { super(NAME); } @@ -63,7 +63,7 @@ public Request(StreamInput in) throws IOException { @Override public boolean match(Task task) { - // If we are retrieving all the jobs, the task description does not contain the id + // If we are retrieving all the transforms, the task description does not contain the id if (id.equals(MetaData.ALL)) { return task.getDescription().startsWith(DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX); } @@ -112,28 +112,28 @@ public boolean equals(Object obj) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, GetDataFrameJobsStatsAction action) { + protected RequestBuilder(ElasticsearchClient client, GetDataFrameTransformsStatsAction action) { super(client, action, new Request()); } } public static class Response extends BaseTasksResponse implements Writeable, ToXContentObject { - private List jobsStateAndStats; + private List transformsStateAndStats; - public Response(List jobsStateAndStats) { + public Response(List transformsStateAndStats) { super(Collections.emptyList(), Collections.emptyList()); - this.jobsStateAndStats = jobsStateAndStats; + this.transformsStateAndStats = transformsStateAndStats; } - public Response(List jobsStateAndStats, List taskFailures, + public Response(List transformsStateAndStats, List taskFailures, List 
nodeFailures) { super(taskFailures, nodeFailures); - this.jobsStateAndStats = jobsStateAndStats; + this.transformsStateAndStats = transformsStateAndStats; } public Response() { super(Collections.emptyList(), Collections.emptyList()); - this.jobsStateAndStats = Collections.emptyList(); + this.transformsStateAndStats = Collections.emptyList(); } public Response(StreamInput in) throws IOException { @@ -141,34 +141,34 @@ public Response(StreamInput in) throws IOException { readFrom(in); } - public List getJobsStateAndStats() { - return jobsStateAndStats; + public List getTransformsStateAndStats() { + return transformsStateAndStats; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - jobsStateAndStats = in.readList(DataFrameJobStateAndStats::new); + transformsStateAndStats = in.readList(DataFrameTransformStateAndStats::new); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeList(jobsStateAndStats); + out.writeList(transformsStateAndStats); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(DataFrameField.COUNT.getPreferredName(), jobsStateAndStats.size()); - builder.field(DataFrameField.JOBS.getPreferredName(), jobsStateAndStats); + builder.field(DataFrameField.COUNT.getPreferredName(), transformsStateAndStats.size()); + builder.field(DataFrameField.TRANSFORMS.getPreferredName(), transformsStateAndStats); builder.endObject(); return builder; } @Override public int hashCode() { - return Objects.hash(jobsStateAndStats); + return Objects.hash(transformsStateAndStats); } @Override @@ -182,7 +182,7 @@ public boolean equals(Object other) { } final Response that = (Response) other; - return Objects.equals(this.jobsStateAndStats, that.jobsStateAndStats); + return Objects.equals(this.transformsStateAndStats, that.transformsStateAndStats); } @Override diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java similarity index 81% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java index 946522fe9a199..89cc6a4c7ef99 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java @@ -17,17 +17,17 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.io.IOException; import java.util.Objects; -public class PutDataFrameJobAction extends Action { +public class PutDataFrameTransformAction extends Action { - public static final PutDataFrameJobAction INSTANCE = new PutDataFrameJobAction(); + public static final PutDataFrameTransformAction INSTANCE = new PutDataFrameTransformAction(); public static final String NAME = "cluster:admin/data_frame/put"; - private PutDataFrameJobAction() { + private PutDataFrameTransformAction() { 
super(NAME); } @@ -38,9 +38,9 @@ public Response newResponse() { public static class Request extends AcknowledgedRequest implements ToXContentObject { - private DataFrameJobConfig config; + private DataFrameTransformConfig config; - public Request(DataFrameJobConfig config) { + public Request(DataFrameTransformConfig config) { this.setConfig(config); } @@ -49,7 +49,7 @@ public Request() { } public static Request fromXContent(final XContentParser parser, final String id) throws IOException { - return new Request(DataFrameJobConfig.fromXContent(parser, id)); + return new Request(DataFrameTransformConfig.fromXContent(parser, id)); } @Override @@ -62,18 +62,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return this.config.toXContent(builder, params); } - public DataFrameJobConfig getConfig() { + public DataFrameTransformConfig getConfig() { return config; } - public void setConfig(DataFrameJobConfig config) { + public void setConfig(DataFrameTransformConfig config) { this.config = config; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - this.config = new DataFrameJobConfig(in); + this.config = new DataFrameTransformConfig(in); } @Override @@ -102,7 +102,7 @@ public boolean equals(Object obj) { public static class RequestBuilder extends MasterNodeOperationRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, PutDataFrameJobAction action) { + protected RequestBuilder(ElasticsearchClient client, PutDataFrameTransformAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformAction.java similarity index 94% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformAction.java index e8013b4798892..9a2e227f9e083 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformAction.java @@ -25,12 +25,12 @@ import java.util.Collections; import java.util.Objects; -public class StartDataFrameJobAction extends Action { +public class StartDataFrameTransformAction extends Action { - public static final StartDataFrameJobAction INSTANCE = new StartDataFrameJobAction(); + public static final StartDataFrameTransformAction INSTANCE = new StartDataFrameTransformAction(); public static final String NAME = "cluster:admin/data_frame/start"; - private StartDataFrameJobAction() { + private StartDataFrameTransformAction() { super(NAME); } @@ -95,7 +95,7 @@ public boolean equals(Object obj) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, StartDataFrameJobAction action) { + protected RequestBuilder(ElasticsearchClient client, StartDataFrameTransformAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformAction.java similarity index 95% rename from 
x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformAction.java index b31bde4f843c6..9fe0a75a120d1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformAction.java @@ -28,14 +28,14 @@ import java.util.Objects; import java.util.concurrent.TimeUnit; -public class StopDataFrameJobAction extends Action { +public class StopDataFrameTransformAction extends Action { - public static final StopDataFrameJobAction INSTANCE = new StopDataFrameJobAction(); + public static final StopDataFrameTransformAction INSTANCE = new StopDataFrameTransformAction(); public static final String NAME = "cluster:admin/data_frame/stop"; public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private StopDataFrameJobAction() { + private StopDataFrameTransformAction() { super(NAME); } @@ -135,7 +135,7 @@ public boolean match(Task task) { public static class RequestBuilder extends ActionRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, StopDataFrameJobAction action) { + protected RequestBuilder(ElasticsearchClient client, StopDataFrameTransformAction action) { super(client, action, new Request()); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java similarity index 65% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java index e1658e8b9f690..fcdccc5f7b8c7 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportDeleteDataFrameTransformAction.java @@ -23,24 +23,24 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.indexing.IndexerState; -import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; -import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Response; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameTransformAction.Request; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameTransformAction.Response; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; import java.util.List; -public class TransportDeleteDataFrameJobAction extends TransportTasksAction { +public class TransportDeleteDataFrameTransformAction extends TransportTasksAction { - private final DataFrameJobConfigManager jobConfigManager; + private final DataFrameTransformsConfigManager transformsConfigManager; @Inject - public TransportDeleteDataFrameJobAction(TransportService 
transportService, ThreadPool threadPool, ActionFilters actionFilters, + public TransportDeleteDataFrameTransformAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, PersistentTasksService persistentTasksService, - ClusterService clusterService, DataFrameJobConfigManager jobConfigManager) { - super(DeleteDataFrameJobAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, - ThreadPool.Names.SAME); - this.jobConfigManager = jobConfigManager; + ClusterService clusterService, DataFrameTransformsConfigManager transformsConfigManager) { + super(DeleteDataFrameTransformAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + Response::new, ThreadPool.Names.SAME); + this.transformsConfigManager = transformsConfigManager; } @Override @@ -53,17 +53,17 @@ protected Response newResponse(Request request, List tasks, List listener) { - assert task.getJobId().equals(request.getId()); + protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener listener) { + assert task.getTransformId().equals(request.getId()); IndexerState state = task.getState().getIndexerState(); if (state.equals(IndexerState.STOPPED)) { task.onCancelled(); - jobConfigManager.deleteJobConfiguration(request.getId(), ActionListener.wrap(r -> { + transformsConfigManager.deleteTransformConfiguration(request.getId(), ActionListener.wrap(r -> { listener.onResponse(new Response(true)); }, listener::onFailure)); } else { - listener.onFailure(new IllegalStateException("Could not delete job [" + request.getId() + "] because " + "indexer state is [" - + state + "]. Job must be [" + IndexerState.STOPPED + "] before deletion.")); + listener.onFailure(new IllegalStateException("Could not delete transform [" + request.getId() + "] because " + + "indexer state is [" + state + "]. Transform must be [" + IndexerState.STOPPED + "] before deletion.")); } } @@ -76,17 +76,17 @@ protected void doExecute(Task task, Request request, ActionListener li if (pTasksMeta != null && pTasksMeta.getTask(request.getId()) != null) { super.doExecute(task, request, listener); } else { - // we couldn't find the job in the persistent task CS, but maybe the job exists in the configuration index, + // we couldn't find the transform in the persistent task CS, but maybe the transform exists in the configuration index, // if so delete the orphaned document and do not throw (for the normal case we want to stop the task first, - // than delete the configuration document if and only if the data frame job is in stopped state) - jobConfigManager.deleteJobConfiguration(request.getId(), ActionListener.wrap(r -> { + // than delete the configuration document if and only if the data frame transform is in stopped state) + transformsConfigManager.deleteTransformConfiguration(request.getId(), ActionListener.wrap(r -> { listener.onResponse(new Response(true)); return; }, listener::onFailure)); } } else { - // Delegates DeleteJob to elected master node, so it becomes the coordinating node. - // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // Delegates DeleteTransform to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows transforms which are cancelled // on the master, which makes testing difficult. 
if (nodes.getMasterNode() == null) { listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsAction.java similarity index 58% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsAction.java index 2d85c04163688..2e9e7f9498123 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsAction.java @@ -21,49 +21,48 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Request; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Response; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction.Response; import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -public class TransportGetDataFrameJobsAction extends - TransportTasksAction { +public class TransportGetDataFrameTransformsAction extends + TransportTasksAction { - private final DataFrameJobConfigManager jobConfigManager; + private final DataFrameTransformsConfigManager transformsConfigManager; @Inject - public TransportGetDataFrameJobsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, - DataFrameJobConfigManager jobConfigManager) { - super(GetDataFrameJobsAction.NAME, clusterService, transportService, actionFilters, GetDataFrameJobsAction.Request::new, - GetDataFrameJobsAction.Response::new, GetDataFrameJobsAction.Response::new, ThreadPool.Names.SAME); - this.jobConfigManager = jobConfigManager; + public TransportGetDataFrameTransformsAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, DataFrameTransformsConfigManager transformsConfigManager) { + super(GetDataFrameTransformsAction.NAME, clusterService, transportService, actionFilters, GetDataFrameTransformsAction.Request::new, + GetDataFrameTransformsAction.Response::new, GetDataFrameTransformsAction.Response::new, ThreadPool.Names.SAME); + this.transformsConfigManager = transformsConfigManager; } @Override protected Response newResponse(Request request, List tasks, List taskOperationFailures, List failedNodeExceptions) { - List configs = 
tasks.stream().map(GetDataFrameJobsAction.Response::getJobConfigurations) + List configs = tasks.stream().map(GetDataFrameTransformsAction.Response::getTransformConfigurations) .flatMap(Collection::stream).collect(Collectors.toList()); return new Response(configs, taskOperationFailures, failedNodeExceptions); } @Override - protected void taskOperation(Request request, DataFrameJobTask task, ActionListener listener) { - assert task.getJobId().equals(request.getId()) || request.getId().equals(MetaData.ALL); - // Little extra insurance, make sure we only return jobs that aren't - // cancelled + protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener listener) { + assert task.getTransformId().equals(request.getId()) || request.getId().equals(MetaData.ALL); + // Little extra insurance, make sure we only return transforms that aren't cancelled if (task.isCancelled() == false) { - jobConfigManager.getJobConfiguration(task.getJobId(), ActionListener.wrap(config -> { + transformsConfigManager.getTransformConfiguration(task.getTransformId(), ActionListener.wrap(config -> { listener.onResponse(new Response(Collections.singletonList(config))); }, e -> { listener.onFailure(new RuntimeException("failed to retrieve...", e)); @@ -79,17 +78,17 @@ protected void doExecute(Task task, Request request, ActionListener li final DiscoveryNodes nodes = state.nodes(); if (nodes.isLocalNodeElectedMaster()) { - if (DataFramePersistentTaskUtils.stateHasDataFrameJobs(request.getId(), state)) { + if (DataFramePersistentTaskUtils.stateHasDataFrameTransforms(request.getId(), state)) { super.doExecute(task, request, listener); } else { - // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET + // If we couldn't find the transform in the persistent task CS, it means it was deleted prior to this GET // and we can just send an empty response, no need to go looking for the allocated task listener.onResponse(new Response(Collections.emptyList())); } } else { - // Delegates GetJobs to elected master node, so it becomes the coordinating node. - // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // Delegates GetTransforms to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows transforms which are cancelled // on the master, which makes testing difficult. 
if (nodes.getMasterNode() == null) { listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java similarity index 56% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java index 6a64514437f9f..384854ed240fe 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportGetDataFrameTransformsStatsAction.java @@ -21,51 +21,52 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Request; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Response; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction.Response; import org.elasticsearch.xpack.dataframe.persistence.DataFramePersistentTaskUtils; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; -public class TransportGetDataFrameJobsStatsAction extends - TransportTasksAction { +public class TransportGetDataFrameTransformsStatsAction extends + TransportTasksAction { @Inject - public TransportGetDataFrameJobsStatsAction(TransportService transportService, ActionFilters actionFilters, + public TransportGetDataFrameTransformsStatsAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(GetDataFrameJobsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, Response::new, - ThreadPool.Names.SAME); + super(GetDataFrameTransformsStatsAction.NAME, clusterService, transportService, actionFilters, Request::new, Response::new, + Response::new, ThreadPool.Names.SAME); } @Override protected Response newResponse(Request request, List tasks, List taskOperationFailures, List failedNodeExceptions) { - List responses = tasks.stream().map(GetDataFrameJobsStatsAction.Response::getJobsStateAndStats) - .flatMap(Collection::stream).collect(Collectors.toList()); + List responses = tasks.stream() + .map(GetDataFrameTransformsStatsAction.Response::getTransformsStateAndStats).flatMap(Collection::stream) + .collect(Collectors.toList()); return new Response(responses, taskOperationFailures, failedNodeExceptions); } @Override - protected void taskOperation(Request request, DataFrameJobTask task, ActionListener listener) { - List jobsStateAndStats = Collections.emptyList(); + protected void taskOperation(Request request, DataFrameTransformTask task, ActionListener listener) { + List transformsStateAndStats = Collections.emptyList(); - assert task.getJobId().equals(request.getId()) || 
request.getId().equals(MetaData.ALL); + assert task.getTransformId().equals(request.getId()) || request.getId().equals(MetaData.ALL); - // Little extra insurance, make sure we only return jobs that aren't cancelled + // Little extra insurance, make sure we only return transforms that aren't cancelled if (task.isCancelled() == false) { - DataFrameJobStateAndStats jobStateAndStats = new DataFrameJobStateAndStats(task.getJobId(), task.getState(), - task.getStats()); - jobsStateAndStats = Collections.singletonList(jobStateAndStats); + DataFrameTransformStateAndStats transformStateAndStats = new DataFrameTransformStateAndStats(task.getTransformId(), + task.getState(), task.getStats()); + transformsStateAndStats = Collections.singletonList(transformStateAndStats); } - listener.onResponse(new Response(jobsStateAndStats)); + listener.onResponse(new Response(transformsStateAndStats)); } @Override @@ -74,17 +75,17 @@ protected void doExecute(Task task, Request request, ActionListener li final DiscoveryNodes nodes = state.nodes(); if (nodes.isLocalNodeElectedMaster()) { - if (DataFramePersistentTaskUtils.stateHasDataFrameJobs(request.getId(), state)) { + if (DataFramePersistentTaskUtils.stateHasDataFrameTransforms(request.getId(), state)) { super.doExecute(task, request, listener); } else { - // If we couldn't find the job in the persistent task CS, it means it was deleted prior to this GET + // If we couldn't find the transform in the persistent task CS, it means it was deleted prior to this GET // and we can just send an empty response, no need to go looking for the allocated task listener.onResponse(new Response(Collections.emptyList())); } } else { - // Delegates GetJobs to elected master node, so it becomes the coordinating node. - // Non-master nodes may have a stale cluster state that shows jobs which are cancelled + // Delegates GetTransforms to elected master node, so it becomes the coordinating node. + // Non-master nodes may have a stale cluster state that shows transforms which are cancelled // on the master, which makes testing difficult. 
if (nodes.getMasterNode() == null) { listener.onFailure(new MasterNotDiscoveredException("no known master nodes")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java similarity index 59% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java index 485696bf8fe4d..4836205c38542 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportPutDataFrameTransformAction.java @@ -28,33 +28,34 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Request; -import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Response; -import org.elasticsearch.xpack.dataframe.job.DataFrameJob; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction.Request; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction.Response; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import org.elasticsearch.xpack.dataframe.persistence.DataframeIndex; -import org.elasticsearch.xpack.dataframe.support.JobValidator; +import org.elasticsearch.xpack.dataframe.support.TransformValidator; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransform; -public class TransportPutDataFrameJobAction - extends TransportMasterNodeAction { +public class TransportPutDataFrameTransformAction + extends TransportMasterNodeAction { - private static final Logger logger = LogManager.getLogger(TransportPutDataFrameJobAction.class); + private static final Logger logger = LogManager.getLogger(TransportPutDataFrameTransformAction.class); private final XPackLicenseState licenseState; private final PersistentTasksService persistentTasksService; private final Client client; - private final DataFrameJobConfigManager dataFrameJobConfigManager; + private final DataFrameTransformsConfigManager dataFrameTransformsConfigManager; @Inject - public TransportPutDataFrameJobAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, + public TransportPutDataFrameTransformAction(TransportService transportService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, XPackLicenseState licenseState, - PersistentTasksService persistentTasksService, DataFrameJobConfigManager dataFrameJobConfigManager, Client client) { - super(PutDataFrameJobAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, PutDataFrameJobAction.Request::new); + PersistentTasksService persistentTasksService, DataFrameTransformsConfigManager dataFrameTransformsConfigManager, + Client client) { + super(PutDataFrameTransformAction.NAME, transportService, clusterService, threadPool, actionFilters, 
indexNameExpressionResolver, + PutDataFrameTransformAction.Request::new); this.licenseState = licenseState; this.persistentTasksService = persistentTasksService; this.client = client; - this.dataFrameJobConfigManager = dataFrameJobConfigManager; + this.dataFrameTransformsConfigManager = dataFrameTransformsConfigManager; } @Override @@ -63,8 +64,8 @@ protected String executor() { } @Override - protected PutDataFrameJobAction.Response newResponse() { - return new PutDataFrameJobAction.Response(); + protected PutDataFrameTransformAction.Response newResponse() { + return new PutDataFrameTransformAction.Response(); } @Override @@ -77,39 +78,41 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); - String jobId = request.getConfig().getId(); - // quick check whether a job has already been created under that name - if (PersistentTasksCustomMetaData.getTaskWithId(clusterState, jobId) != null) { + String transformId = request.getConfig().getId(); + // quick check whether a transform has already been created under that name + if (PersistentTasksCustomMetaData.getTaskWithId(clusterState, transformId) != null) { listener.onFailure(new ResourceAlreadyExistsException( - DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobId))); + DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_TRANSFORM_EXISTS, transformId))); return; } - // create the job, note the non-state creating steps are done first, so we minimize the chance to end up with orphaned state - // job validation - JobValidator jobCreator = new JobValidator(request.getConfig(), client); - jobCreator.validate(ActionListener.wrap(validationResult -> { + // create the transform, note the non-state creating steps are done first, so we minimize the chance to end up with orphaned state + // transform validation + TransformValidator transformValidator = new TransformValidator(request.getConfig(), client); + transformValidator.validate(ActionListener.wrap(validationResult -> { // deduce target mappings - jobCreator.deduceMappings(ActionListener.wrap(mappings -> { + transformValidator.deduceMappings(ActionListener.wrap(mappings -> { // create the destination index DataframeIndex.createDestinationIndex(client, request.getConfig(), mappings, ActionListener.wrap(createIndexResult -> { - DataFrameJob job = createDataFrameJob(jobId, threadPool); - // create the job configuration and store it in the internal index - dataFrameJobConfigManager.putJobConfiguration(request.getConfig(), ActionListener.wrap(r -> { + DataFrameTransform transform = createDataFrameTransform(transformId, threadPool); + // create the transform configuration and store it in the internal index + dataFrameTransformsConfigManager.putTransformConfiguration(request.getConfig(), ActionListener.wrap(r -> { // finally start the persistent task - persistentTasksService.sendStartRequest(job.getId(), DataFrameJob.NAME, job, ActionListener.wrap(persistentTask -> { - listener.onResponse(new PutDataFrameJobAction.Response(true)); + persistentTasksService.sendStartRequest(transform.getId(), DataFrameTransform.NAME, transform, + ActionListener.wrap(persistentTask -> { + listener.onResponse(new PutDataFrameTransformAction.Response(true)); }, startPersistentTaskException -> { - // delete the otherwise orphaned job configuration, for now we do not delete the destination index - dataFrameJobConfigManager.deleteJobConfiguration(jobId, ActionListener.wrap(r2 -> { - 
logger.debug("Deleted data frame job [{}] configuration from data frame configuration index", jobId); - listener.onFailure( + // delete the otherwise orphaned transform configuration, for now we do not delete the destination index + dataFrameTransformsConfigManager.deleteTransformConfiguration(transformId, ActionListener.wrap(r2 -> { + logger.debug("Deleted data frame transform [{}] configuration from data frame configuration index", + transformId); + listener.onFailure( new RuntimeException( DataFrameMessages.getMessage( DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_TO_START_PERSISTENT_TASK, r2), startPersistentTaskException)); - }, deleteJobFromIndexException -> { - logger.error("Failed to cleanup orphaned data frame job [{}] configuration", jobId); + }, deleteTransformFromIndexException -> { + logger.error("Failed to cleanup orphaned data frame transform [{}] configuration", transformId); listener.onFailure( new RuntimeException( DataFrameMessages.getMessage( @@ -132,12 +135,12 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio })); } - private static DataFrameJob createDataFrameJob(String jobId, ThreadPool threadPool) { - return new DataFrameJob(jobId); + private static DataFrameTransform createDataFrameTransform(String transformId, ThreadPool threadPool) { + return new DataFrameTransform(transformId); } @Override - protected ClusterBlockException checkBlock(PutDataFrameJobAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(PutDataFrameTransformAction.Request request, ClusterState state) { return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java similarity index 55% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index ab5a01e2ad5e7..35576880eb330 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -20,38 +20,39 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; import java.util.List; import java.util.function.Consumer; -public class TransportStartDataFrameJobAction extends - TransportTasksAction { +public class TransportStartDataFrameTransformAction extends + TransportTasksAction { private final XPackLicenseState licenseState; @Inject - public TransportStartDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, - XPackLicenseState licenseState) { - super(StartDataFrameJobAction.NAME, clusterService, transportService, actionFilters, StartDataFrameJobAction.Request::new, - StartDataFrameJobAction.Response::new, StartDataFrameJobAction.Response::new, ThreadPool.Names.SAME); + public 
TransportStartDataFrameTransformAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, XPackLicenseState licenseState) { + super(StartDataFrameTransformAction.NAME, clusterService, transportService, actionFilters, + StartDataFrameTransformAction.Request::new, StartDataFrameTransformAction.Response::new, + StartDataFrameTransformAction.Response::new, ThreadPool.Names.SAME); this.licenseState = licenseState; } @Override - protected void processTasks(StartDataFrameJobAction.Request request, Consumer operation) { - DataFrameJobTask matchingTask = null; + protected void processTasks(StartDataFrameTransformAction.Request request, Consumer operation) { + DataFrameTransformTask matchingTask = null; // todo: re-factor, see rollup TransportTaskHelper for (Task task : taskManager.getTasks().values()) { - if (task instanceof DataFrameJobTask - && ((DataFrameJobTask) task).getJobId().equals(request.getId())) { + if (task instanceof DataFrameTransformTask + && ((DataFrameTransformTask) task).getTransformId().equals(request.getId())) { if (matchingTask != null) { - throw new IllegalArgumentException("Found more than one matching task for data frame job [" + request.getId() + throw new IllegalArgumentException("Found more than one matching task for data frame transform [" + request.getId() + "] when " + "there should only be one."); } - matchingTask = (DataFrameJobTask) task; + matchingTask = (DataFrameTransformTask) task; } } @@ -61,8 +62,8 @@ protected void processTasks(StartDataFrameJobAction.Request request, Consumer listener) { + protected void doExecute(Task task, StartDataFrameTransformAction.Request request, + ActionListener listener) { if (!licenseState.isDataFrameAllowed()) { listener.onFailure(LicenseUtils.newComplianceException(XPackField.DATA_FRAME)); @@ -73,19 +74,19 @@ protected void doExecute(Task task, StartDataFrameJobAction.Request request, } @Override - protected void taskOperation(StartDataFrameJobAction.Request request, DataFrameJobTask jobTask, - ActionListener listener) { - if (jobTask.getJobId().equals(request.getId())) { - jobTask.start(listener); + protected void taskOperation(StartDataFrameTransformAction.Request request, DataFrameTransformTask transformTask, + ActionListener listener) { + if (transformTask.getTransformId().equals(request.getId())) { + transformTask.start(listener); } else { - listener.onFailure(new RuntimeException("ID of data frame job task [" + jobTask.getJobId() + listener.onFailure(new RuntimeException("ID of data frame transform task [" + transformTask.getTransformId() + "] does not match request's ID [" + request.getId() + "]")); } } @Override - protected StartDataFrameJobAction.Response newResponse(StartDataFrameJobAction.Request request, - List tasks, List taskOperationFailures, + protected StartDataFrameTransformAction.Response newResponse(StartDataFrameTransformAction.Request request, + List tasks, List taskOperationFailures, List failedNodeExceptions) { if (taskOperationFailures.isEmpty() == false) { @@ -94,16 +95,16 @@ protected StartDataFrameJobAction.Response newResponse(StartDataFrameJobAction.R throw org.elasticsearch.ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); } - // Either the job doesn't exist (the user didn't create it yet) or was deleted + // Either the transform doesn't exist (the user didn't create it yet) or was deleted // after the StartAPI executed. 
// In either case, let the user know if (tasks.size() == 0) { - throw new ResourceNotFoundException("Task for data frame job [" + request.getId() + "] not found"); + throw new ResourceNotFoundException("Task for data frame transform [" + request.getId() + "] not found"); } assert tasks.size() == 1; - boolean allStarted = tasks.stream().allMatch(StartDataFrameJobAction.Response::isStarted); - return new StartDataFrameJobAction.Response(allStarted); + boolean allStarted = tasks.stream().allMatch(StartDataFrameTransformAction.Response::isStarted); + return new StartDataFrameTransformAction.Response(allStarted); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java similarity index 64% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java index 5e52efc4e42d9..c7142ecd9ddae 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java @@ -21,41 +21,41 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobTask; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; import java.util.List; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; -public class TransportStopDataFrameJobAction extends - TransportTasksAction { +public class TransportStopDataFrameTransformAction extends + TransportTasksAction { private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100); private final ThreadPool threadPool; @Inject - public TransportStopDataFrameJobAction(TransportService transportService, ActionFilters actionFilters, ClusterService clusterService, - ThreadPool threadPool) { - super(StopDataFrameJobAction.NAME, clusterService, transportService, actionFilters, StopDataFrameJobAction.Request::new, - StopDataFrameJobAction.Response::new, StopDataFrameJobAction.Response::new, ThreadPool.Names.SAME); + public TransportStopDataFrameTransformAction(TransportService transportService, ActionFilters actionFilters, + ClusterService clusterService, ThreadPool threadPool) { + super(StopDataFrameTransformAction.NAME, clusterService, transportService, actionFilters, StopDataFrameTransformAction.Request::new, + StopDataFrameTransformAction.Response::new, StopDataFrameTransformAction.Response::new, ThreadPool.Names.SAME); this.threadPool = threadPool; } @Override - protected void doExecute(Task task, StopDataFrameJobAction.Request request, - ActionListener listener) { + protected void doExecute(Task task, StopDataFrameTransformAction.Request request, + ActionListener listener) { super.doExecute(task, request, listener); } @Override - protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJobTask jobTask, - ActionListener listener) { - if (jobTask.getJobId().equals(request.getId())) { + protected void taskOperation(StopDataFrameTransformAction.Request request, 
DataFrameTransformTask transformTask, + ActionListener listener) { + if (transformTask.getTransformId().equals(request.getId())) { if (request.waitForCompletion() == false) { - jobTask.stop(listener); + transformTask.stop(listener); } else { - ActionListener blockingListener = ActionListener.wrap(response -> { + ActionListener blockingListener = ActionListener.wrap(response -> { if (response.isStopped()) { // The Task acknowledged that it is stopped/stopping... wait until the status actually // changes over before returning. Switch over to Generic threadpool so @@ -65,7 +65,7 @@ protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJo long untilInNanos = System.nanoTime() + request.getTimeout().getNanos(); while (System.nanoTime() - untilInNanos < 0) { - if (jobTask.isStopped()) { + if (transformTask.isStopped()) { listener.onResponse(response); return; } @@ -73,11 +73,11 @@ protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJo } // ran out of time listener.onFailure(new ElasticsearchTimeoutException( - DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_TIMEOUT, + DataFrameMessages.getMessage(DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_TIMEOUT, request.getTimeout().getStringRep(), request.getId()))); } catch (InterruptedException e) { - listener.onFailure(new ElasticsearchException(DataFrameMessages - .getMessage(DataFrameMessages.REST_STOP_JOB_WAIT_FOR_COMPLETION_INTERRUPT, request.getId()), e)); + listener.onFailure(new ElasticsearchException(DataFrameMessages.getMessage( + DataFrameMessages.REST_STOP_TRANSFORM_WAIT_FOR_COMPLETION_INTERRUPT, request.getId()), e)); } }); } else { @@ -86,17 +86,17 @@ protected void taskOperation(StopDataFrameJobAction.Request request, DataFrameJo } }, listener::onFailure); - jobTask.stop(blockingListener); + transformTask.stop(blockingListener); } } else { - listener.onFailure(new RuntimeException("ID of data frame indexer task [" + jobTask.getJobId() + listener.onFailure(new RuntimeException("ID of data frame indexer task [" + transformTask.getTransformId() + "] does not match request's ID [" + request.getId() + "]")); } } @Override - protected StopDataFrameJobAction.Response newResponse(StopDataFrameJobAction.Request request, - List tasks, List taskOperationFailures, + protected StopDataFrameTransformAction.Response newResponse(StopDataFrameTransformAction.Request request, + List tasks, List taskOperationFailures, List failedNodeExceptions) { if (taskOperationFailures.isEmpty() == false) { @@ -105,16 +105,16 @@ protected StopDataFrameJobAction.Response newResponse(StopDataFrameJobAction.Req throw ExceptionsHelper.convertToElastic(failedNodeExceptions.get(0)); } - // Either the job doesn't exist (the user didn't create it yet) or was deleted + // Either the transform doesn't exist (the user didn't create it yet) or was deleted // after the Stop API executed. 
// In either case, let the user know if (tasks.size() == 0) { - throw new ResourceNotFoundException("Task for Data Frame Job [" + request.getId() + "] not found"); + throw new ResourceNotFoundException("Task for Data Frame transform [" + request.getId() + "] not found"); } assert tasks.size() == 1; - boolean allStopped = tasks.stream().allMatch(StopDataFrameJobAction.Response::isStopped); - return new StopDataFrameJobAction.Response(allStopped); + boolean allStopped = tasks.stream().allMatch(StopDataFrameTransformAction.Response::isStopped); + return new StopDataFrameTransformAction.Response(allStopped); } } \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java index 00101db66812a..d9e0471e0a3fc 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameInternalIndex.java @@ -14,8 +14,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xpack.core.dataframe.DataFrameField; + import java.io.IOException; import java.util.Collections; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; public final class DataFrameInternalIndex { @@ -36,7 +38,7 @@ public final class DataFrameInternalIndex { public static final String DOUBLE = "double"; public static final String KEYWORD = "keyword"; - // internal document types, e.g. "job_config" + // internal document types, e.g. 
"transform_config" public static final String DOC_TYPE = "doc_type"; public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException { @@ -68,8 +70,8 @@ private static XContentBuilder mappings() throws IOException { builder.startObject(PROPERTIES); // overall doc type builder.startObject(DOC_TYPE).field(TYPE, KEYWORD).endObject(); - // add the schema for job configurations - addDataFrameJobConfigMappings(builder); + // add the schema for transform configurations + addDataFrameTransformsConfigMappings(builder); // end type builder.endObject(); @@ -80,7 +82,7 @@ private static XContentBuilder mappings() throws IOException { return builder; } - private static XContentBuilder addDataFrameJobConfigMappings(XContentBuilder builder) throws IOException { + private static XContentBuilder addDataFrameTransformsConfigMappings(XContentBuilder builder) throws IOException { return builder .startObject(DataFrameField.ID.getPreferredName()) .field(TYPE, KEYWORD) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java index 496cf18a3dc6a..76e635df0d8fd 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFramePersistentTaskUtils.java @@ -17,25 +17,25 @@ private DataFramePersistentTaskUtils() { } /** - * Check to see if the PersistentTask's cluster state contains the job(s) we + * Check to see if the PersistentTask's cluster state contains the data frame transform(s) we * are interested in */ - public static boolean stateHasDataFrameJobs(String id, ClusterState state) { - boolean hasJobs = false; + public static boolean stateHasDataFrameTransforms(String id, ClusterState state) { + boolean hasTransforms = false; PersistentTasksCustomMetaData pTasksMeta = state.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); if (pTasksMeta != null) { - // If the request was for _all jobs, we need to look through the list of - // persistent tasks and see if at least once has a DataFrameJob param + // If the request was for _all transforms, we need to look through the list of + // persistent tasks and see if at least one is a data frame task if (id.equals(MetaData.ALL)) { - hasJobs = pTasksMeta.tasks().stream() + hasTransforms = pTasksMeta.tasks().stream() .anyMatch(persistentTask -> persistentTask.getTaskName().equals(DataFrameField.TASK_NAME)); } else if (pTasksMeta.getTask(id) != null) { - // If we're looking for a single job, we can just check directly - hasJobs = true; + // If we're looking for a single transform, we can just check directly + hasTransforms = true; } } - return hasJobs; + return hasTransforms; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java similarity index 66% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java index 734e09f53aef5..fe00e4b5ec519 100644 --- 
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManager.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java @@ -33,7 +33,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.io.IOException; import java.io.InputStream; @@ -44,9 +44,9 @@ import static org.elasticsearch.xpack.core.ClientHelper.DATA_FRAME_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -public class DataFrameJobConfigManager { +public class DataFrameTransformsConfigManager { - private static final Logger logger = LogManager.getLogger(DataFrameJobConfigManager.class); + private static final Logger logger = LogManager.getLogger(DataFrameTransformsConfigManager.class); public static final Map TO_XCONTENT_PARAMS; static { @@ -58,90 +58,94 @@ public class DataFrameJobConfigManager { private final Client client; private final NamedXContentRegistry xContentRegistry; - public DataFrameJobConfigManager(Client client, NamedXContentRegistry xContentRegistry) { + public DataFrameTransformsConfigManager(Client client, NamedXContentRegistry xContentRegistry) { this.client = client; this.xContentRegistry = xContentRegistry; } - public void putJobConfiguration(DataFrameJobConfig jobConfig, ActionListener listener) { + public void putTransformConfiguration(DataFrameTransformConfig transformConfig, ActionListener listener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - XContentBuilder source = jobConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); + XContentBuilder source = transformConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); IndexRequest indexRequest = new IndexRequest(DataFrameInternalIndex.INDEX_NAME) .opType(DocWriteRequest.OpType.CREATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) - .id(DataFrameJobConfig.documentId(jobConfig.getId())) + .id(DataFrameTransformConfig.documentId(transformConfig.getId())) .source(source); executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, IndexAction.INSTANCE, indexRequest, ActionListener.wrap(r -> { listener.onResponse(true); }, e -> { if (e instanceof VersionConflictEngineException) { - // the job already exists + // the transform already exists listener.onFailure(new ResourceAlreadyExistsException( - DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobConfig.getId()))); + DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_TRANSFORM_EXISTS, + transformConfig.getId()))); } else { - listener.onFailure(new RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_PERSIST_JOB_CONFIGURATION, e)); + listener.onFailure( + new RuntimeException(DataFrameMessages.REST_PUT_DATA_FRAME_FAILED_PERSIST_TRANSFORM_CONFIGURATION, e)); } })); } catch (IOException e) { // not expected to happen but for the sake of completeness listener.onFailure(new ElasticsearchParseException( - DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_FAILED_TO_SERIALIZE_JOB, jobConfig.getId()), e)); + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_FAILED_TO_SERIALIZE_TRANSFORM, transformConfig.getId()), + e)); } } - 
public void getJobConfiguration(String jobId, ActionListener resultListener) { - GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameJobConfig.documentId(jobId)); + public void getTransformConfiguration(String transformId, ActionListener resultListener) { + GetRequest getRequest = new GetRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameTransformConfig.documentId(transformId)); executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, GetAction.INSTANCE, getRequest, ActionListener.wrap(getResponse -> { if (getResponse.isExists() == false) { resultListener.onFailure(new ResourceNotFoundException( - DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformId))); return; } BytesReference source = getResponse.getSourceAsBytesRef(); - parseJobLenientlyFromSource(source, jobId, resultListener); + parseTransformLenientlyFromSource(source, transformId, resultListener); }, e -> { if (e.getClass() == IndexNotFoundException.class) { - resultListener.onFailure( - new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + resultListener.onFailure(new ResourceNotFoundException( + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformId))); } else { resultListener.onFailure(e); } })); } - public void deleteJobConfiguration(String jobId, ActionListener listener) { - DeleteRequest request = new DeleteRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameJobConfig.documentId(jobId)); + public void deleteTransformConfiguration(String transformId, ActionListener listener) { + DeleteRequest request = new DeleteRequest(DataFrameInternalIndex.INDEX_NAME, DataFrameTransformConfig.documentId(transformId)); request.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); executeAsyncWithOrigin(client, DATA_FRAME_ORIGIN, DeleteAction.INSTANCE, request, ActionListener.wrap(deleteResponse -> { if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { - listener.onFailure( - new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + listener.onFailure(new ResourceNotFoundException( + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformId))); return; } listener.onResponse(true); }, e -> { if (e.getClass() == IndexNotFoundException.class) { - listener.onFailure( - new ResourceNotFoundException(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobId))); + listener.onFailure(new ResourceNotFoundException( + DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformId))); } else { listener.onFailure(e); } })); } - private void parseJobLenientlyFromSource(BytesReference source, String jobId, ActionListener jobListener) { + private void parseTransformLenientlyFromSource(BytesReference source, String transformId, + ActionListener transformListener) { try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)) { - jobListener.onResponse(DataFrameJobConfig.PARSER.parse(parser, jobId)); + transformListener.onResponse(DataFrameTransformConfig.PARSER.parse(parser, transformId)); } catch (Exception e) { - 
logger.error(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_PARSE_JOB_CONFIGURATION, jobId), e); - jobListener.onFailure(e); + logger.error(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_PARSE_TRANSFORM_CONFIGURATION, transformId), e); + transformListener.onFailure(e); } } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index 5c83926ee1940..54e8d3854ebd6 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.io.IOException; import java.util.Map; @@ -33,9 +33,9 @@ public final class DataframeIndex { private DataframeIndex() { } - public static void createDestinationIndex(Client client, DataFrameJobConfig jobConfig, Map mappings, + public static void createDestinationIndex(Client client, DataFrameTransformConfig transformConfig, Map mappings, final ActionListener listener) { - CreateIndexRequest request = new CreateIndexRequest(jobConfig.getDestinationIndex()); + CreateIndexRequest request = new CreateIndexRequest(transformConfig.getDestinationIndex()); // TODO: revisit number of shards, number of replicas request.settings(Settings.builder() // <1> @@ -47,7 +47,7 @@ public static void createDestinationIndex(Client client, DataFrameJobConfig jobC listener.onResponse(true); }, e -> { String message = DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_CREATE_DESTINATION_INDEX, - jobConfig.getDestinationIndex(), jobConfig.getId()); + transformConfig.getDestinationIndex(), transformConfig.getId()); logger.error(message); listener.onFailure(new RuntimeException(message, e)); })); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java similarity index 68% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java index d7c88e726949a..bd3917af9a7ce 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestDeleteDataFrameTransformAction.java @@ -14,26 +14,26 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameTransformAction; import java.io.IOException; -public class RestDeleteDataFrameJobAction extends BaseRestHandler { +public class RestDeleteDataFrameTransformAction 
extends BaseRestHandler { - public RestDeleteDataFrameJobAction(Settings settings, RestController controller) { + public RestDeleteDataFrameTransformAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.DELETE, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.DELETE, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String id = restRequest.param(DataFrameField.ID.getPreferredName()); - DeleteDataFrameJobAction.Request request = new DeleteDataFrameJobAction.Request(id); + DeleteDataFrameTransformAction.Request request = new DeleteDataFrameTransformAction.Request(id); - return channel -> client.execute(DeleteDataFrameJobAction.INSTANCE, request, - new RestToXContentListener(channel) { + return channel -> client.execute(DeleteDataFrameTransformAction.INSTANCE, request, + new RestToXContentListener(channel) { @Override - protected RestStatus getStatus(DeleteDataFrameJobAction.Response response) { + protected RestStatus getStatus(DeleteDataFrameTransformAction.Response response) { if (response.getNodeFailures().size() > 0 || response.getTaskFailures().size() > 0) { return RestStatus.INTERNAL_SERVER_ERROR; } @@ -44,6 +44,6 @@ protected RestStatus getStatus(DeleteDataFrameJobAction.Response response) { @Override public String getName() { - return "data_frame_delete_job_action"; + return "data_frame_delete_transform_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java similarity index 63% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java index 7e8fc16b58b3f..1d35f7212108f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsStatsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsAction.java @@ -13,24 +13,24 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction; -public class RestGetDataFrameJobsStatsAction extends BaseRestHandler { +public class RestGetDataFrameTransformsAction extends BaseRestHandler { - public RestGetDataFrameJobsStatsAction(Settings settings, RestController controller) { + public RestGetDataFrameTransformsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_stats", this); + controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(DataFrameField.ID.getPreferredName()); - GetDataFrameJobsStatsAction.Request request 
= new GetDataFrameJobsStatsAction.Request(id); - return channel -> client.execute(GetDataFrameJobsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + GetDataFrameTransformsAction.Request request = new GetDataFrameTransformsAction.Request(id); + return channel -> client.execute(GetDataFrameTransformsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "data_frame_get_jobs_stats_action"; + return "data_frame_get_transforms_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java similarity index 61% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java index c54c48ad6f182..6ae2c16166704 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameJobsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestGetDataFrameTransformsStatsAction.java @@ -13,24 +13,24 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction; -public class RestGetDataFrameJobsAction extends BaseRestHandler { +public class RestGetDataFrameTransformsStatsAction extends BaseRestHandler { - public RestGetDataFrameJobsAction(Settings settings, RestController controller) { + public RestGetDataFrameTransformsStatsAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.GET, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_stats", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String id = restRequest.param(DataFrameField.ID.getPreferredName()); - GetDataFrameJobsAction.Request request = new GetDataFrameJobsAction.Request(id); - return channel -> client.execute(GetDataFrameJobsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + GetDataFrameTransformsStatsAction.Request request = new GetDataFrameTransformsStatsAction.Request(id); + return channel -> client.execute(GetDataFrameTransformsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "data_frame_get_jobs_action"; + return "data_frame_get_transforms_stats_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java similarity index 66% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java index 
ac04981586dec..a4a3222cfd6b5 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestPutDataFrameTransformAction.java @@ -14,20 +14,20 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction; import java.io.IOException; -public class RestPutDataFrameJobAction extends BaseRestHandler { - - public RestPutDataFrameJobAction(Settings settings, RestController controller) { +public class RestPutDataFrameTransformAction extends BaseRestHandler { + + public RestPutDataFrameTransformAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.PUT, DataFrameField.REST_BASE_PATH_JOBS_BY_ID, this); + controller.registerHandler(RestRequest.Method.PUT, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID, this); } @Override public String getName() { - return "data_frame_put_job_action"; + return "data_frame_put_transform_action"; } @Override @@ -35,8 +35,8 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(DataFrameField.ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); - PutDataFrameJobAction.Request request = PutDataFrameJobAction.Request.fromXContent(parser, id); + PutDataFrameTransformAction.Request request = PutDataFrameTransformAction.Request.fromXContent(parser, id); - return channel -> client.execute(PutDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(PutDataFrameTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java similarity index 64% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java index 4e984f587bda3..c889e75bf8363 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStartDataFrameTransformAction.java @@ -14,27 +14,27 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.rollup.RollupField; -import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameTransformAction; import java.io.IOException; -public class RestStartDataFrameJobAction extends BaseRestHandler { - - public RestStartDataFrameJobAction(Settings settings, RestController controller) { +public class RestStartDataFrameTransformAction extends BaseRestHandler { + + public RestStartDataFrameTransformAction(Settings settings, RestController controller) { super(settings); - 
controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_start", this); + controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_start", this); } - + @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String id = restRequest.param(RollupField.ID.getPreferredName()); - StartDataFrameJobAction.Request request = new StartDataFrameJobAction.Request(id); + StartDataFrameTransformAction.Request request = new StartDataFrameTransformAction.Request(id); - return channel -> client.execute(StartDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(StartDataFrameTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "data_frame_start_job_action"; + return "data_frame_start_transform_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java similarity index 64% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java index f5979264d07fd..510b40c31806d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameJobAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/rest/action/RestStopDataFrameTransformAction.java @@ -13,30 +13,31 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameTransformAction; import java.io.IOException; -public class RestStopDataFrameJobAction extends BaseRestHandler { +public class RestStopDataFrameTransformAction extends BaseRestHandler { - public RestStopDataFrameJobAction(Settings settings, RestController controller) { + public RestStopDataFrameTransformAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_JOBS_BY_ID + "_stop", this); + controller.registerHandler(RestRequest.Method.POST, DataFrameField.REST_BASE_PATH_TRANSFORMS_BY_ID + "_stop", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String id = restRequest.param(DataFrameField.ID.getPreferredName()); - TimeValue timeout = restRequest.paramAsTime(DataFrameField.TIMEOUT.getPreferredName(), StopDataFrameJobAction.DEFAULT_TIMEOUT); + TimeValue timeout = restRequest.paramAsTime(DataFrameField.TIMEOUT.getPreferredName(), + StopDataFrameTransformAction.DEFAULT_TIMEOUT); boolean waitForCompletion = restRequest.paramAsBoolean(DataFrameField.WAIT_FOR_COMPLETION.getPreferredName(), false); - StopDataFrameJobAction.Request request = new StopDataFrameJobAction.Request(id, waitForCompletion, timeout); + StopDataFrameTransformAction.Request request = new StopDataFrameTransformAction.Request(id, waitForCompletion, timeout); - 
return channel -> client.execute(StopDataFrameJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); + return channel -> client.execute(StopDataFrameTransformAction.INSTANCE, request, new RestToXContentListener<>(channel)); } @Override public String getName() { - return "data_frame_stop_job_action"; + return "data_frame_stop_transform_action"; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java similarity index 96% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java index cbbb8c806a228..36c9c40de8551 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/JobValidator.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java @@ -23,22 +23,22 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; -public class JobValidator { +public class TransformValidator { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; - private static final Logger logger = LogManager.getLogger(JobValidator.class); + private static final Logger logger = LogManager.getLogger(TransformValidator.class); private final Client client; - private final DataFrameJobConfig config; + private final DataFrameTransformConfig config; - public JobValidator(DataFrameJobConfig config, Client client) { + public TransformValidator(DataFrameTransformConfig config, Client client) { this.client = Objects.requireNonNull(client); this.config = Objects.requireNonNull(config); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java similarity index 97% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java index 363ec245fec8f..400bfc4e23a6d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java similarity index 86% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java index 82bc2e00344e3..8e1080d44b9e0 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -14,7 +14,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; import java.util.Collection; import java.util.HashMap; @@ -31,14 +31,14 @@ final class AggregationResultUtils { * @param agg The aggregation result * @param sources The original sources used for querying * @param aggregationBuilders the aggregation used for querying - * @param dataFrameIndexerJobStats stats collector + * @param dataFrameIndexerTransformStats stats collector * @return a map containing the results of the aggregation in a consumable way */ public static Stream> extractCompositeAggregationResults(CompositeAggregation agg, List> sources, Collection aggregationBuilders, - DataFrameIndexerJobStats dataFrameIndexerJobStats) { + DataFrameIndexerTransformStats dataFrameIndexerTransformStats) { return agg.getBuckets().stream().map(bucket -> { - dataFrameIndexerJobStats.incrementNumDocuments(bucket.getDocCount()); + dataFrameIndexerTransformStats.incrementNumDocuments(bucket.getDocCount()); Map document = new HashMap<>(); for (CompositeValuesSourceBuilder source : sources) { @@ -56,7 +56,7 @@ public static Stream> extractCompositeAggregationResults(Com document.put(aggName, aggResultSingleValue.value()); } else { // Execution should never reach this point! 
- // Creating jobs with unsupported aggregations shall not be possible + // Creating transforms with unsupported aggregations shall not be possible logger.error("Dataframe Internal Error: unsupported aggregation ["+ aggResult.getName() +"], ignoring"); assert false; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java similarity index 80% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java index 8f9ffb67177a8..1e426753ccd5b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -19,7 +19,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; @@ -36,16 +36,16 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerJobStats> { +public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer, DataFrameIndexerTransformStats> { private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; private static final Logger logger = LogManager.getLogger(DataFrameIndexer.class); public DataFrameIndexer(Executor executor, AtomicReference initialState, Map initialPosition) { - super(executor, initialState, initialPosition, new DataFrameIndexerJobStats()); + super(executor, initialState, initialPosition, new DataFrameIndexerTransformStats()); } - protected abstract DataFrameJobConfig getConfig(); + protected abstract DataFrameTransformConfig getConfig(); @Override protected void onStartJob(long now) { @@ -67,10 +67,10 @@ protected IterationResult> doProcess(SearchResponse searchRe * in later versions, see {@link IngestDocument). 
*/ private Stream processBucketsToIndexRequests(CompositeAggregation agg) { - final DataFrameJobConfig jobConfig = getConfig(); - String indexName = jobConfig.getDestinationIndex(); - List> sources = jobConfig.getSourceConfig().getSources(); - Collection aggregationBuilders = jobConfig.getAggregationConfig().getAggregatorFactories(); + final DataFrameTransformConfig transformConfig = getConfig(); + String indexName = transformConfig.getDestinationIndex(); + List> sources = transformConfig.getSourceConfig().getSources(); + Collection aggregationBuilders = transformConfig.getAggregationConfig().getAggregatorFactories(); return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, getStats()).map(document -> { XContentBuilder builder; @@ -89,12 +89,12 @@ private Stream processBucketsToIndexRequests(CompositeAggregation @Override protected SearchRequest buildSearchRequest() { final Map position = getPosition(); - final DataFrameJobConfig jobConfig = getConfig(); + final DataFrameTransformConfig transformConfig = getConfig(); QueryBuilder queryBuilder = new MatchAllQueryBuilder(); - SearchRequest searchRequest = new SearchRequest(jobConfig.getIndexPattern()); + SearchRequest searchRequest = new SearchRequest(transformConfig.getIndexPattern()); - List> sources = jobConfig.getSourceConfig().getSources(); + List> sources = transformConfig.getSourceConfig().getSources(); CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); compositeAggregation.size(1000); @@ -103,7 +103,7 @@ protected SearchRequest buildSearchRequest() { compositeAggregation.aggregateAfter(position); } - for (AggregationBuilder agg : jobConfig.getAggregationConfig().getAggregatorFactories()) { + for (AggregationBuilder agg : transformConfig.getAggregationConfig().getAggregatorFactories()) { compositeAggregation.subAggregation(agg); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java similarity index 66% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java index b56925b7716ea..2b26acabf4774 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJob.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; @@ -21,25 +21,25 @@ import java.util.Map; import java.util.Objects; -public class DataFrameJob extends AbstractDiffable implements XPackPlugin.XPackPersistentTaskParams { +public class DataFrameTransform extends AbstractDiffable implements XPackPlugin.XPackPersistentTaskParams { public static final String NAME = DataFrameField.TASK_NAME; - private final String jobId; + private final String transformId; - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, - a -> new DataFrameJob((String) a[0])); + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, + a -> new DataFrameTransform((String) a[0])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), DataFrameField.ID); } - public DataFrameJob(String jobId) { - this.jobId = jobId; + public DataFrameTransform(String transformId) { + this.transformId = transformId; } - public DataFrameJob(StreamInput in) throws IOException { - this.jobId = in.readString(); + public DataFrameTransform(StreamInput in) throws IOException { + this.transformId = in.readString(); } @Override @@ -55,22 +55,22 @@ public Version getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(jobId); + out.writeString(transformId); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(DataFrameField.ID.getPreferredName(), jobId); + builder.field(DataFrameField.ID.getPreferredName(), transformId); builder.endObject(); return builder; } public String getId() { - return jobId; + return transformId; } - public static DataFrameJob fromXContent(XContentParser parser) throws IOException { + public static DataFrameTransform fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } @@ -84,14 +84,14 @@ public boolean equals(Object other) { return false; } - DataFrameJob that = (DataFrameJob) other; + DataFrameTransform that = (DataFrameTransform) other; - return Objects.equals(this.jobId, that.jobId); + return Objects.equals(this.transformId, that.transformId); } @Override public int hashCode() { - return Objects.hash(jobId); + return Objects.hash(transformId); } public Map getHeaders() { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java similarity index 80% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java index 4cc0ad004a39e..6ac620e051480 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + import java.io.IOException; import java.util.Objects; @@ -25,11 +26,11 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** - * This class holds the configuration details of a data frame job + * This class holds the configuration details of a data frame transform */ -public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { +public class DataFrameTransformConfig implements NamedWriteable, ToXContentObject { - private static final String NAME = "xpack/data_frame/jobconfig"; + private static final String NAME = "xpack/data_frame/transformconfig"; private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); private static final ParseField DESTINATION_INDEX = new ParseField("destination_index"); private static final ParseField SOURCES = new ParseField("sources"); @@ -41,18 +42,18 @@ public class DataFrameJobConfig implements NamedWriteable, ToXContentObject { private final SourceConfig sourceConfig; private final AggregationConfig aggregationConfig; - public static final ConstructingObjectParser PARSER = createParser(false); - public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + public static final ConstructingObjectParser PARSER = createParser(false); + public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); - private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, (args, optionalId) -> { String id = args[0] != null ? 
(String) args[0] : optionalId; String indexPattern = (String) args[1]; String destinationIndex = (String) args[2]; SourceConfig sourceConfig = (SourceConfig) args[3]; AggregationConfig aggregationConfig = (AggregationConfig) args[4]; - return new DataFrameJobConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); + return new DataFrameTransformConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); }); parser.declareString(optionalConstructorArg(), DataFrameField.ID); @@ -64,11 +65,11 @@ private static ConstructingObjectParser createParser return parser; } - public static String documentId(String jobId) { - return "dataframe-" + jobId; + public static String documentId(String transformId) { + return "dataframe-" + transformId; } - public DataFrameJobConfig(final String id, + public DataFrameTransformConfig(final String id, final String indexPattern, final String destinationIndex, final SourceConfig sourceConfig, @@ -82,7 +83,7 @@ public DataFrameJobConfig(final String id, this.aggregationConfig = aggregationConfig; } - public DataFrameJobConfig(final StreamInput in) throws IOException { + public DataFrameTransformConfig(final StreamInput in) throws IOException { id = in.readString(); indexPattern = in.readString(); destinationIndex = in.readString(); @@ -114,6 +115,7 @@ public AggregationConfig getAggregationConfig() { return aggregationConfig; } + @Override public void writeTo(final StreamOutput out) throws IOException { out.writeString(id); out.writeString(indexPattern); @@ -122,6 +124,7 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeOptionalWriteable(aggregationConfig); } + @Override public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); builder.field(DataFrameField.ID.getPreferredName(), id); @@ -152,7 +155,7 @@ public boolean equals(Object other) { return false; } - final DataFrameJobConfig that = (DataFrameJobConfig) other; + final DataFrameTransformConfig that = (DataFrameTransformConfig) other; return Objects.equals(this.id, that.id) && Objects.equals(this.indexPattern, that.indexPattern) @@ -171,8 +174,8 @@ public String toString() { return Strings.toString(this, true, true); } - public static DataFrameJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) + public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId) throws IOException { - return PARSER.parse(parser, optionalJobId); + return PARSER.parse(parser, optionalTransformId); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java similarity index 59% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java index 990e2c9ee481c..f9d9ec282fffd 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java @@ -4,7 +4,7 @@ * you may not use this file except in 
compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -17,44 +17,43 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.dataframe.DataFrame; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import java.util.Map; -public class DataFrameJobPersistentTasksExecutor extends PersistentTasksExecutor { +public class DataFrameTransformPersistentTasksExecutor extends PersistentTasksExecutor { - private static final Logger logger = LogManager.getLogger(DataFrameJobPersistentTasksExecutor.class); + private static final Logger logger = LogManager.getLogger(DataFrameTransformPersistentTasksExecutor.class); private final Client client; - private final DataFrameJobConfigManager jobConfigManager; + private final DataFrameTransformsConfigManager transformsConfigManager; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; - public DataFrameJobPersistentTasksExecutor(Client client, DataFrameJobConfigManager jobConfigManager, SchedulerEngine schedulerEngine, - ThreadPool threadPool) { + public DataFrameTransformPersistentTasksExecutor(Client client, DataFrameTransformsConfigManager transformsConfigManager, + SchedulerEngine schedulerEngine, ThreadPool threadPool) { super(DataFrameField.TASK_NAME, DataFrame.TASK_THREAD_POOL_NAME); this.client = client; - this.jobConfigManager = jobConfigManager; + this.transformsConfigManager = transformsConfigManager; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; } @Override - protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameJob params, PersistentTaskState state) { - DataFrameJobTask buildTask = (DataFrameJobTask) task; + protected void nodeOperation(AllocatedPersistentTask task, @Nullable DataFrameTransform params, PersistentTaskState state) { + DataFrameTransformTask buildTask = (DataFrameTransformTask) task; SchedulerEngine.Job schedulerJob = new SchedulerEngine.Job( - DataFrameJobTask.SCHEDULE_NAME + "_" + params.getId(), next()); + DataFrameTransformTask.SCHEDULE_NAME + "_" + params.getId(), next()); - // Note that while the task is added to the scheduler here, the internal state - // will prevent - // it from doing any work until the task is "started" via the StartJob api + // Note that while the task is added to the scheduler here, the internal state will prevent + // it from doing any work until the task is "started" via the StartTransform api schedulerEngine.register(buildTask); schedulerEngine.add(schedulerJob); - logger.info("Data frame job [" + params.getId() + "] created."); + logger.info("Data frame transform [" + params.getId() + "] created."); } static SchedulerEngine.Schedule next() { @@ -65,8 +64,8 @@ static SchedulerEngine.Schedule next() { @Override protected AllocatedPersistentTask createTask(long id, String type, String action, TaskId parentTaskId, - PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { - return new 
DataFrameJobTask(id, type, action, parentTaskId, persistentTask.getParams(), - (DataFrameJobState) persistentTask.getState(), client, jobConfigManager, schedulerEngine, threadPool, headers); + PersistentTasksCustomMetaData.PersistentTask persistentTask, Map headers) { + return new DataFrameTransformTask(id, type, action, parentTaskId, persistentTask.getParams(), + (DataFrameTransformState) persistentTask.getState(), client, transformsConfigManager, schedulerEngine, threadPool, headers); } } \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java similarity index 58% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java index ec86062b84ddb..7826d08a06366 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -24,27 +24,27 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobState; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformState; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine; import org.elasticsearch.xpack.core.scheduler.SchedulerEngine.Event; -import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction; -import org.elasticsearch.xpack.dataframe.persistence.DataFrameJobConfigManager; -import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Response; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameTransformAction.Response; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameTransformAction; +import org.elasticsearch.xpack.dataframe.persistence.DataFrameTransformsConfigManager; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -public class DataFrameJobTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { +public class DataFrameTransformTask extends AllocatedPersistentTask implements SchedulerEngine.Listener { - private static final Logger logger = LogManager.getLogger(DataFrameJobTask.class); + private static final Logger logger = LogManager.getLogger(DataFrameTransformTask.class); public static final String SCHEDULE_NAME = DataFrameField.TASK_NAME + "/schedule"; - private final DataFrameJob 
job; + private final DataFrameTransform transform; private final SchedulerEngine schedulerEngine; private final ThreadPool threadPool; private final DataFrameIndexer indexer; @@ -54,20 +54,20 @@ public class DataFrameJobTask extends AllocatedPersistentTask implements Schedul // 1: data frame complete, all data has been indexed private final AtomicReference generation; - public DataFrameJobTask(long id, String type, String action, TaskId parentTask, DataFrameJob job, DataFrameJobState state, - Client client, DataFrameJobConfigManager jobConfigManager, SchedulerEngine schedulerEngine, ThreadPool threadPool, - Map headers) { - super(id, type, action, DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + job.getId(), parentTask, headers); - this.job = job; + public DataFrameTransformTask(long id, String type, String action, TaskId parentTask, DataFrameTransform transform, + DataFrameTransformState state, Client client, DataFrameTransformsConfigManager transformsConfigManager, + SchedulerEngine schedulerEngine, ThreadPool threadPool, Map headers) { + super(id, type, action, DataFrameField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transform.getId(), parentTask, headers); + this.transform = transform; this.schedulerEngine = schedulerEngine; this.threadPool = threadPool; IndexerState initialState = IndexerState.STOPPED; long initialGeneration = 0; Map initialPosition = null; - logger.info("[{}] init, got state: [{}]", job.getId(), state != null); + logger.info("[{}] init, got state: [{}]", transform.getId(), state != null); if (state != null) { final IndexerState existingState = state.getIndexerState(); - logger.info("[{}] Loading existing state: [{}], position [{}]", job.getId(), existingState, state.getPosition()); + logger.info("[{}] Loading existing state: [{}], position [{}]", transform.getId(), existingState, state.getPosition()); if (existingState.equals(IndexerState.INDEXING)) { // reset to started as no indexer is running initialState = IndexerState.STARTED; @@ -81,13 +81,13 @@ public DataFrameJobTask(long id, String type, String action, TaskId parentTask, initialGeneration = state.getGeneration(); } - this.indexer = new ClientDataFrameIndexer(job.getId(), jobConfigManager, new AtomicReference<>(initialState), initialPosition, - client); + this.indexer = new ClientDataFrameIndexer(transform.getId(), transformsConfigManager, new AtomicReference<>(initialState), + initialPosition, client); this.generation = new AtomicReference(initialGeneration); } - public String getJobId() { - return job.getId(); + public String getTransformId() { + return transform.getId(); } /** @@ -98,11 +98,11 @@ public Status getStatus() { return getState(); } - public DataFrameJobState getState() { - return new DataFrameJobState(indexer.getState(), indexer.getPosition(), generation.get()); + public DataFrameTransformState getState() { + return new DataFrameTransformState(indexer.getState(), indexer.getPosition(), generation.get()); } - public DataFrameIndexerJobStats getStats() { + public DataFrameIndexerTransformStats getStats() { return indexer.getStats(); } @@ -118,42 +118,42 @@ public synchronized void start(ActionListener listener) { final IndexerState prevState = indexer.getState(); if (prevState != IndexerState.STOPPED) { // fails if the task is not STOPPED - listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", - job.getId(), prevState)); + listener.onFailure(new ElasticsearchException("Cannot start task for data frame transform [{}], because state was 
[{}]", + transform.getId(), prevState)); return; } final IndexerState newState = indexer.start(); if (newState != IndexerState.STARTED) { - listener.onFailure(new ElasticsearchException("Cannot start task for data frame job [{}], because state was [{}]", - job.getId(), newState)); + listener.onFailure(new ElasticsearchException("Cannot start task for data frame transform [{}], because state was [{}]", + transform.getId(), newState)); return; } - final DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); + final DataFrameTransformState state = new DataFrameTransformState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); - logger.debug("Updating state for data frame job [{}] to [{}][{}]", job.getId(), state.getIndexerState(), + logger.debug("Updating state for data frame transform [{}] to [{}][{}]", transform.getId(), state.getIndexerState(), state.getPosition()); updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Successfully updated state for data frame job [" + job.getId() + "] to [" + logger.debug("Successfully updated state for data frame transform [" + transform.getId() + "] to [" + state.getIndexerState() + "][" + state.getPosition() + "]"); - listener.onResponse(new StartDataFrameJobAction.Response(true)); + listener.onResponse(new StartDataFrameTransformAction.Response(true)); }, (exc) -> { // We were unable to update the persistent status, so we need to shutdown the indexer too. indexer.stop(); - listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [" - + job.getId() + "] to [" + state.getIndexerState() + "].", exc)); + listener.onFailure(new ElasticsearchException("Error while updating state for data frame transform [" + + transform.getId() + "] to [" + state.getIndexerState() + "].", exc)); }) ); } - public synchronized void stop(ActionListener listener) { + public synchronized void stop(ActionListener listener) { final IndexerState newState = indexer.stop(); switch (newState) { case STOPPED: - listener.onResponse(new StopDataFrameJobAction.Response(true)); + listener.onResponse(new StopDataFrameTransformAction.Response(true)); break; case STOPPING: @@ -162,41 +162,41 @@ public synchronized void stop(ActionListener li // position. // 2. we persist STOPPED now, indexer continues a bit but then dies. When/if we resume we'll pick up at last checkpoint, // overwrite some docs and eventually checkpoint. 
- DataFrameJobState state = new DataFrameJobState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); + DataFrameTransformState state = new DataFrameTransformState(IndexerState.STOPPED, indexer.getPosition(), generation.get()); updatePersistentTaskState(state, ActionListener.wrap((task) -> { - logger.debug("Successfully updated state for data frame job [{}] to [{}]", job.getId(), + logger.debug("Successfully updated state for data frame transform [{}] to [{}]", transform.getId(), state.getIndexerState()); - listener.onResponse(new StopDataFrameJobAction.Response(true)); + listener.onResponse(new StopDataFrameTransformAction.Response(true)); }, (exc) -> { - listener.onFailure(new ElasticsearchException("Error while updating state for data frame job [{}] to [{}]", exc, - job.getId(), state.getIndexerState())); + listener.onFailure(new ElasticsearchException("Error while updating state for data frame transform [{}] to [{}]", exc, + transform.getId(), state.getIndexerState())); })); break; default: - listener.onFailure(new ElasticsearchException("Cannot stop task for data frame job [{}], because state was [{}]", - job.getId(), newState)); + listener.onFailure(new ElasticsearchException("Cannot stop task for data frame transform [{}], because state was [{}]", + transform.getId(), newState)); break; } } @Override public synchronized void triggered(Event event) { - if (generation.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + job.getId())) { + if (generation.get() == 0 && event.getJobName().equals(SCHEDULE_NAME + "_" + transform.getId())) { logger.debug("Data frame indexer [" + event.getJobName() + "] schedule has triggered, state: [" + indexer.getState() + "]"); indexer.maybeTriggerAsyncJob(System.currentTimeMillis()); } } /** - * Attempt to gracefully cleanup the data frame job so it can be terminated. + * Attempt to gracefully cleanup the data frame transform so it can be terminated. 
* This tries to remove the job from the scheduler, and potentially any other * cleanup operations in the future */ synchronized void shutdown() { try { - logger.info("Data frame indexer [" + job.getId() + "] received abort request, stopping indexer."); - schedulerEngine.remove(SCHEDULE_NAME + "_" + job.getId()); + logger.info("Data frame indexer [" + transform.getId() + "] received abort request, stopping indexer."); + schedulerEngine.remove(SCHEDULE_NAME + "_" + transform.getId()); schedulerEngine.unregister(this); } catch (Exception e) { markAsFailed(e); @@ -213,54 +213,56 @@ synchronized void shutdown() { @Override public synchronized void onCancelled() { logger.info( - "Received cancellation request for data frame job [" + job.getId() + "], state: [" + indexer.getState() + "]"); + "Received cancellation request for data frame transform [" + transform.getId() + "], state: [" + indexer.getState() + "]"); if (indexer.abort()) { - // there is no background job running, we can shutdown safely + // there is no background transform running, we can shutdown safely shutdown(); } } protected class ClientDataFrameIndexer extends DataFrameIndexer { - private static final int LOAD_JOB_TIMEOUT_IN_SECONDS = 30; + private static final int LOAD_TRANSFORM_TIMEOUT_IN_SECONDS = 30; private final Client client; - private final DataFrameJobConfigManager jobConfigManager; - private final String jobId; + private final DataFrameTransformsConfigManager transformsConfigManager; + private final String transformId; - private DataFrameJobConfig jobConfig = null; + private DataFrameTransformConfig transformConfig = null; - public ClientDataFrameIndexer(String jobId, DataFrameJobConfigManager jobConfigManager, AtomicReference initialState, - Map initialPosition, Client client) { + public ClientDataFrameIndexer(String transformId, DataFrameTransformsConfigManager transformsConfigManager, + AtomicReference initialState, Map initialPosition, Client client) { super(threadPool.executor(ThreadPool.Names.GENERIC), initialState, initialPosition); - this.jobId = jobId; - this.jobConfigManager = jobConfigManager; + this.transformId = transformId; + this.transformsConfigManager = transformsConfigManager; this.client = client; } @Override - protected DataFrameJobConfig getConfig() { - return jobConfig; + protected DataFrameTransformConfig getConfig() { + return transformConfig; } @Override protected String getJobId() { - return jobId; + return transformId; } @Override public synchronized boolean maybeTriggerAsyncJob(long now) { - if (jobConfig == null) { + if (transformConfig == null) { CountDownLatch latch = new CountDownLatch(1); - jobConfigManager.getJobConfiguration(jobId, new LatchedActionListener<>(ActionListener.wrap(config -> { - jobConfig = config; + transformsConfigManager.getTransformConfiguration(transformId, new LatchedActionListener<>(ActionListener.wrap(config -> { + transformConfig = config; }, e -> { - throw new RuntimeException(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_JOB_CONFIGURATION, jobId), e); + throw new RuntimeException( + DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_TRANSFORM_CONFIGURATION, transformId), e); }), latch)); try { - latch.await(LOAD_JOB_TIMEOUT_IN_SECONDS, TimeUnit.SECONDS); + latch.await(LOAD_TRANSFORM_TIMEOUT_IN_SECONDS, TimeUnit.SECONDS); } catch (InterruptedException e) { - throw new RuntimeException(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_JOB_CONFIGURATION, jobId), e); + throw new RuntimeException( + 
DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_TRANSFORM_CONFIGURATION, transformId), e); } } return super.maybeTriggerAsyncJob(now); @@ -268,14 +270,14 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { @Override protected void doNextSearch(SearchRequest request, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, SearchAction.INSTANCE, request, - nextPhase); + ClientHelper.executeWithHeadersAsync(transform.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, SearchAction.INSTANCE, + request, nextPhase); } @Override protected void doNextBulk(BulkRequest request, ActionListener nextPhase) { - ClientHelper.executeWithHeadersAsync(job.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, BulkAction.INSTANCE, request, - nextPhase); + ClientHelper.executeWithHeadersAsync(transform.getHeaders(), ClientHelper.DATA_FRAME_ORIGIN, client, BulkAction.INSTANCE, + request, nextPhase); } @Override @@ -291,28 +293,28 @@ protected void doSaveState(IndexerState indexerState, Map positi generation.compareAndSet(0L, 1L); } - final DataFrameJobState state = new DataFrameJobState(indexerState, getPosition(), generation.get()); - logger.info("Updating persistent state of job [" + job.getId() + "] to [" + state.toString() + "]"); + final DataFrameTransformState state = new DataFrameTransformState(indexerState, getPosition(), generation.get()); + logger.info("Updating persistent state of transform [" + transform.getId() + "] to [" + state.toString() + "]"); updatePersistentTaskState(state, ActionListener.wrap(task -> next.run(), exc -> { - logger.error("Updating persistent state of job [" + job.getId() + "] failed", exc); + logger.error("Updating persistent state of transform [" + transform.getId() + "] failed", exc); next.run(); })); } @Override protected void onFailure(Exception exc) { - logger.warn("Data frame job [" + job.getId() + "] failed with an exception: ", exc); + logger.warn("Data frame transform [" + transform.getId() + "] failed with an exception: ", exc); } @Override protected void onFinish() { - logger.info("Finished indexing for data frame job [" + job.getId() + "]"); + logger.info("Finished indexing for data frame transform [" + transform.getId() + "]"); } @Override protected void onAbort() { - logger.info("Data frame job [" + job.getId() + "] received abort request, stopping indexer"); + logger.info("Data frame transform [" + transform.getId() + "] received abort request, stopping indexer"); shutdown(); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java similarity index 98% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java index 0c976dceda783..d420579c1b237 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/job/SourceConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java index b7ebfbeff5ee0..4dd667f04c84f 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/DataFrameFeatureSetTests.java @@ -19,11 +19,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; -import org.elasticsearch.xpack.dataframe.action.DataFrameJobStateAndStats; -import org.elasticsearch.xpack.dataframe.action.DataFrameJobStateAndStatsTests; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Response; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.dataframe.action.DataFrameTransformStateAndStats; +import org.elasticsearch.xpack.dataframe.action.DataFrameTransformStateAndStatsTests; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction.Response; import org.junit.Before; import java.io.IOException; @@ -75,19 +75,19 @@ public void testUsage() throws InterruptedException, ExecutionException, IOExcep DataFrameFeatureSet featureSet = new DataFrameFeatureSet(Settings.EMPTY, client, licenseState); - List jobsStateAndStats = new ArrayList<>(); + List transformsStateAndStats = new ArrayList<>(); for (int i = 0; i < randomIntBetween(0, 10); ++i) { - jobsStateAndStats.add(DataFrameJobStateAndStatsTests.randomDataFrameJobStateAndStats()); + transformsStateAndStats.add(DataFrameTransformStateAndStatsTests.randomDataFrameTransformStateAndStats()); } - GetDataFrameJobsStatsAction.Response mockResponse = new GetDataFrameJobsStatsAction.Response(jobsStateAndStats); + GetDataFrameTransformsStatsAction.Response mockResponse = new GetDataFrameTransformsStatsAction.Response(transformsStateAndStats); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(mockResponse); return Void.TYPE; - }).when(client).execute(same(GetDataFrameJobsStatsAction.INSTANCE), any(), any()); + }).when(client).execute(same(GetDataFrameTransformsStatsAction.INSTANCE), any(), any()); PlainActionFuture future = new PlainActionFuture<>(); featureSet.usage(future); @@ -101,20 +101,20 @@ public void testUsage() throws InterruptedException, ExecutionException, IOExcep Map usageAsMap = parser.map(); assertTrue((boolean) XContentMapValues.extractValue("available", usageAsMap)); - if (jobsStateAndStats.isEmpty()) { - // no jobs, no stats - assertEquals(null, XContentMapValues.extractValue("jobs", usageAsMap)); + if (transformsStateAndStats.isEmpty()) { + // no transforms, no stats + assertEquals(null, XContentMapValues.extractValue("transforms", usageAsMap)); assertEquals(null, 
XContentMapValues.extractValue("stats", usageAsMap)); } else { - assertEquals(jobsStateAndStats.size(), XContentMapValues.extractValue("jobs._all", usageAsMap)); + assertEquals(transformsStateAndStats.size(), XContentMapValues.extractValue("transforms._all", usageAsMap)); Map stateCounts = new HashMap<>(); - jobsStateAndStats.stream().map(x -> x.getJobState().getIndexerState().value()) + transformsStateAndStats.stream().map(x -> x.getTransformState().getIndexerState().value()) .forEach(x -> stateCounts.merge(x, 1, Integer::sum)); - stateCounts.forEach((k, v) -> assertEquals(v, XContentMapValues.extractValue("jobs." + k, usageAsMap))); + stateCounts.forEach((k, v) -> assertEquals(v, XContentMapValues.extractValue("transforms." + k, usageAsMap))); - DataFrameIndexerJobStats combinedStats = jobsStateAndStats.stream().map(x -> x.getJobStats()).reduce((l, r) -> l.merge(r)) - .get(); + DataFrameIndexerTransformStats combinedStats = transformsStateAndStats.stream().map(x -> x.getTransformStats()) + .reduce((l, r) -> l.merge(r)).get(); assertEquals(toIntExact(combinedStats.getIndexFailures()), XContentMapValues.extractValue("stats.index_failures", usageAsMap)); @@ -144,8 +144,8 @@ public void testUsageDisabled() throws IOException, InterruptedException, Execut Map usageAsMap = parser.map(); assertTrue((boolean) XContentMapValues.extractValue("available", usageAsMap)); assertFalse((boolean) XContentMapValues.extractValue("enabled", usageAsMap)); - // not enabled -> no jobs, no stats - assertEquals(null, XContentMapValues.extractValue("jobs", usageAsMap)); + // not enabled -> no transforms, no stats + assertEquals(null, XContentMapValues.extractValue("transforms", usageAsMap)); assertEquals(null, XContentMapValues.extractValue("stats", usageAsMap)); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java deleted file mode 100644 index ff615519ac2c9..0000000000000 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameJobStateAndStatsTests.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.dataframe.action; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStatsTests; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameJobStateTests; -import org.elasticsearch.xpack.dataframe.job.AbstractSerializingDataFrameTestCase; - -import java.io.IOException; - -public class DataFrameJobStateAndStatsTests extends AbstractSerializingDataFrameTestCase { - - public static DataFrameJobStateAndStats randomDataFrameJobStateAndStats() { - return new DataFrameJobStateAndStats(randomAlphaOfLengthBetween(1, 10), - DataFrameJobStateTests.randomDataFrameJobState(), - DataFrameIndexerJobStatsTests.randomStats()); - } - - @Override - protected DataFrameJobStateAndStats doParseInstance(XContentParser parser) throws IOException { - return DataFrameJobStateAndStats.PARSER.apply(parser, null); - } - - @Override - protected DataFrameJobStateAndStats createTestInstance() { - return randomDataFrameJobStateAndStats(); - } - - @Override - protected Reader instanceReader() { - return DataFrameJobStateAndStats::new; - } - -} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java new file mode 100644 index 0000000000000..12beb8b86c2b8 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStatsTests; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformStateTests; +import org.elasticsearch.xpack.dataframe.transform.AbstractSerializingDataFrameTestCase; + +import java.io.IOException; + +public class DataFrameTransformStateAndStatsTests extends AbstractSerializingDataFrameTestCase { + + public static DataFrameTransformStateAndStats randomDataFrameTransformStateAndStats() { + return new DataFrameTransformStateAndStats(randomAlphaOfLengthBetween(1, 10), + DataFrameTransformStateTests.randomDataFrameTransformState(), + DataFrameIndexerTransformStatsTests.randomStats()); + } + + @Override + protected DataFrameTransformStateAndStats doParseInstance(XContentParser parser) throws IOException { + return DataFrameTransformStateAndStats.PARSER.apply(parser, null); + } + + @Override + protected DataFrameTransformStateAndStats createTestInstance() { + return randomDataFrameTransformStateAndStats(); + } + + @Override + protected Reader instanceReader() { + return DataFrameTransformStateAndStats::new; + } + +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformActionRequestTests.java similarity index 76% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformActionRequestTests.java index 063cb6c0bc675..d9825cfd5fb3b 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameJobActionTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameTransformActionRequestTests.java @@ -1,15 +1,16 @@ -package org.elasticsearch.xpack.dataframe.action; /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ +package org.elasticsearch.xpack.dataframe.action; + import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.dataframe.action.StartDataFrameJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameTransformAction.Request; -public class StartDataFrameJobActionTests extends AbstractWireSerializingTestCase { +public class DeleteDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionRequestTests.java similarity index 80% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionRequestTests.java index af6a58f42f050..87ec615e6c169 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionRequestTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction.Request; -public class GetDataFrameJobsActionRequestTests extends AbstractWireSerializingTestCase { +public class GetDataFrameTransformsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsActionRequestTests.java similarity index 79% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsActionRequestTests.java index 5f21648befa22..0751a8fd6cda5 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameJobsStatsActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsStatsActionRequestTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.dataframe.action.GetDataFrameJobsStatsAction.Request; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsStatsAction.Request; -public class GetDataFrameJobsStatsActionRequestTests extends AbstractWireSerializingTestCase { +public class GetDataFrameTransformsStatsActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request 
createTestInstance() { if (randomBoolean()) { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java similarity index 75% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java index 80be5cc77f63c..e4313fc1cf7e7 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java @@ -12,18 +12,18 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.xpack.dataframe.action.PutDataFrameJobAction.Request; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfigTests; +import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction.Request; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfigTests; import org.junit.Before; import java.io.IOException; import static java.util.Collections.emptyList; -public class PutDataFrameJobActionRequestTests extends AbstractStreamableXContentTestCase { +public class PutDataFrameTransformActionRequestTests extends AbstractStreamableXContentTestCase { - private String jobId; + private String transformId; private NamedWriteableRegistry namedWriteableRegistry; private NamedXContentRegistry namedXContentRegistry; @@ -47,13 +47,13 @@ protected NamedXContentRegistry xContentRegistry() { } @Before - public void setupJobID() { - jobId = randomAlphaOfLengthBetween(1, 10); + public void setupTransformId() { + transformId = randomAlphaOfLengthBetween(1, 10); } @Override protected Request doParseInstance(XContentParser parser) throws IOException { - return Request.fromXContent(parser, jobId); + return Request.fromXContent(parser, transformId); } @Override @@ -68,7 +68,7 @@ protected boolean supportsUnknownFields() { @Override protected Request createTestInstance() { - DataFrameJobConfig config = DataFrameJobConfigTests.randomDataFrameJobConfig(); + DataFrameTransformConfig config = DataFrameTransformConfigTests.randomDataFrameTransformConfig(); return new Request(config); } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformActionTests.java similarity index 77% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformActionTests.java index c38ffce4243b2..9ac38fe2d6f81 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DeleteDataFrameJobActionRequestTests.java +++ 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StartDataFrameTransformActionTests.java @@ -1,16 +1,15 @@ +package org.elasticsearch.xpack.dataframe.action; /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.action; - import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.dataframe.action.DeleteDataFrameJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.StartDataFrameTransformAction.Request; -public class DeleteDataFrameJobActionRequestTests extends AbstractWireSerializingTestCase { +public class StartDataFrameTransformActionTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { return new Request(randomAlphaOfLengthBetween(1, 20)); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformActionRequestTests.java similarity index 86% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformActionRequestTests.java index d5a292fe71aac..09e835ef8b7bc 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameJobActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/StopDataFrameTransformActionRequestTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.dataframe.action.StopDataFrameJobAction.Request; +import org.elasticsearch.xpack.dataframe.action.StopDataFrameTransformAction.Request; -public class StopDataFrameJobActionRequestTests extends AbstractWireSerializingTestCase { +public class StopDataFrameTransformActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java deleted file mode 100644 index 0c9deb1ccc6c9..0000000000000 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/DataFrameJobConfigTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.dataframe.job; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentParser; -import org.junit.Before; - -import java.io.IOException; - -public class DataFrameJobConfigTests extends AbstractSerializingDataFrameTestCase { - - private String jobId; - - public static DataFrameJobConfig randomDataFrameJobConfig() { - return new DataFrameJobConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), - randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), - AggregationConfigTests.randomAggregationConfig()); - } - - @Before - public void setUpOptionalId() { - jobId = randomAlphaOfLengthBetween(1, 10); - } - - @Override - protected DataFrameJobConfig doParseInstance(XContentParser parser) throws IOException { - if (randomBoolean()) { - return DataFrameJobConfig.fromXContent(parser, jobId); - } else { - return DataFrameJobConfig.fromXContent(parser, null); - } - } - - @Override - protected DataFrameJobConfig createTestInstance() { - return randomDataFrameJobConfig(); - } - - @Override - protected Reader instanceReader() { - return DataFrameJobConfig::new; - } -} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java deleted file mode 100644 index 829efa494f470..0000000000000 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameJobConfigManagerTests.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.dataframe.persistence; - -import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; -import org.elasticsearch.xpack.dataframe.job.DataFrameJobConfigTests; -import org.junit.Before; - -public class DataFrameJobConfigManagerTests extends DataFrameSingleNodeTestCase { - - private DataFrameJobConfigManager jobConfigManager; - - @Before - public void createComponents() { - jobConfigManager = new DataFrameJobConfigManager(client(), xContentRegistry()); - } - - public void testGetMissingJob() throws InterruptedException { - // the index does not exist yet - assertAsync(listener -> jobConfigManager.getJobConfiguration("not_there", listener), (DataFrameJobConfig) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); - }); - - // create one job and test with an existing index - assertAsync(listener -> jobConfigManager.putJobConfiguration(DataFrameJobConfigTests.randomDataFrameJobConfig(), listener), true, - null, null); - - // same test, but different code path - assertAsync(listener -> jobConfigManager.getJobConfiguration("not_there", listener), (DataFrameJobConfig) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); - }); - } - - public void testDeleteMissingJob() throws InterruptedException { - // the index does not exist yet - assertAsync(listener -> jobConfigManager.deleteJobConfiguration("not_there", listener), (Boolean) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); - }); - - // create one job and test with an existing index - assertAsync(listener -> jobConfigManager.putJobConfiguration(DataFrameJobConfigTests.randomDataFrameJobConfig(), listener), true, - null, null); - - // same test, but different code path - assertAsync(listener -> jobConfigManager.deleteJobConfiguration("not_there", listener), (Boolean) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, "not_there"), e.getMessage()); - }); - } - - public void testCreateReadDelete() throws InterruptedException { - DataFrameJobConfig jobConfig = DataFrameJobConfigTests.randomDataFrameJobConfig(); - - // create job - assertAsync(listener -> jobConfigManager.putJobConfiguration(jobConfig, listener), true, null, null); - - // read job - assertAsync(listener -> jobConfigManager.getJobConfiguration(jobConfig.getId(), listener), jobConfig, null, null); - - // try to create again - assertAsync(listener -> jobConfigManager.putJobConfiguration(jobConfig, listener), (Boolean) null, null, e -> { - assertEquals(ResourceAlreadyExistsException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_JOB_EXISTS, jobConfig.getId()), e.getMessage()); - }); - - // delete job - assertAsync(listener -> jobConfigManager.deleteJobConfiguration(jobConfig.getId(), listener), true, null, null); - - // delete again - 
assertAsync(listener -> jobConfigManager.deleteJobConfiguration(jobConfig.getId(), listener), (Boolean) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobConfig.getId()), e.getMessage()); - }); - - // try to get deleted job - assertAsync(listener -> jobConfigManager.getJobConfiguration(jobConfig.getId(), listener), (DataFrameJobConfig) null, null, e -> { - assertEquals(ResourceNotFoundException.class, e.getClass()); - assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_JOB, jobConfig.getId()), e.getMessage()); - }); - } -} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java new file mode 100644 index 0000000000000..e7892d0d8ee14 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.persistence; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfigTests; +import org.junit.Before; + +public class DataFrameTransformsConfigManagerTests extends DataFrameSingleNodeTestCase { + + private DataFrameTransformsConfigManager transformsConfigManager; + + @Before + public void createComponents() { + transformsConfigManager = new DataFrameTransformsConfigManager(client(), xContentRegistry()); + } + + public void testGetMissingTransform() throws InterruptedException { + // the index does not exist yet + assertAsync(listener -> transformsConfigManager.getTransformConfiguration("not_there", listener), (DataFrameTransformConfig) null, + null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "not_there"), + e.getMessage()); + }); + + // create one transform and test with an existing index + assertAsync( + listener -> transformsConfigManager + .putTransformConfiguration(DataFrameTransformConfigTests.randomDataFrameTransformConfig(), listener), + true, null, null); + + // same test, but different code path + assertAsync(listener -> transformsConfigManager.getTransformConfiguration("not_there", listener), (DataFrameTransformConfig) null, + null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "not_there"), + e.getMessage()); + }); + } + + public void testDeleteMissingTransform() throws InterruptedException { + // the index does not exist yet + assertAsync(listener -> transformsConfigManager.deleteTransformConfiguration("not_there", listener), (Boolean) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + 
assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "not_there"), e.getMessage()); + }); + + // create one transform and test with an existing index + assertAsync( + listener -> transformsConfigManager + .putTransformConfiguration(DataFrameTransformConfigTests.randomDataFrameTransformConfig(), listener), + true, null, null); + + // same test, but different code path + assertAsync(listener -> transformsConfigManager.deleteTransformConfiguration("not_there", listener), (Boolean) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, "not_there"), e.getMessage()); + }); + } + + public void testCreateReadDelete() throws InterruptedException { + DataFrameTransformConfig transformConfig = DataFrameTransformConfigTests.randomDataFrameTransformConfig(); + + // create transform + assertAsync(listener -> transformsConfigManager.putTransformConfiguration(transformConfig, listener), true, null, null); + + // read transform + assertAsync(listener -> transformsConfigManager.getTransformConfiguration(transformConfig.getId(), listener), transformConfig, null, + null); + + // try to create again + assertAsync(listener -> transformsConfigManager.putTransformConfiguration(transformConfig, listener), (Boolean) null, null, e -> { + assertEquals(ResourceAlreadyExistsException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_PUT_DATA_FRAME_TRANSFORM_EXISTS, transformConfig.getId()), + e.getMessage()); + }); + + // delete transform + assertAsync(listener -> transformsConfigManager.deleteTransformConfiguration(transformConfig.getId(), listener), true, null, null); + + // delete again + assertAsync(listener -> transformsConfigManager.deleteTransformConfiguration(transformConfig.getId(), listener), (Boolean) null, + null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformConfig.getId()), + e.getMessage()); + }); + + // try to get deleted transform + assertAsync(listener -> transformsConfigManager.getTransformConfiguration(transformConfig.getId(), listener), + (DataFrameTransformConfig) null, null, e -> { + assertEquals(ResourceNotFoundException.class, e.getClass()); + assertEquals(DataFrameMessages.getMessage(DataFrameMessages.REST_DATA_FRAME_UNKNOWN_TRANSFORM, transformConfig.getId()), + e.getMessage()); + }); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java similarity index 83% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java index a9735f68bb99c..02171bbf387b3 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/JobValidatorTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java @@ -27,9 +27,9 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; -import org.elasticsearch.xpack.dataframe.job.AggregationConfig; -import 
org.elasticsearch.xpack.dataframe.job.DataFrameJobConfig; -import org.elasticsearch.xpack.dataframe.job.SourceConfig; +import org.elasticsearch.xpack.dataframe.transform.AggregationConfig; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transform.SourceConfig; import org.junit.After; import org.junit.Before; @@ -43,7 +43,7 @@ import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; -public class JobValidatorTests extends ESTestCase { +public class TransformValidatorTests extends ESTestCase { private NamedXContentRegistry namedXContentRegistry; private Client client; @@ -79,32 +79,32 @@ public void testValidateExistingIndex() throws Exception { SourceConfig sourceConfig = getValidSourceConfig(); AggregationConfig aggregationConfig = getValidAggregationConfig(); - DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source_index", "non_existing_dest", + DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source_index", "non_existing_dest", sourceConfig, aggregationConfig); - assertValidJob(client, config); + assertValidTransform(client, config); } public void testValidateNonExistingIndex() throws Exception { SourceConfig sourceConfig = getValidSourceConfig(); AggregationConfig aggregationConfig = getValidAggregationConfig(); - DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "non_existing_source_index", + DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "non_existing_source_index", "non_existing_dest", sourceConfig, aggregationConfig); - assertInvalidJob(client, config); + assertInvalidTransform(client, config); } public void testSearchFailure() throws Exception { SourceConfig sourceConfig = getValidSourceConfig(); AggregationConfig aggregationConfig = getValidAggregationConfig(); - // test a failure during the search operation, job creation fails if + // test a failure during the search operation, transform creation fails if // search has failures although they might just be temporary - DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source_index_with_failing_shards", + DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source_index_with_failing_shards", "non_existing_dest", sourceConfig, aggregationConfig); - assertInvalidJob(client, config); + assertInvalidTransform(client, config); } public void testValidateAllSupportedAggregations() throws Exception { @@ -113,10 +113,10 @@ public void testValidateAllSupportedAggregations() throws Exception { for (String agg : supportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source", "non_existing_dest", + DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source", "non_existing_dest", sourceConfig, aggregationConfig); - assertValidJob(client, config); + assertValidTransform(client, config); } } @@ -126,10 +126,10 @@ public void testValidateAllUnsupportedAggregations() throws Exception { for (String agg : unsupportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - DataFrameJobConfig config = new DataFrameJobConfig(getTestName(), "existing_source", "non_existing_dest", + DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source", "non_existing_dest", 
sourceConfig, aggregationConfig); - assertInvalidJob(client, config); + assertInvalidTransform(client, config); } } @@ -201,16 +201,16 @@ private AggregationConfig parseAggregations(String json) throws IOException { return AggregationConfig.fromXContent(parser); } - private static void assertValidJob(Client client, DataFrameJobConfig config) throws Exception { + private static void assertValidTransform(Client client, DataFrameTransformConfig config) throws Exception { validate(client, config, true); } - private static void assertInvalidJob(Client client, DataFrameJobConfig config) throws Exception { + private static void assertInvalidTransform(Client client, DataFrameTransformConfig config) throws Exception { validate(client, config, false); } - private static void validate(Client client, DataFrameJobConfig config, boolean expectValid) throws Exception { - JobValidator validator = new JobValidator(config, client); + private static void validate(Client client, DataFrameTransformConfig config, boolean expectValid) throws Exception { + TransformValidator validator = new TransformValidator(config, client); CountDownLatch latch = new CountDownLatch(1); final AtomicReference exceptionHolder = new AtomicReference<>(); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java similarity index 97% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java index 55253113b3cc9..5aafc631cf529 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AbstractSerializingDataFrameTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java similarity index 95% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java index 510b42990be52..b1fe1f9abdfbd 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java @@ -4,13 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.xpack.dataframe.transform.AggregationConfig; import java.io.IOException; import java.util.HashSet; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java similarity index 98% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java index 0e69197691e01..8038bb719f203 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ContextParser; @@ -45,7 +45,8 @@ import org.elasticsearch.search.aggregations.pipeline.ParsedStatsBucket; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.dataframe.job.DataFrameIndexerJobStats; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; +import org.elasticsearch.xpack.dataframe.transform.AggregationResultUtils; import java.io.IOException; import java.util.Collection; @@ -283,7 +284,7 @@ aggTypedName2, asMap( private void executeTest(List> sources, Collection aggregationBuilders, Map input, List> expected, long expectedDocCounts) throws IOException { - DataFrameIndexerJobStats stats = new DataFrameIndexerJobStats(); + DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.map(input); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java new file mode 100644 index 0000000000000..7b2621064ff6b --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transform; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.junit.Before; + +import java.io.IOException; + +public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameTestCase { + + private String transformId; + + public static DataFrameTransformConfig randomDataFrameTransformConfig() { + return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), + AggregationConfigTests.randomAggregationConfig()); + } + + @Before + public void setUpOptionalId() { + transformId = randomAlphaOfLengthBetween(1, 10); + } + + @Override + protected DataFrameTransformConfig doParseInstance(XContentParser parser) throws IOException { + if (randomBoolean()) { + return DataFrameTransformConfig.fromXContent(parser, transformId); + } else { + return DataFrameTransformConfig.fromXContent(parser, null); + } + } + + @Override + protected DataFrameTransformConfig createTestInstance() { + return randomDataFrameTransformConfig(); + } + + @Override + protected Reader instanceReader() { + return DataFrameTransformConfig::new; + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java similarity index 94% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java index 3debbb04eb70b..c2a5a270b78f5 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/job/SourceConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.job; +package org.elasticsearch.xpack.dataframe.transform; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; @@ -12,6 +12,7 @@ import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.xpack.dataframe.transform.SourceConfig; import java.io.IOException; import java.util.ArrayList; From ec6684c8476b916fcbfe1dc1d1d4bfa278664da4 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 21 Jan 2019 21:25:10 +0100 Subject: [PATCH 39/49] fix merge conflict --- .../java/org/elasticsearch/xpack/core/XPackClientPlugin.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 18d6d9c0bf362..0fc75200b9918 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -432,7 +432,7 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, FreezeAction.NAME, FreezeAction::new), new NamedWriteableRegistry.Entry(LifecycleAction.class, SetPriorityAction.NAME, SetPriorityAction::new), - new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new) + new NamedWriteableRegistry.Entry(LifecycleAction.class, UnfollowAction.NAME, UnfollowAction::new), // Data Frame new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_FRAME, DataFrameFeatureSetUsage::new)); } From fac99c70f0abd6ee28a0e8e2412eaf35150de013 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Wed, 23 Jan 2019 21:37:35 +0100 Subject: [PATCH 40/49] [ML-DataFrame] implement new data frame transforms configuration (#37606) implement new data frame transforms configuration --- .../core/dataframe/DataFrameMessages.java | 4 + .../DataFrameConfigurationIndexIT.java | 2 +- .../integration/DataFrameRestTestCase.java | 20 +- .../xpack/dataframe/DataFrame.java | 4 +- .../action/GetDataFrameTransformsAction.java | 2 +- .../action/PutDataFrameTransformAction.java | 6 +- ...ansportDeleteDataFrameTransformAction.java | 2 +- ...TransportGetDataFrameTransformsAction.java | 4 +- ...portGetDataFrameTransformsStatsAction.java | 2 +- .../TransportPutDataFrameTransformAction.java | 16 +- ...ransportStartDataFrameTransformAction.java | 2 +- ...TransportStopDataFrameTransformAction.java | 2 +- .../DataFrameTransformsConfigManager.java | 4 +- .../dataframe/persistence/DataframeIndex.java | 6 +- .../transform/DataFrameTransformConfig.java | 181 ------------------ .../dataframe/transform/SourceConfig.java | 112 ----------- .../DataFrameIndexer.java | 47 +---- .../DataFrameTransform.java | 2 +- .../transforms/DataFrameTransformConfig.java | 169 ++++++++++++++++ ...FrameTransformPersistentTasksExecutor.java | 2 +- .../DataFrameTransformTask.java | 2 +- .../pivot}/AggregationConfig.java | 5 +- .../pivot}/AggregationResultUtils.java | 10 +- .../pivot}/Aggregations.java | 2 +- .../transforms/pivot/GroupConfig.java | 138 +++++++++++++ .../dataframe/transforms/pivot/Pivot.java | 129 
+++++++++++++ .../transforms/pivot/PivotConfig.java | 122 ++++++++++++ .../pivot/SchemaUtil.java} | 105 ++-------- .../transforms/pivot/SingleGroupSource.java | 113 +++++++++++ .../transforms/pivot/TermsGroupSource.java | 45 +++++ .../DataFrameTransformStateAndStatsTests.java | 2 +- ...tDataFrameTransformActionRequestTests.java | 4 +- ...DataFrameTransformsConfigManagerTests.java | 4 +- .../transform/SourceConfigTests.java | 60 ------ .../AbstractSerializingDataFrameTestCase.java | 2 +- .../DataFrameTransformConfigTests.java | 11 +- .../pivot}/AggregationConfigTests.java | 4 +- .../pivot}/AggregationResultUtilsTests.java | 25 ++- .../pivot}/AggregationsTests.java | 2 +- .../transforms/pivot/GroupConfigTests.java | 38 ++++ .../transforms/pivot/PivotConfigTests.java | 43 +++++ .../pivot/PivotTests.java} | 82 ++++---- .../pivot/TermsGroupSourceTests.java | 38 ++++ 43 files changed, 977 insertions(+), 598 deletions(-) delete mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java delete mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/DataFrameIndexer.java (63%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/DataFrameTransform.java (98%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/DataFrameTransformPersistentTasksExecutor.java (98%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/DataFrameTransformTask.java (99%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms/pivot}/AggregationConfig.java (94%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{transform => transforms/pivot}/AggregationResultUtils.java (87%) rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{support => transforms/pivot}/Aggregations.java (96%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java rename x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/{support/TransformValidator.java => transforms/pivot/SchemaUtil.java} (53%) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java delete mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/AbstractSerializingDataFrameTestCase.java (96%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{transform => transforms}/DataFrameTransformConfigTests.java (82%) rename 
x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{transform => transforms/pivot}/AggregationConfigTests.java (94%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{transform => transforms/pivot}/AggregationResultUtilsTests.java (92%) rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{support => transforms/pivot}/AggregationsTests.java (93%) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java rename x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/{support/TransformValidatorTests.java => transforms/pivot/PivotTests.java} (67%) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSourceTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index 8c47be2b579cd..e539143449110 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -31,6 +31,10 @@ public class DataFrameMessages { "Failed to load data frame transform configuration for transform [{0}]"; public static final String FAILED_TO_PARSE_TRANSFORM_CONFIGURATION = "Failed to parse transform configuration for data frame transform [{0}]"; + public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM = + "Data frame transform configuration must specify exactly 1 function"; + public static final String DATA_FRAME_TRANSFORM_PIVOT_FAILED_TO_CREATE_COMPOSITE_AGGREGATION = + "Failed to create composite aggregation from pivot function"; private DataFrameMessages() { } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java index 780350f2f5b92..439aa3098908c 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameConfigurationIndexIT.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.dataframe.persistence.DataFrameInternalIndex; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index a4853289ff11c..51bead14b5bfd 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java 
+++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -104,19 +104,19 @@ protected void createReviewsIndex() throws IOException { protected void createPivotReviewsTransform(String transformId, String dataFrameIndex) throws IOException { final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); createDataframeTransformRequest.setJsonEntity("{" - + " \"index_pattern\": \"reviews\"," - + " \"destination_index\": \"" + dataFrameIndex + "\"," - + " \"sources\": {" - + " \"sources\": [ {" + + " \"source\": \"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\"," + + " \"pivot\": {" + + " \"group_by\": [ {" + " \"reviewer\": {" + " \"terms\": {" + " \"field\": \"user_id\"" - + " } } } ] }," - + " \"aggregations\": {" - + " \"avg_rating\": {" - + " \"avg\": {" - + " \"field\": \"stars\"" - + " } } }" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + "}"); Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java index 0a055c00d8098..4ef39d630f06c 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/DataFrame.java @@ -67,8 +67,8 @@ import org.elasticsearch.xpack.dataframe.rest.action.RestPutDataFrameTransformAction; import org.elasticsearch.xpack.dataframe.rest.action.RestStartDataFrameTransformAction; import org.elasticsearch.xpack.dataframe.rest.action.RestStopDataFrameTransformAction; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransform; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformPersistentTasksExecutor; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransform; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformPersistentTasksExecutor; import java.io.IOException; import java.time.Clock; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java index ce4935fdc6a3f..7ec993680084b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; import java.util.Collections; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java index 89cc6a4c7ef99..71546111ff487 
100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformAction.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; import java.util.Objects; @@ -30,7 +30,7 @@ public class PutDataFrameTransformAction extends Action { @@ -86,12 +87,13 @@ protected void masterOperation(Request request, ClusterState clusterState, Actio return; } - // create the transform, note the non-state creating steps are done first, so we minimize the chance to end up with orphaned state - // transform validation - TransformValidator transformValidator = new TransformValidator(request.getConfig(), client); - transformValidator.validate(ActionListener.wrap(validationResult -> { + // create the transform, for now we only have pivot and no support for custom queries + Pivot pivot = new Pivot(request.getConfig().getSource(), new MatchAllQueryBuilder(), request.getConfig().getPivotConfig()); + + // the non-state creating steps are done first, so we minimize the chance to end up with orphaned state transform validation + pivot.validate(client, ActionListener.wrap(validationResult -> { // deduce target mappings - transformValidator.deduceMappings(ActionListener.wrap(mappings -> { + pivot.deduceMappings(client, ActionListener.wrap(mappings -> { // create the destination index DataframeIndex.createDestinationIndex(client, request.getConfig(), mappings, ActionListener.wrap(createIndexResult -> { DataFrameTransform transform = createDataFrameTransform(transformId, threadPool); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java index 35576880eb330..199c8bf7ffdaa 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStartDataFrameTransformAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask; import java.util.List; import java.util.function.Consumer; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java index c7142ecd9ddae..e35a3aec1521d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/TransportStopDataFrameTransformAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformTask; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformTask; import java.util.List; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java index fe00e4b5ec519..2293d2b6319ab 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManager.java @@ -33,7 +33,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; import java.io.InputStream; @@ -142,7 +142,7 @@ private void parseTransformLenientlyFromSource(BytesReference source, String tra try (InputStream stream = source.streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)) { - transformListener.onResponse(DataFrameTransformConfig.PARSER.parse(parser, transformId)); + transformListener.onResponse(DataFrameTransformConfig.fromXContent(parser, transformId, true)); } catch (Exception e) { logger.error(DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_PARSE_TRANSFORM_CONFIGURATION, transformId), e); transformListener.onFailure(e); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index 54e8d3854ebd6..758027694f458 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; import java.util.Map; @@ -35,7 +35,7 @@ private DataframeIndex() { public static void createDestinationIndex(Client client, DataFrameTransformConfig transformConfig, Map mappings, final ActionListener listener) { - CreateIndexRequest request = new CreateIndexRequest(transformConfig.getDestinationIndex()); + CreateIndexRequest request = new CreateIndexRequest(transformConfig.getDestination()); // TODO: revisit number of shards, number of replicas request.settings(Settings.builder() // <1> @@ -47,7 +47,7 @@ public static void createDestinationIndex(Client client, DataFrameTransformConfi listener.onResponse(true); }, e -> { String message = DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_CREATE_DESTINATION_INDEX, - 
transformConfig.getDestinationIndex(), transformConfig.getId()); + transformConfig.getDestination(), transformConfig.getId()); logger.error(message); listener.onFailure(new RuntimeException(message, e)); })); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java deleted file mode 100644 index 6ac620e051480..0000000000000 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfig.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.dataframe.transform; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.dataframe.DataFrameField; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * This class holds the configuration details of a data frame transform - */ -public class DataFrameTransformConfig implements NamedWriteable, ToXContentObject { - - private static final String NAME = "xpack/data_frame/transformconfig"; - private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); - private static final ParseField DESTINATION_INDEX = new ParseField("destination_index"); - private static final ParseField SOURCES = new ParseField("sources"); - private static final ParseField AGGREGATIONS = new ParseField("aggregations"); - - private final String id; - private final String indexPattern; - private final String destinationIndex; - private final SourceConfig sourceConfig; - private final AggregationConfig aggregationConfig; - - public static final ConstructingObjectParser PARSER = createParser(false); - public static final ConstructingObjectParser LENIENT_PARSER = createParser(true); - - private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, - (args, optionalId) -> { - String id = args[0] != null ? 
(String) args[0] : optionalId; - String indexPattern = (String) args[1]; - String destinationIndex = (String) args[2]; - SourceConfig sourceConfig = (SourceConfig) args[3]; - AggregationConfig aggregationConfig = (AggregationConfig) args[4]; - return new DataFrameTransformConfig(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); - }); - - parser.declareString(optionalConstructorArg(), DataFrameField.ID); - parser.declareString(constructorArg(), INDEX_PATTERN); - parser.declareString(constructorArg(), DESTINATION_INDEX); - parser.declareObject(optionalConstructorArg(), (p, c) -> SourceConfig.fromXContent(p), SOURCES); - parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); - - return parser; - } - - public static String documentId(String transformId) { - return "dataframe-" + transformId; - } - - public DataFrameTransformConfig(final String id, - final String indexPattern, - final String destinationIndex, - final SourceConfig sourceConfig, - final AggregationConfig aggregationConfig) { - this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); - this.indexPattern = ExceptionsHelper.requireNonNull(indexPattern, INDEX_PATTERN.getPreferredName()); - this.destinationIndex = ExceptionsHelper.requireNonNull(destinationIndex, DESTINATION_INDEX.getPreferredName()); - - // TODO: check for null? - this.sourceConfig = sourceConfig; - this.aggregationConfig = aggregationConfig; - } - - public DataFrameTransformConfig(final StreamInput in) throws IOException { - id = in.readString(); - indexPattern = in.readString(); - destinationIndex = in.readString(); - sourceConfig = in.readOptionalWriteable(SourceConfig::new); - aggregationConfig = in.readOptionalWriteable(AggregationConfig::new); - } - - public String getId() { - return id; - } - - public String getCron() { - return "*"; - } - - public String getIndexPattern() { - return indexPattern; - } - - public String getDestinationIndex() { - return destinationIndex; - } - - public SourceConfig getSourceConfig() { - return sourceConfig; - } - - public AggregationConfig getAggregationConfig() { - return aggregationConfig; - } - - @Override - public void writeTo(final StreamOutput out) throws IOException { - out.writeString(id); - out.writeString(indexPattern); - out.writeString(destinationIndex); - out.writeOptionalWriteable(sourceConfig); - out.writeOptionalWriteable(aggregationConfig); - } - - @Override - public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { - builder.startObject(); - builder.field(DataFrameField.ID.getPreferredName(), id); - builder.field(INDEX_PATTERN.getPreferredName(), indexPattern); - builder.field(DESTINATION_INDEX.getPreferredName(), destinationIndex); - if (sourceConfig != null) { - builder.field(SOURCES.getPreferredName(), sourceConfig); - } - if (aggregationConfig!=null) { - builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig); - } - builder.endObject(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - final DataFrameTransformConfig that = (DataFrameTransformConfig) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.indexPattern, that.indexPattern) - && Objects.equals(this.destinationIndex, that.destinationIndex) - && 
Objects.equals(this.sourceConfig, that.sourceConfig) - && Objects.equals(this.aggregationConfig, that.aggregationConfig); - } - - @Override - public int hashCode() { - return Objects.hash(id, indexPattern, destinationIndex, sourceConfig, aggregationConfig); - } - - @Override - public String toString() { - return Strings.toString(this, true, true); - } - - public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId) - throws IOException { - return PARSER.parse(parser, optionalTransformId); - } -} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java deleted file mode 100644 index d420579c1b237..0000000000000 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/SourceConfig.java +++ /dev/null @@ -1,112 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.dataframe.transform; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceParserHelper; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; - -/* - * Wrapper for the Source config part of a composite aggregation. - * - * For now just wraps sources from composite aggs. 
- */ -public class SourceConfig implements Writeable, ToXContentObject { - - private static final String NAME = "data_frame_source"; - - private final List> sources; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, false, (args) -> { - @SuppressWarnings("unchecked") - List> sources = (List>) args[0]; - return new SourceConfig(sources); - }); - - static { - PARSER.declareFieldArray(constructorArg(), (parser, builder) -> CompositeValuesSourceParserHelper.fromXContent(parser), - CompositeAggregationBuilder.SOURCES_FIELD_NAME, ObjectParser.ValueType.OBJECT_ARRAY); - } - - SourceConfig(final StreamInput in) throws IOException { - int num = in.readVInt(); - List> sources = new ArrayList<>(num); - for (int i = 0; i < num; i++) { - CompositeValuesSourceBuilder builder = CompositeValuesSourceParserHelper.readFrom(in); - sources.add(builder); - } - this.sources = Collections.unmodifiableList(sources); - } - - public SourceConfig(List> sources) { - this.sources = Collections.unmodifiableList(sources); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.startArray(CompositeAggregationBuilder.SOURCES_FIELD_NAME.getPreferredName()); - for (CompositeValuesSourceBuilder source : getSources()) { - CompositeValuesSourceParserHelper.toXContent(source, builder, params); - } - builder.endArray(); - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(getSources().size()); - for (CompositeValuesSourceBuilder builder : getSources()) { - CompositeValuesSourceParserHelper.writeTo(builder, out); - } - } - - @Override - public int hashCode() { - return Objects.hash(getSources()); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - final SourceConfig that = (SourceConfig) other; - - return Objects.equals(this.getSources(), that.getSources()); - } - - public static SourceConfig fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public List> getSources() { - return sources; - } -} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java similarity index 63% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java index 1e426753ccd5b..60b206a8de1b5 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -14,20 +14,15 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; import org.elasticsearch.xpack.core.indexing.AsyncTwoPhaseIndexer; import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.indexing.IterationResult; +import org.elasticsearch.xpack.dataframe.transforms.pivot.Pivot; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Collection; -import java.util.List; import java.util.Map; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; @@ -41,6 +36,8 @@ public abstract class DataFrameIndexer extends AsyncTwoPhaseIndexer initialState, Map initialPosition) { super(executor, initialState, initialPosition, new DataFrameIndexerTransformStats()); } @@ -49,6 +46,9 @@ public DataFrameIndexer(Executor executor, AtomicReference initial @Override protected void onStartJob(long now) { + // for now a match all, to be replaced + QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + pivot = new Pivot(getConfig().getSource(), queryBuilder, getConfig().getPivotConfig()); } @Override @@ -68,11 +68,9 @@ protected IterationResult> doProcess(SearchResponse searchRe */ private Stream processBucketsToIndexRequests(CompositeAggregation agg) { final DataFrameTransformConfig transformConfig = getConfig(); - String indexName = transformConfig.getDestinationIndex(); - List> sources = transformConfig.getSourceConfig().getSources(); - Collection aggregationBuilders = transformConfig.getAggregationConfig().getAggregatorFactories(); + String indexName = transformConfig.getDestination(); - return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, getStats()).map(document -> { + return pivot.extractResults(agg, getStats()).map(document -> { XContentBuilder builder; try { builder = jsonBuilder(); @@ -88,31 +86,6 @@ private Stream processBucketsToIndexRequests(CompositeAggregation @Override protected SearchRequest buildSearchRequest() { - final Map position = getPosition(); - final DataFrameTransformConfig transformConfig = getConfig(); - - QueryBuilder queryBuilder = new MatchAllQueryBuilder(); - SearchRequest searchRequest = new SearchRequest(transformConfig.getIndexPattern()); - - List> sources = transformConfig.getSourceConfig().getSources(); - - CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); - compositeAggregation.size(1000); - - if (position != null) { - compositeAggregation.aggregateAfter(position); - } - - for (AggregationBuilder agg : transformConfig.getAggregationConfig().getAggregatorFactories()) { - compositeAggregation.subAggregation(agg); - } - - SearchSourceBuilder sourceBuilder = new 
SearchSourceBuilder(); - sourceBuilder.aggregation(compositeAggregation); - sourceBuilder.size(0); - sourceBuilder.query(queryBuilder); - searchRequest.source(sourceBuilder); - - return searchRequest; + return pivot.buildSearchRequest(getPosition()); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransform.java similarity index 98% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransform.java index 2b26acabf4774..507a4173b2788 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransform.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransform.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java new file mode 100644 index 0000000000000..b6aa2cc35d918 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java @@ -0,0 +1,169 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.dataframe.transforms.pivot.PivotConfig; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * This class holds the configuration details of a data frame transform + */ +public class DataFrameTransformConfig implements Writeable, ToXContentObject { + + private static final String NAME = "data_frame_transforms"; + private static final ParseField SOURCE = new ParseField("source"); + private static final ParseField DESTINATION = new ParseField("dest"); + private static final ParseField QUERY = new ParseField("query"); + + // types of transforms + private static final ParseField PIVOT_TRANSFORM = new ParseField("pivot"); + + private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + private final String id; + private final String source; + private final String dest; + private final PivotConfig pivotConfig; + + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + (args, optionalId) -> { + String id = args[0] != null ? 
(String) args[0] : optionalId; + String source = (String) args[1]; + String dest = (String) args[2]; + PivotConfig pivotConfig = (PivotConfig) args[3]; + return new DataFrameTransformConfig(id, source, dest, pivotConfig); + }); + + parser.declareString(optionalConstructorArg(), DataFrameField.ID); + parser.declareString(constructorArg(), SOURCE); + parser.declareString(constructorArg(), DESTINATION); + parser.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p, ignoreUnknownFields), PIVOT_TRANSFORM); + + return parser; + } + + public static String documentId(String transformId) { + return "data_frame-" + transformId; + } + + public DataFrameTransformConfig(final String id, + final String source, + final String dest, + final PivotConfig pivotConfig) { + this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); + this.source = ExceptionsHelper.requireNonNull(source, SOURCE.getPreferredName()); + this.dest = ExceptionsHelper.requireNonNull(dest, DESTINATION.getPreferredName()); + this.pivotConfig = pivotConfig; + + // at least one transform must be defined + if (this.pivotConfig == null) { + throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM); + } + } + + public DataFrameTransformConfig(final StreamInput in) throws IOException { + id = in.readString(); + source = in.readString(); + dest = in.readString(); + pivotConfig = in.readOptionalWriteable(PivotConfig::new); + } + + public String getId() { + return id; + } + + public String getCron() { + return "*"; + } + + public String getSource() { + return source; + } + + public String getDestination() { + return dest; + } + + public PivotConfig getPivotConfig() { + return pivotConfig; + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(id); + out.writeString(source); + out.writeString(dest); + out.writeOptionalWriteable(pivotConfig); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(DataFrameField.ID.getPreferredName(), id); + builder.field(SOURCE.getPreferredName(), source); + builder.field(DESTINATION.getPreferredName(), dest); + if (pivotConfig != null) { + builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final DataFrameTransformConfig that = (DataFrameTransformConfig) other; + + return Objects.equals(this.id, that.id) + && Objects.equals(this.source, that.source) + && Objects.equals(this.dest, that.dest) + && Objects.equals(this.pivotConfig, that.pivotConfig); + } + + @Override + public int hashCode() { + return Objects.hash(id, source, dest, pivotConfig); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId, + boolean ignoreUnknownFields) throws IOException { + + return ignoreUnknownFields ? 
LENIENT_PARSER.apply(parser, optionalTransformId) : PARSER.apply(parser, optionalTransformId); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java similarity index 98% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java index f9d9ec282fffd..8b82f2684924d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformPersistentTasksExecutor.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformPersistentTasksExecutor.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java similarity index 99% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index 7826d08a06366..cbc5824521fa1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java similarity index 94% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java index 400bfc4e23a6d..02b999abd703f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; + import java.io.IOException; import java.util.Collection; import java.util.Objects; @@ -32,7 +33,7 @@ public AggregationConfig(AggregatorFactories.Builder aggregatorFactoryBuilder) { this.aggregatorFactoryBuilder = aggregatorFactoryBuilder; } - AggregationConfig(final StreamInput in) throws IOException { + public AggregationConfig(final StreamInput in) throws IOException { aggregatorFactoryBuilder = new AggregatorFactories.Builder(in); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java similarity index 87% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java index 8e1080d44b9e0..234bbb8626c1a 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java @@ -4,21 +4,19 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation.SingleValue; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.stream.Stream; @@ -35,14 +33,14 @@ final class AggregationResultUtils { * @return a map containing the results of the aggregation in a consumable way */ public static Stream> extractCompositeAggregationResults(CompositeAggregation agg, - List> sources, Collection aggregationBuilders, + Iterable sources, Collection aggregationBuilders, DataFrameIndexerTransformStats dataFrameIndexerTransformStats) { return agg.getBuckets().stream().map(bucket -> { dataFrameIndexerTransformStats.incrementNumDocuments(bucket.getDocCount()); Map document = new HashMap<>(); - for (CompositeValuesSourceBuilder source : sources) { - String destinationFieldName = source.name(); + for (GroupConfig source : sources) { + String destinationFieldName = source.getDestinationFieldName(); document.put(destinationFieldName, bucket.getKey().get(destinationFieldName)); } for (AggregationBuilder aggregationBuilder : 
aggregationBuilders) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java similarity index 96% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java index e625f4ac9edea..b59955cd84ba4 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/Aggregations.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.support; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import java.util.Locale; import java.util.Set; diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java new file mode 100644 index 0000000000000..c8f0fa6b601ab --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +/* + * Wraps a single group for groupby + */ +public class GroupConfig implements Writeable, ToXContentObject { + + private final String destinationFieldName; + private final SingleGroupSource.Type groupType; + private final SingleGroupSource groupSource; + + public GroupConfig(final String destinationFieldName, final SingleGroupSource.Type groupType, final SingleGroupSource groupSource) { + this.destinationFieldName = Objects.requireNonNull(destinationFieldName); + this.groupType = Objects.requireNonNull(groupType); + this.groupSource = Objects.requireNonNull(groupSource); + } + + public GroupConfig(StreamInput in) throws IOException { + destinationFieldName = in.readString(); + groupType = Type.fromId(in.readByte()); + switch (groupType) { + case TERMS: + groupSource = in.readOptionalWriteable(TermsGroupSource::new); + break; + default: + throw new IOException("Unknown group type"); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(destinationFieldName); + out.writeByte(groupType.getId()); + out.writeOptionalWriteable(groupSource); + } + + @Override + public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject(destinationFieldName); + + builder.field(groupType.value(), groupSource); + builder.endObject(); + builder.endObject(); + return builder; + } + + public String getDestinationFieldName() { + return destinationFieldName; + } + + public SingleGroupSource getGroupSource() { + return groupSource; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final GroupConfig that = (GroupConfig) other; + + return Objects.equals(this.destinationFieldName, that.destinationFieldName) && Objects.equals(this.groupType, that.groupType) + && Objects.equals(this.groupSource, that.groupSource); + } + + @Override + public int hashCode() { + return Objects.hash(destinationFieldName, groupType, groupSource); + } + + public static GroupConfig fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { + String destinationFieldName; + Type groupType; + SingleGroupSource groupSource; + + // be parsing friendly, whether the token needs to be advanced or not (similar to what ObjectParser does) + XContentParser.Token token; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + token = parser.currentToken(); + } else { + token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: Expected START_OBJECT but was: " + token); + } + } + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + destinationFieldName = parser.currentName(); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + groupType = SingleGroupSource.Type.valueOf(parser.currentName().toUpperCase(Locale.ROOT)); + + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + + switch (groupType) { + case TERMS: + groupSource = TermsGroupSource.fromXContent(parser, ignoreUnknownFields); + break; + default: + throw new ParsingException(parser.getTokenLocation(), "invalid grouping type: " + groupType); + } + + parser.nextToken(); + parser.nextToken(); + + return new GroupConfig(destinationFieldName, groupType, groupSource); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java new file mode 100644 index 0000000000000..26760d6f167cf --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; + +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.stream.Stream; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; + +public class Pivot { + private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; + + private final PivotConfig config; + private final String source; + + // objects for re-using + private final CompositeAggregationBuilder cachedCompositeAggregation; + private final SearchRequest cachedSearchRequest; + + public Pivot(String source, QueryBuilder query, PivotConfig config) { + this.source = source; + this.config = config; + this.cachedCompositeAggregation = createCompositeAggregation(config); + this.cachedSearchRequest = createSearchRequest(source, query, cachedCompositeAggregation); + } + + public void validate(Client client, final ActionListener listener) { + // step 1: check if used aggregations are supported + for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { + if (Aggregations.isSupportedByDataframe(agg.getType()) == false) { + listener.onFailure(new RuntimeException("Unsupported aggregation type [" + agg.getType() + "]")); + return; + } + } + + // step 2: run a query to validate that config is valid + runTestQuery(client, listener); + } + + public void deduceMappings(Client client, final ActionListener> listener) { + SchemaUtil.deduceMappings(client, config, source, listener); + } + + public SearchRequest buildSearchRequest(Map position) { + if (position != null) { + cachedCompositeAggregation.aggregateAfter(position); + } + + return cachedSearchRequest; + } + + public Stream> extractResults(CompositeAggregation agg, + DataFrameIndexerTransformStats dataFrameIndexerTransformStats) { + Iterable sources = config.getGroups(); + Collection aggregationBuilders = config.getAggregationConfig().getAggregatorFactories(); + + return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, dataFrameIndexerTransformStats); + } + + private void runTestQuery(Client client, final ActionListener listener) { + // no after key + cachedCompositeAggregation.aggregateAfter(null); + client.execute(SearchAction.INSTANCE, cachedSearchRequest, ActionListener.wrap(response -> { + if (response == null) { + listener.onFailure(new RuntimeException("Unexpected null 
response from test query")); + return; + } + if (response.status() != RestStatus.OK) { + listener.onFailure(new RuntimeException("Unexpected status from response of test query: " + response.status())); + return; + } + listener.onResponse(true); + }, e->{ + listener.onFailure(new RuntimeException("Failed to test query",e)); + })); + } + + private static SearchRequest createSearchRequest(String index, QueryBuilder query, CompositeAggregationBuilder compositeAggregation) { + SearchRequest searchRequest = new SearchRequest(index); + SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); + sourceBuilder.aggregation(compositeAggregation); + sourceBuilder.size(0); + sourceBuilder.query(query); + searchRequest.source(sourceBuilder); + return searchRequest; + } + + private static CompositeAggregationBuilder createCompositeAggregation(PivotConfig config) { + CompositeAggregationBuilder compositeAggregation; + + try (XContentBuilder builder = jsonBuilder()) { + // write configuration for composite aggs into builder + config.toCompositeAggXContent(builder, ToXContentObject.EMPTY_PARAMS); + XContentParser parser = builder.generator().contentType().xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()); + compositeAggregation = CompositeAggregationBuilder.parse(COMPOSITE_AGGREGATION_NAME, parser); + compositeAggregation.size(1000); + config.getAggregationConfig().getAggregatorFactories().forEach(agg -> compositeAggregation.subAggregation(agg)); + } catch (IOException e) { + throw new RuntimeException(DataFrameMessages.DATA_FRAME_TRANSFORM_PIVOT_FAILED_TO_CREATE_COMPOSITE_AGGREGATION, e); + } + return compositeAggregation; + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java new file mode 100644 index 0000000000000..a869a28fa775c --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class PivotConfig implements Writeable, ToXContentObject { + + private static final String NAME = "data_frame_transform_pivot"; + private static final ParseField GROUP_BY = new ParseField("group_by"); + private static final ParseField AGGREGATIONS = new ParseField("aggregations"); + + private final List groups; + private final AggregationConfig aggregationConfig; + + private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + args -> { + @SuppressWarnings("unchecked") + List groups = (List) args[0]; + AggregationConfig aggregationConfig = (AggregationConfig) args[1]; + return new PivotConfig(groups, aggregationConfig); + }); + + parser.declareObjectArray(constructorArg(), + (p, c) -> (GroupConfig.fromXContent(p, ignoreUnknownFields)), GROUP_BY); + + parser.declareObject(constructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); + + return parser; + } + + public PivotConfig(final List groups, final AggregationConfig aggregationConfig) { + this.groups = ExceptionsHelper.requireNonNull(groups, GROUP_BY.getPreferredName()); + this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, AGGREGATIONS.getPreferredName()); + } + + public PivotConfig(StreamInput in) throws IOException { + this.groups = in.readList(GroupConfig::new); + this.aggregationConfig = new AggregationConfig(in); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(GROUP_BY.getPreferredName(), groups); + builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig); + builder.endObject(); + return builder; + } + + public void toCompositeAggXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CompositeAggregationBuilder.SOURCES_FIELD_NAME.getPreferredName()); + builder.startArray(); + for (GroupConfig group : groups) { + group.toXContent(builder, params); + } + builder.endArray(); + builder.endObject(); // sources + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeList(groups); + aggregationConfig.writeTo(out); + } + + public AggregationConfig getAggregationConfig() { + return aggregationConfig; + } + + public Iterable getGroups() { + return groups; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == 
null || getClass() != other.getClass()) { + return false; + } + + final PivotConfig that = (PivotConfig) other; + + return Objects.equals(this.groups, that.groups) && Objects.equals(this.aggregationConfig, that.aggregationConfig); + } + + @Override + public int hashCode() { + return Objects.hash(groups, aggregationConfig); + } + + public static PivotConfig fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { + return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : PARSER.apply(parser, null); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java similarity index 53% rename from x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java rename to x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java index 36c9c40de8551..55df001d6cc63 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/support/TransformValidator.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.support; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -12,59 +12,30 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetaData; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.aggregations.AggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; import java.util.HashMap; -import java.util.List; import java.util.Map; -import java.util.Objects; -public class TransformValidator { - private static final String COMPOSITE_AGGREGATION_NAME = "_data_frame"; +public class SchemaUtil { + private static final Logger logger = LogManager.getLogger(SchemaUtil.class); - private static final Logger logger = LogManager.getLogger(TransformValidator.class); - - private final Client client; - private final DataFrameTransformConfig config; - - public TransformValidator(DataFrameTransformConfig config, Client client) { - this.client = Objects.requireNonNull(client); - this.config = Objects.requireNonNull(config); + private SchemaUtil() { } - public void validate(final ActionListener listener) { - // step 1: check if used aggregations are supported - for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { - if 
(Aggregations.isSupportedByDataframe(agg.getType()) == false) { - listener.onFailure(new RuntimeException("Unsupported aggregation type [" + agg.getType() + "]")); - return; - } - } - - // step 2: run a query to validate that config is valid - runTestQuery(listener); - } - - public void deduceMappings(final ActionListener> listener) { + public static void deduceMappings(final Client client, final PivotConfig config, final String source, + final ActionListener> listener) { // collects the fieldnames used as source for aggregations Map aggregationSourceFieldNames = new HashMap<>(); // collects the aggregation types by source name Map aggregationTypes = new HashMap<>(); // collects the fieldnames and target fieldnames used for grouping Map fieldNamesForGrouping = new HashMap<>(); - config.getSourceConfig().getSources().forEach(source -> { - fieldNamesForGrouping.put(source.name(), source.field()); + + config.getGroups().forEach(group -> { + fieldNamesForGrouping.put(group.getDestinationFieldName(), group.getGroupSource().getField()); }); for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { @@ -83,7 +54,7 @@ public void deduceMappings(final ActionListener> listener) { allFieldNames.putAll(aggregationSourceFieldNames); allFieldNames.putAll(fieldNamesForGrouping); - getSourceFieldMappings(config.getIndexPattern(), allFieldNames.values().toArray(new String[0]), + getSourceFieldMappings(client, source, allFieldNames.values().toArray(new String[0]), ActionListener.wrap(sourceMappings -> { Map targetMapping = resolveMappings(aggregationSourceFieldNames, aggregationTypes, fieldNamesForGrouping, sourceMappings); @@ -94,21 +65,20 @@ public void deduceMappings(final ActionListener> listener) { })); } - Map resolveMappings(Map aggregationSourceFieldNames, Map aggregationTypes, - Map fieldNamesForGrouping, Map sourceMappings) { + private static Map resolveMappings(Map aggregationSourceFieldNames, + Map aggregationTypes, Map fieldNamesForGrouping, Map sourceMappings) { Map targetMapping = new HashMap<>(); aggregationTypes.forEach((targetFieldName, aggregationName) -> { String sourceFieldName = aggregationSourceFieldNames.get(targetFieldName); String destinationMapping = Aggregations.resolveTargetMapping(aggregationName, sourceMappings.get(sourceFieldName)); - logger.debug("[" + config.getId() + "] Deduced mapping for: [" + targetFieldName + "], agg type [" + aggregationName - + "] to [" + destinationMapping + "]"); + logger.debug( + "Deduced mapping for: [" + targetFieldName + "], agg type [" + aggregationName + "] to [" + destinationMapping + "]"); if (destinationMapping != null) { targetMapping.put(targetFieldName, destinationMapping); } else { - logger.warn("[" + config.getId() + "] Failed to deduce mapping for [" + targetFieldName - + "], fall back to double."); + logger.warn("Failed to deduce mapping for [" + targetFieldName + "], fall back to double."); targetMapping.put(targetFieldName, "double"); } }); @@ -116,56 +86,21 @@ Map resolveMappings(Map aggregationSourceFieldNa fieldNamesForGrouping.forEach((targetFieldName, sourceFieldName) -> { String destinationMapping = sourceMappings.get(sourceFieldName); logger.debug( - "[" + config.getId() + "] Deduced mapping for: [" + targetFieldName + "] to [" + destinationMapping + "]"); + "Deduced mapping for: [" + targetFieldName + "] to [" + destinationMapping + "]"); if (destinationMapping != null) { targetMapping.put(targetFieldName, destinationMapping); } else { - logger.warn("[" + config.getId() + "] Failed to deduce 
mapping for [" + targetFieldName - + "], fall back to keyword."); + logger.warn("Failed to deduce mapping for [" + targetFieldName + "], fall back to keyword."); targetMapping.put(targetFieldName, "keyword"); } }); return targetMapping; } - private void runTestQuery(final ActionListener listener) { - QueryBuilder queryBuilder = new MatchAllQueryBuilder(); - SearchRequest searchRequest = new SearchRequest(config.getIndexPattern()); - - List> sources = config.getSourceConfig().getSources(); - - CompositeAggregationBuilder compositeAggregation = new CompositeAggregationBuilder(COMPOSITE_AGGREGATION_NAME, sources); - compositeAggregation.size(1); - - for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { - compositeAggregation.subAggregation(agg); - } - - SearchSourceBuilder sourceBuilder = new SearchSourceBuilder(); - sourceBuilder.aggregation(compositeAggregation); - sourceBuilder.size(0); - sourceBuilder.query(queryBuilder); - searchRequest.source(sourceBuilder); - - client.execute(SearchAction.INSTANCE, searchRequest, ActionListener.wrap(response -> { - if (response == null) { - listener.onFailure(new RuntimeException("Unexpected null response from test query")); - return; - } - if (response.status() != RestStatus.OK) { - listener.onFailure(new RuntimeException("Unexpected status from response of test query: " + response.status())); - return; - } - listener.onResponse(true); - }, e->{ - listener.onFailure(new RuntimeException("Failed to test query",e)); - })); - } - /* * Very "magic" helper method to extract the source mappings */ - private void getSourceFieldMappings(String index, String[] fields, + private static void getSourceFieldMappings(Client client, String index, String[] fields, ActionListener> listener) { GetFieldMappingsRequest fieldMappingRequest = new GetFieldMappingsRequest(); fieldMappingRequest.indices(index); @@ -178,7 +113,7 @@ private void getSourceFieldMappings(String index, String[] fields, })); } - Map extractSourceFieldMappings(Map>> mappings) { + private static Map extractSourceFieldMappings(Map>> mappings) { Map extractedTypes = new HashMap<>(); mappings.forEach((indexName, docTypeToMapping) -> { @@ -193,7 +128,7 @@ Map extractSourceFieldMappings(Map map = (Map) typeMap; if (map.containsKey("type")) { String type = map.get("type").toString(); - logger.debug("[" + config.getId() + "] Extracted type for [" + fieldName + "] : [" + type + "]"); + logger.debug("Extracted type for [" + fieldName + "] : [" + type + "]"); // TODO: overwrites types, requires resolve if // types are mixed extractedTypes.put(fieldName, type); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java new file mode 100644 index 0000000000000..b049666ea8db8 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.AbstractObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.support.ValueType; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/* + * Base class for a single source for group_by + */ +public abstract class SingleGroupSource> implements Writeable, ToXContentObject { + + public enum Type { + TERMS(0); + + private final byte id; + + Type(int id) { + this.id = (byte) id; + } + + public byte getId() { + return id; + } + + public static Type fromId(byte id) { + switch (id) { + case 0: + return TERMS; + default: + throw new IllegalArgumentException("unknown type"); + } + } + + public String value() { + return name().toLowerCase(Locale.ROOT); + } + } + + private static final ParseField FIELD = new ParseField("field"); + + // TODO: add script + private final String field; + + static , T> void declareValuesSourceFields(AbstractObjectParser parser, + ValueType targetValueType) { + // either script or field + parser.declareString(optionalConstructorArg(), FIELD); + } + + public SingleGroupSource(final String field) { + this.field = field; + } + + public SingleGroupSource(StreamInput in) throws IOException { + field = in.readOptionalString(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (field != null) { + builder.field(FIELD.getPreferredName(), field); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(field); + } + + public String getField() { + return field; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final SingleGroupSource that = (SingleGroupSource) other; + + return Objects.equals(this.field, that.field); + } + + @Override + public int hashCode() { + return Objects.hash(field); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java new file mode 100644 index 0000000000000..c4de273403af8 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +/* + * A terms aggregation source for group_by + */ +public class TermsGroupSource extends SingleGroupSource { + private static final String NAME = "data_frame_terms_group"; + + private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + + private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, (args) -> { + String field = (String) args[0]; + return new TermsGroupSource(field); + }); + + SingleGroupSource.declareValuesSourceFields(parser, null); + return parser; + } + + public TermsGroupSource(final String field) { + super(field); + } + + public TermsGroupSource(StreamInput in) throws IOException { + super(in); + } + + public static TermsGroupSource fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { + return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : PARSER.apply(parser, null); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java index 12beb8b86c2b8..edc46897f38c0 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/DataFrameTransformStateAndStatsTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStatsTests; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameTransformStateTests; -import org.elasticsearch.xpack.dataframe.transform.AbstractSerializingDataFrameTestCase; +import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; import java.io.IOException; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java index e4313fc1cf7e7..e2dc9edfe54db 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/PutDataFrameTransformActionRequestTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.xpack.dataframe.action.PutDataFrameTransformAction.Request; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfigTests; import org.junit.Before; import java.io.IOException; diff 
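TermsGroupSource above (like the other config classes in this patch) follows the strict/lenient parser convention: a strict ConstructingObjectParser for user-supplied REST input and a lenient one for configurations read back from the internal index, so that stored configs with newer or unknown fields can still be loaded. The following is a minimal, self-contained sketch of that pattern; the ExampleConfig class and its single "field" property are invented for illustration and are not part of the change.

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

public class ExampleConfig {
    private static final ParseField FIELD = new ParseField("field");
    // strict parser rejects unknown keys (REST input), lenient parser ignores them (internal reads)
    private static final ConstructingObjectParser<ExampleConfig, Void> STRICT_PARSER = createParser(false);
    private static final ConstructingObjectParser<ExampleConfig, Void> LENIENT_PARSER = createParser(true);

    private final String field;

    public ExampleConfig(String field) {
        this.field = field;
    }

    private static ConstructingObjectParser<ExampleConfig, Void> createParser(boolean ignoreUnknownFields) {
        ConstructingObjectParser<ExampleConfig, Void> parser = new ConstructingObjectParser<>("example_config",
                ignoreUnknownFields, args -> new ExampleConfig((String) args[0]));
        parser.declareString(ConstructingObjectParser.constructorArg(), FIELD);
        return parser;
    }

    public static ExampleConfig fromXContent(XContentParser parser, boolean ignoreUnknownFields) {
        return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null);
    }
}

Keeping both parsers in one class, with the boolean switch decided once in createParser, is the design choice that lets every config object in the pivot package expose the same fromXContent(parser, ignoreUnknownFields) entry point.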
--git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java index e7892d0d8ee14..2efee3faa2d6f 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/persistence/DataFrameTransformsConfigManagerTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfigTests; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfigTests; import org.junit.Before; public class DataFrameTransformsConfigManagerTests extends DataFrameSingleNodeTestCase { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java deleted file mode 100644 index c2a5a270b78f5..0000000000000 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/SourceConfigTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -package org.elasticsearch.xpack.dataframe.transform; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.xpack.dataframe.transform.SourceConfig; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -public class SourceConfigTests extends AbstractSerializingDataFrameTestCase { - - public static SourceConfig randomSourceConfig() { - int numSources = randomIntBetween(1, 10); - List> sources = new ArrayList<>(); - for (int i = 0; i < numSources; i++) { - sources.add(randomTermsSourceBuilder()); - } - return new SourceConfig(sources); - } - - @Override - protected SourceConfig doParseInstance(XContentParser parser) throws IOException { - return SourceConfig.fromXContent(parser); - } - - @Override - protected SourceConfig createTestInstance() { - return randomSourceConfig(); - } - - @Override - protected Reader instanceReader() { - return SourceConfig::new; - } - - private static TermsValuesSourceBuilder randomTermsSourceBuilder() { - TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder(randomAlphaOfLengthBetween(5, 10)); - if (randomBoolean()) { - terms.field(randomAlphaOfLengthBetween(1, 20)); - } else { - terms.script(new Script(randomAlphaOfLengthBetween(10, 20))); - } - terms.order(randomFrom(SortOrder.values())); - if (randomBoolean()) { - terms.missingBucket(true); - } - return terms; - } -} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java similarity index 96% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java index 5aafc631cf529..3d4addc2ca290 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AbstractSerializingDataFrameTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java similarity index 82% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java index 7b2621064ff6b..cdad3c810444d 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/DataFrameTransformConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java @@ -4,11 +4,11 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.pivot.PivotConfigTests; import org.junit.Before; import java.io.IOException; @@ -19,8 +19,7 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT public static DataFrameTransformConfig randomDataFrameTransformConfig() { return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), - randomAlphaOfLengthBetween(1, 10), SourceConfigTests.randomSourceConfig(), - AggregationConfigTests.randomAggregationConfig()); + randomAlphaOfLengthBetween(1, 10), PivotConfigTests.randomPivotConfig()); } @Before @@ -31,9 +30,9 @@ public void setUpOptionalId() { @Override protected DataFrameTransformConfig doParseInstance(XContentParser parser) throws IOException { if (randomBoolean()) { - return DataFrameTransformConfig.fromXContent(parser, transformId); + return DataFrameTransformConfig.fromXContent(parser, transformId, false); } else { - return DataFrameTransformConfig.fromXContent(parser, null); + return DataFrameTransformConfig.fromXContent(parser, null, false); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java similarity index 94% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java index b1fe1f9abdfbd..c6cb6fbd07035 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java @@ -4,14 +4,14 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.xpack.dataframe.transform.AggregationConfig; +import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; import java.io.IOException; import java.util.HashSet; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java similarity index 92% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java index 8038bb719f203..a0d5c4851212e 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transform/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.transform; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ContextParser; @@ -17,9 +17,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; -import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; -import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms; import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms; @@ -46,7 +44,6 @@ import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; -import org.elasticsearch.xpack.dataframe.transform.AggregationResultUtils; import java.io.IOException; import java.util.Collection; @@ -57,6 +54,7 @@ import java.util.stream.Collectors; import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; public class AggregationResultUtilsTests extends ESTestCase { @@ -95,8 +93,8 @@ protected NamedXContentRegistry xContentRegistry() { public void testExtractCompositeAggregationResults() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); - List> sources = Collections.singletonList( - new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test") + List sources = Collections.singletonList( + new GroupConfig(targetField, TERMS, new 
TermsGroupSource("doesn't_matter_for_this_test")) ); String aggName = randomAlphaOfLengthBetween(5, 10); @@ -148,9 +146,9 @@ public void testExtractCompositeAggregationResultsMultiSources() throws IOExcept String targetField = randomAlphaOfLengthBetween(5, 10); String targetField2 = randomAlphaOfLengthBetween(5, 10) + "_2"; - List> sources = asList( - new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test"), - new TermsValuesSourceBuilder(targetField2).field("doesn't_matter_for_this_test_too") + List sources = asList( + new GroupConfig(targetField, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")), + new GroupConfig(targetField2, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")) ); String aggName = randomAlphaOfLengthBetween(5, 10); @@ -221,8 +219,9 @@ aggTypedName, asMap( public void testExtractCompositeAggregationResultsMultiAggregations() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); - List> sources = Collections.singletonList( - new TermsValuesSourceBuilder(targetField).field("doesn't_matter_for_this_test") + + List sources = Collections.singletonList( + new GroupConfig(targetField, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")) ); String aggName = randomAlphaOfLengthBetween(5, 10); @@ -282,8 +281,8 @@ aggTypedName2, asMap( executeTest(sources, aggregationBuilders, input, expected, 200); } - private void executeTest(List> sources, Collection aggregationBuilders, - Map input, List> expected, long expectedDocCounts) throws IOException { + private void executeTest(Iterable sources, Collection aggregationBuilders, Map input, + List> expected, long expectedDocCounts) throws IOException { DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); builder.map(input); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java similarity index 93% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java index 644174b8c8009..23720ab6af3b8 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/AggregationsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationsTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.dataframe.support; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java new file mode 100644 index 0000000000000..e3d1ed3901558 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; + +public class GroupConfigTests extends AbstractSerializingTestCase { + + public static GroupConfig randomGroupConfig() { + String targetFieldName = randomAlphaOfLengthBetween(1, 20); + return new GroupConfig(targetFieldName, TERMS, TermsGroupSourceTests.randomTermsGroupSource()); + } + + @Override + protected GroupConfig doParseInstance(XContentParser parser) throws IOException { + return GroupConfig.fromXContent(parser, false); + } + + @Override + protected GroupConfig createTestInstance() { + return randomGroupConfig(); + } + + @Override + protected Reader instanceReader() { + return GroupConfig::new; + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java new file mode 100644 index 0000000000000..c6d835c420d7b --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class PivotConfigTests extends AbstractSerializingDataFrameTestCase { + + public static PivotConfig randomPivotConfig() { + List groups = new ArrayList<>(); + + for (int i = 0; i < randomIntBetween(1, 10); ++i) { + groups.add(GroupConfigTests.randomGroupConfig()); + } + + return new PivotConfig(groups, AggregationConfigTests.randomAggregationConfig()); + } + + @Override + protected PivotConfig doParseInstance(XContentParser parser) throws IOException { + return PivotConfig.fromXContent(parser, false); + } + + @Override + protected PivotConfig createTestInstance() { + return randomPivotConfig(); + } + + @Override + protected Reader instanceReader() { + return PivotConfig::new; + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java similarity index 67% rename from x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java rename to x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index 02171bbf387b3..74bbe6f5acb73 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/support/TransformValidatorTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.dataframe.support; +package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.Action; @@ -22,14 +22,12 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; -import org.elasticsearch.xpack.dataframe.transform.AggregationConfig; -import org.elasticsearch.xpack.dataframe.transform.DataFrameTransformConfig; -import org.elasticsearch.xpack.dataframe.transform.SourceConfig; import org.junit.After; import org.junit.Before; @@ -40,10 +38,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import static java.util.Arrays.asList; import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; import static org.hamcrest.Matchers.equalTo; -public class TransformValidatorTests extends ESTestCase { +public class PivotTests extends ESTestCase { private NamedXContentRegistry namedXContentRegistry; private Client client; @@ -76,60 +76,42 @@ protected NamedXContentRegistry xContentRegistry() { } public void testValidateExistingIndex() throws Exception { - SourceConfig sourceConfig = getValidSourceConfig(); - AggregationConfig aggregationConfig = getValidAggregationConfig(); + Pivot pivot = new Pivot("existing_source_index", new MatchAllQueryBuilder(), getValidPivotConfig()); - DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source_index", "non_existing_dest", - sourceConfig, aggregationConfig); - - assertValidTransform(client, config); + assertValidTransform(client, pivot); } public void testValidateNonExistingIndex() throws Exception { - SourceConfig sourceConfig = getValidSourceConfig(); - AggregationConfig aggregationConfig = getValidAggregationConfig(); - - DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "non_existing_source_index", - "non_existing_dest", sourceConfig, aggregationConfig); + Pivot pivot = new Pivot("non_existing_source_index", new MatchAllQueryBuilder(), getValidPivotConfig()); - assertInvalidTransform(client, config); + assertInvalidTransform(client, pivot); } public void testSearchFailure() throws Exception { - SourceConfig sourceConfig = getValidSourceConfig(); - AggregationConfig aggregationConfig = getValidAggregationConfig(); - // test a failure during the search operation, transform creation fails if // search has failures although they might just be temporary - DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source_index_with_failing_shards", - "non_existing_dest", sourceConfig, aggregationConfig); + Pivot pivot = new Pivot("existing_source_index_with_failing_shards", new MatchAllQueryBuilder(), getValidPivotConfig()); - assertInvalidTransform(client, config); + assertInvalidTransform(client, pivot); } public void testValidateAllSupportedAggregations() throws Exception { - SourceConfig sourceConfig = getValidSourceConfig(); - for (String agg : supportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - 
DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source", "non_existing_dest", - sourceConfig, aggregationConfig); + Pivot pivot = new Pivot("existing_source", new MatchAllQueryBuilder(), getValidPivotConfig(aggregationConfig)); - assertValidTransform(client, config); + assertValidTransform(client, pivot); } } public void testValidateAllUnsupportedAggregations() throws Exception { - SourceConfig sourceConfig = getValidSourceConfig(); - for (String agg : unsupportedAggregations) { AggregationConfig aggregationConfig = getAggregationConfig(agg); - DataFrameTransformConfig config = new DataFrameTransformConfig(getTestName(), "existing_source", "non_existing_dest", - sourceConfig, aggregationConfig); + Pivot pivot = new Pivot("existing_source", new MatchAllQueryBuilder(), getValidPivotConfig(aggregationConfig)); - assertInvalidTransform(client, config); + assertInvalidTransform(client, pivot); } } @@ -172,16 +154,22 @@ protected void } } - private SourceConfig getValidSourceConfig() throws IOException { - return parseSource("{\"sources\": [\n" + " {\n" + " \"pivot\": {\n" + " \"terms\": {\n" + " \"field\": \"terms\"\n" - + " }\n" + " }\n" + " }\n" + "]}"); + private PivotConfig getValidPivotConfig() throws IOException { + List sources = asList( + new GroupConfig("terms", TERMS, new TermsGroupSource("terms")), + new GroupConfig("terms", TERMS, new TermsGroupSource("terms")) + ); + + return new PivotConfig(sources, getValidAggregationConfig()); } - private SourceConfig parseSource(String json) throws IOException { - final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + private PivotConfig getValidPivotConfig(AggregationConfig aggregationConfig) throws IOException { + List sources = asList( + new GroupConfig("terms", TERMS, new TermsGroupSource("terms")), + new GroupConfig("terms", TERMS, new TermsGroupSource("terms")) + ); - return SourceConfig.fromXContent(parser); + return new PivotConfig(sources, aggregationConfig); } private AggregationConfig getValidAggregationConfig() throws IOException { @@ -201,20 +189,18 @@ private AggregationConfig parseAggregations(String json) throws IOException { return AggregationConfig.fromXContent(parser); } - private static void assertValidTransform(Client client, DataFrameTransformConfig config) throws Exception { - validate(client, config, true); + private static void assertValidTransform(Client client, Pivot pivot) throws Exception { + validate(client, pivot, true); } - private static void assertInvalidTransform(Client client, DataFrameTransformConfig config) throws Exception { - validate(client, config, false); + private static void assertInvalidTransform(Client client, Pivot pivot) throws Exception { + validate(client, pivot, false); } - private static void validate(Client client, DataFrameTransformConfig config, boolean expectValid) throws Exception { - TransformValidator validator = new TransformValidator(config, client); - + private static void validate(Client client, Pivot pivot, boolean expectValid) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference exceptionHolder = new AtomicReference<>(); - validator.validate(ActionListener.wrap(validity -> { + pivot.validate(client, ActionListener.wrap(validity -> { assertEquals(expectValid, validity); latch.countDown(); }, e -> { diff --git 
a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSourceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSourceTests.java new file mode 100644 index 0000000000000..984cd40bd9640 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSourceTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class TermsGroupSourceTests extends AbstractSerializingTestCase { + + public static TermsGroupSource randomTermsGroupSource() { + String field = randomAlphaOfLengthBetween(1, 20); + + return new TermsGroupSource(field); + } + + @Override + protected TermsGroupSource doParseInstance(XContentParser parser) throws IOException { + return TermsGroupSource.fromXContent(parser, false); + } + + @Override + protected TermsGroupSource createTestInstance() { + return randomTermsGroupSource(); + } + + @Override + protected Reader instanceReader() { + return TermsGroupSource::new; + } + +} From 84c1ba2fa2186e4aa1bffb104488952025757d5f Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Mon, 28 Jan 2019 07:42:40 +0100 Subject: [PATCH 41/49] [ML-DataFrame] add query support (#37827) adds query support to data frame transforms --- .../core/dataframe/DataFrameMessages.java | 5 + .../integration/DataFramePivotRestIT.java | 41 +++++-- .../integration/DataFrameRestTestCase.java | 20 +++- .../integration/DataFrameUsageIT.java | 2 +- .../action/GetDataFrameTransformsAction.java | 17 +++ .../transforms/DataFrameIndexer.java | 5 +- .../transforms/DataFrameTransformConfig.java | 45 +++++-- .../transforms/DataFrameTransformTask.java | 7 ++ .../dataframe/transforms/QueryConfig.java | 113 ++++++++++++++++++ .../AbstractSerializingDataFrameTestCase.java | 20 +++- .../DataFrameTransformConfigTests.java | 2 +- .../MockDeprecatedQueryBuilder.java | 88 ++++++++++++++ .../transforms/QueryConfigTests.java | 112 +++++++++++++++++ 13 files changed, 444 insertions(+), 33 deletions(-) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfig.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedQueryBuilder.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index e539143449110..22c927e6196d5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -35,6 +35,11 @@ public class DataFrameMessages { "Data frame transform configuration must specify exactly 1 function"; public static final String 
DATA_FRAME_TRANSFORM_PIVOT_FAILED_TO_CREATE_COMPOSITE_AGGREGATION = "Failed to create composite aggregation from pivot function"; + public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_INVALID = + "Data frame transform configuration [{0}] has invalid elements"; + + public static final String LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY = + "Failed to parse query for data frame transform"; private DataFrameMessages() { } diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index adf8e728c99ec..71e5349bd4461 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -20,7 +20,7 @@ public class DataFramePivotRestIT extends DataFrameRestTestCase { - private boolean indicesCreated = false; + private static boolean indicesCreated = false; // preserve indices in order to reuse source indices in several test cases @Override @@ -44,16 +44,9 @@ public void testSimplePivot() throws Exception { String transformId = "simplePivot"; String dataFrameIndex = "pivot_reviews"; - createPivotReviewsTransform(transformId, dataFrameIndex); + createPivotReviewsTransform(transformId, dataFrameIndex, null); - // start the transform - final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); - Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); - assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE)); - - // wait until the dataframe has been created and all data is available - waitForDataFrameGeneration(transformId); - refreshIndex(dataFrameIndex); + startAndWaitForTransform(transformId, dataFrameIndex); // we expect 27 documents as there shall be 27 user_id's Map indexStats = getAsMap(dataFrameIndex + "/_stats"); @@ -67,6 +60,34 @@ public void testSimplePivot() throws Exception { assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); } + public void testSimplePivotWithQuery() throws Exception { + String transformId = "simplePivotWithQuery"; + String dataFrameIndex = "pivot_reviews_user_id_above_20"; + String query = "\"match\": {\"user_id\": \"user_26\"}"; + + createPivotReviewsTransform(transformId, dataFrameIndex, query); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect only 1 document due to the query + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(1, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); + } + + private void startAndWaitForTransform(String transformId, String dataFrameIndex) throws IOException, Exception { + // start the transform + final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); + Map startTransformResponse = entityAsMap(client().performRequest(startTransformRequest)); + assertThat(startTransformResponse.get("started"), equalTo(Boolean.TRUE)); + + // wait until the dataframe has been created and all data is available + waitForDataFrameGeneration(transformId); + refreshIndex(dataFrameIndex); + } + + + 
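For reference, the request body that createPivotReviewsTransform (below) assembles for testSimplePivotWithQuery looks roughly like the following. The source index, destination index and query fragment are taken from the test; the group_by terms field, the aggregations key and the aggregation name fall outside the visible hunks of this excerpt, so "user_id", "aggregations" and "avg_rating" are assumptions here.

{
  "source": "reviews",
  "dest": "pivot_reviews_user_id_above_20",
  "query": {
    "match": { "user_id": "user_26" }
  },
  "pivot": {
    "group_by": [ {
      "reviewer": {
        "terms": { "field": "user_id" }
      }
    } ],
    "aggregations": {
      "avg_rating": {
        "avg": { "field": "stars" }
      }
    }
  }
}

The only part added by this commit is the optional top-level "query" object; when it is omitted the indexer falls back to a match_all query, which is why testSimplePivot passes null for the query argument.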
private void waitForDataFrameGeneration(String transformId) throws Exception { assertBusy(() -> { long generation = getDataFrameGeneration(transformId); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 51bead14b5bfd..1845d6c0f8a27 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -101,12 +101,20 @@ protected void createReviewsIndex() throws IOException { client().performRequest(bulkRequest); } - protected void createPivotReviewsTransform(String transformId, String dataFrameIndex) throws IOException { + protected void createPivotReviewsTransform(String transformId, String dataFrameIndex, String query) throws IOException { final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); - createDataframeTransformRequest.setJsonEntity("{" + + String config = "{" + " \"source\": \"reviews\"," - + " \"dest\": \"" + dataFrameIndex + "\"," - + " \"pivot\": {" + + " \"dest\": \"" + dataFrameIndex + "\","; + + if (query != null) { + config += "\"query\": {" + + query + + "},"; + } + + config += " \"pivot\": {" + " \"group_by\": [ {" + " \"reviewer\": {" + " \"terms\": {" @@ -117,7 +125,9 @@ protected void createPivotReviewsTransform(String transformId, String dataFrameI + " \"avg\": {" + " \"field\": \"stars\"" + " } } } }" - + "}"); + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); assertTrue(indexExists(dataFrameIndex)); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java index abaa2091554a4..c5436049c1abe 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameUsageIT.java @@ -46,7 +46,7 @@ public void testUsage() throws IOException { assertEquals(null, XContentMapValues.extractValue("data_frame.stats", usageAsMap)); // create a transform - createPivotReviewsTransform("test_usage", "pivot_reviews"); + createPivotReviewsTransform("test_usage", "pivot_reviews", null); usageResponse = client().performRequest(new Request("GET", "_xpack/usage")); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java index 7ec993680084b..8f1d2ff206fc1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java @@ 
-6,6 +6,7 @@ package org.elasticsearch.xpack.dataframe.action; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; @@ -15,10 +16,12 @@ import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -36,6 +39,10 @@ public class GetDataFrameTransformsAction extends Action initial @Override protected void onStartJob(long now) { - // for now a match all, to be replaced - QueryBuilder queryBuilder = new MatchAllQueryBuilder(); + QueryConfig queryConfig = getConfig().getQueryConfig(); + QueryBuilder queryBuilder = queryConfig != null ? queryConfig.getQuery() : new MatchAllQueryBuilder(); + pivot = new Pivot(getConfig().getSource(), queryBuilder, getConfig().getPivotConfig()); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java index b6aa2cc35d918..9c27ea51f1aeb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.dataframe.transforms; +import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -30,7 +31,7 @@ /** * This class holds the configuration details of a data frame transform */ -public class DataFrameTransformConfig implements Writeable, ToXContentObject { +public class DataFrameTransformConfig extends AbstractDiffable implements Writeable, ToXContentObject { private static final String NAME = "data_frame_transforms"; private static final ParseField SOURCE = new ParseField("source"); @@ -40,28 +41,32 @@ public class DataFrameTransformConfig implements Writeable, ToXContentObject { // types of transforms private static final ParseField PIVOT_TRANSFORM = new ParseField("pivot"); - private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); private final String id; private final String source; private final String dest; + + private final QueryConfig queryConfig; private final PivotConfig pivotConfig; - private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args, 
optionalId) -> { String id = args[0] != null ? (String) args[0] : optionalId; String source = (String) args[1]; String dest = (String) args[2]; - PivotConfig pivotConfig = (PivotConfig) args[3]; - return new DataFrameTransformConfig(id, source, dest, pivotConfig); + QueryConfig queryConfig = (QueryConfig) args[3]; + PivotConfig pivotConfig = (PivotConfig) args[4]; + return new DataFrameTransformConfig(id, source, dest, queryConfig, pivotConfig); }); parser.declareString(optionalConstructorArg(), DataFrameField.ID); parser.declareString(constructorArg(), SOURCE); parser.declareString(constructorArg(), DESTINATION); - parser.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p, ignoreUnknownFields), PIVOT_TRANSFORM); + parser.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p, lenient), QUERY); + parser.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p, lenient), PIVOT_TRANSFORM); return parser; } @@ -73,13 +78,15 @@ public static String documentId(String transformId) { public DataFrameTransformConfig(final String id, final String source, final String dest, + final QueryConfig queryConfig, final PivotConfig pivotConfig) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); this.source = ExceptionsHelper.requireNonNull(source, SOURCE.getPreferredName()); this.dest = ExceptionsHelper.requireNonNull(dest, DESTINATION.getPreferredName()); + this.queryConfig = queryConfig; this.pivotConfig = pivotConfig; - // at least one transform must be defined + // at least one function must be defined if (this.pivotConfig == null) { throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM); } @@ -89,6 +96,7 @@ public DataFrameTransformConfig(final StreamInput in) throws IOException { id = in.readString(); source = in.readString(); dest = in.readString(); + queryConfig = in.readOptionalWriteable(QueryConfig::new); pivotConfig = in.readOptionalWriteable(PivotConfig::new); } @@ -112,11 +120,24 @@ public PivotConfig getPivotConfig() { return pivotConfig; } + public QueryConfig getQueryConfig() { + return queryConfig; + } + + public boolean isValid() { + // collect validation results from all child objects + if (queryConfig != null && queryConfig.isValid() == false) { + return false; + } + return true; + } + @Override public void writeTo(final StreamOutput out) throws IOException { out.writeString(id); out.writeString(source); out.writeString(dest); + out.writeOptionalWriteable(queryConfig); out.writeOptionalWriteable(pivotConfig); } @@ -126,6 +147,9 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(DataFrameField.ID.getPreferredName(), id); builder.field(SOURCE.getPreferredName(), source); builder.field(DESTINATION.getPreferredName(), dest); + if (queryConfig != null) { + builder.field(QUERY.getPreferredName(), queryConfig); + } if (pivotConfig != null) { builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig); } @@ -148,12 +172,13 @@ public boolean equals(Object other) { return Objects.equals(this.id, that.id) && Objects.equals(this.source, that.source) && Objects.equals(this.dest, that.dest) + && Objects.equals(this.queryConfig, that.queryConfig) && Objects.equals(this.pivotConfig, that.pivotConfig); } @Override public int hashCode() { - return Objects.hash(id, source, dest, pivotConfig); + return Objects.hash(id, source, dest, queryConfig, pivotConfig); } @Override @@ -164,6 +189,6 @@ public String 
toString() { public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId, boolean ignoreUnknownFields) throws IOException { - return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, optionalTransformId) : PARSER.apply(parser, optionalTransformId); + return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, optionalTransformId) : STRICT_PARSER.apply(parser, optionalTransformId); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java index cbc5824521fa1..71570769c052d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformTask.java @@ -265,6 +265,13 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { DataFrameMessages.getMessage(DataFrameMessages.FAILED_TO_LOAD_TRANSFORM_CONFIGURATION, transformId), e); } } + + // todo: set job into failed state + if (transformConfig.isValid() == false) { + throw new RuntimeException( + DataFrameMessages.getMessage(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_INVALID, transformId)); + } + return super.maybeTriggerAsyncJob(now); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfig.java new file mode 100644 index 0000000000000..6da61e711d842 --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfig.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class QueryConfig extends AbstractDiffable implements Writeable, ToXContentObject { + private static final Logger logger = LogManager.getLogger(QueryConfig.class); + + // we store the query in 2 formats: the raw format and the parsed format, because: + // - the parsed format adds defaults, which were not part of the original and looks odd on XContent retrieval + // - if parsing fails (e.g. query uses removed functionality), the source can be retrieved + private final Map source; + private final QueryBuilder query; + + public QueryConfig(final Map source, final QueryBuilder query) { + this.source = Objects.requireNonNull(source); + this.query = query; + } + + public QueryConfig(final StreamInput in) throws IOException { + this.source = in.readMap(); + this.query = in.readOptionalNamedWriteable(QueryBuilder.class); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.map(source); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(source); + out.writeOptionalNamedWriteable(query); + } + + public QueryBuilder getQuery() { + return query; + } + + public static QueryConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { + // we need 2 passes, but the parser can not be cloned, so we parse 1st into a map and then re-parse that for syntax checking + + // remember the registry, needed for the 2nd pass + NamedXContentRegistry registry = parser.getXContentRegistry(); + + Map source = parser.mapOrdered(); + QueryBuilder query = null; + + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); + XContentParser sourceParser = XContentType.JSON.xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(xContentBuilder).streamInput())) { + query = AbstractQueryBuilder.parseInnerQueryBuilder(sourceParser); + } catch (Exception e) { + if (lenient) { + logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY, e); + } else { + throw e; + } + } + + return new QueryConfig(source, query); + } + + @Override + public int hashCode() { + return Objects.hash(source, query); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final QueryConfig that = 
(QueryConfig) other; + + return Objects.equals(this.source, that.source) && Objects.equals(this.query, that.query); + } + + public boolean isValid() { + return this.query != null; + } +} \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java index 3d4addc2ca290..8b3ad3d7794e6 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java @@ -6,24 +6,27 @@ package org.elasticsearch.xpack.dataframe.transforms; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractSerializingTestCase; import org.junit.Before; +import java.util.List; + import static java.util.Collections.emptyList; public abstract class AbstractSerializingDataFrameTestCase extends AbstractSerializingTestCase { /** - * Test case that ensure aggregation named objects are registered + * Test case that ensures aggregation named objects are registered */ - private NamedWriteableRegistry namedWriteableRegistry; private NamedXContentRegistry namedXContentRegistry; @@ -31,8 +34,17 @@ public abstract class AbstractSerializingDataFrameTestCase namedWriteables = searchModule.getNamedWriteables(); + namedWriteables.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, MockDeprecatedQueryBuilder.NAME, + MockDeprecatedQueryBuilder::new)); + + List namedXContents = searchModule.getNamedXContents(); + namedXContents.add(new NamedXContentRegistry.Entry(QueryBuilder.class, + new ParseField(MockDeprecatedQueryBuilder.NAME), (p, c) -> MockDeprecatedQueryBuilder.fromXContent(p))); + + namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + namedXContentRegistry = new NamedXContentRegistry(namedXContents); } @Override diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java index cdad3c810444d..9ac86a565ba5a 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java @@ -19,7 +19,7 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT public static DataFrameTransformConfig randomDataFrameTransformConfig() { return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), - randomAlphaOfLengthBetween(1, 10), PivotConfigTests.randomPivotConfig()); + randomAlphaOfLengthBetween(1, 10), QueryConfigTests.randomQueryConfig(), PivotConfigTests.randomPivotConfig()); } @Before diff --git 
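QueryConfig above keeps both the raw source map and the parsed QueryBuilder, and a lenient parse never throws but leaves the query null, which isValid() later reports. Below is a minimal sketch of how a caller might exercise that behaviour, assuming a NamedXContentRegistry with the default query parsers registered (for example the one built in AbstractSerializingDataFrameTestCase above); the QueryConfigParsingSketch class and its helper method are invented for illustration and are not part of the patch.

import java.io.IOException;

import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class QueryConfigParsingSketch {

    // Parse a "query" object from JSON. Strict mode (lenient == false) rethrows parse failures to
    // the caller; lenient mode only logs them, keeps the raw source map for round-tripping and
    // leaves getQuery() == null, so isValid() returns false and the transform task can refuse to run.
    public static QueryConfig parseQueryConfig(NamedXContentRegistry registry, String json, boolean lenient) throws IOException {
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(registry,
                LoggingDeprecationHandler.INSTANCE, json)) {
            return QueryConfig.fromXContent(parser, lenient);
        }
    }
}

For example, parseQueryConfig(registry, "{ \"match\": { \"user_id\": \"user_26\" } }", true) would yield a config whose isValid() is true, while a query using removed or unknown syntax parsed leniently yields one whose isValid() is false.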
a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedQueryBuilder.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedQueryBuilder.java new file mode 100644 index 0000000000000..223a7100d3109 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedQueryBuilder.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms; + +import org.apache.logging.log4j.LogManager; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; + +/* + * Utility test class to write a deprecation message on usage + */ +public class MockDeprecatedQueryBuilder extends AbstractQueryBuilder { + + public static final String NAME = "deprecated_match_all"; + public static final String DEPRECATION_MESSAGE = "expected deprecation message from MockDeprecatedQueryBuilder"; + + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(MockDeprecatedQueryBuilder.class)); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, MockDeprecatedQueryBuilder::new); + + static { + declareStandardFields(PARSER); + } + + public MockDeprecatedQueryBuilder() { + } + + public MockDeprecatedQueryBuilder(StreamInput in) throws IOException { + super(in); + } + + public static MockDeprecatedQueryBuilder fromXContent(XContentParser parser) { + try { + deprecationLogger.deprecatedAndMaybeLog("deprecated_mock", DEPRECATION_MESSAGE); + + return PARSER.apply(parser, null); + } catch (IllegalArgumentException e) { + throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e); + } + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + printBoostAndQueryName(builder); + builder.endObject(); + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + return Queries.newMatchAllQuery(); + } + + @Override + protected boolean doEquals(MockDeprecatedQueryBuilder other) { + return true; + } + + @Override + protected int doHashCode() { + return 0; + } +} \ No newline at end of file diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java new file mode 100644 index 0000000000000..8da8e75e5e85d --- /dev/null +++ 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.transforms; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.junit.Before; + +import java.io.IOException; +import java.util.LinkedHashMap; + +public class QueryConfigTests extends AbstractSerializingDataFrameTestCase { + + private boolean lenient; + + public static QueryConfig randomQueryConfig() { + + QueryBuilder queryBuilder = randomBoolean() ? new MatchAllQueryBuilder() : new MatchNoneQueryBuilder(); + LinkedHashMap source = null; + + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + XContentBuilder content = queryBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + source = (LinkedHashMap) XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON) + .v2(); + } catch (IOException e) { + // should not happen + fail("failed to create random query config"); + } + + return new QueryConfig(source, queryBuilder); + } + + public static QueryConfig randomInvalidQueryConfig() { + // create something broken but with a source + LinkedHashMap source = new LinkedHashMap<>(); + for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), randomIntBetween(1, 10))) { + source.put(key, randomAlphaOfLengthBetween(1, 20)); + } + + return new QueryConfig(source, null); + } + + @Before + public void setRandomFeatures() { + lenient = randomBoolean(); + } + + @Override + protected QueryConfig doParseInstance(XContentParser parser) throws IOException { + return QueryConfig.fromXContent(parser, lenient); + } + + @Override + protected QueryConfig createTestInstance() { + return lenient ? randomBoolean() ? 
randomQueryConfig() : randomInvalidQueryConfig() : randomQueryConfig(); + } + + @Override + protected Reader instanceReader() { + return QueryConfig::new; + } + + public void testValidQueryParsing() throws IOException { + QueryBuilder query = new MatchQueryBuilder("key", "value"); + String source = query.toString(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + QueryConfig queryConfig = QueryConfig.fromXContent(parser, true); + assertEquals(query, queryConfig.getQuery()); + assertTrue(queryConfig.isValid()); + } + } + + public void testFailOnStrictPassOnLenient() throws IOException { + String source = "{\"query_element_does_not_exist\" : {}}"; + + // lenient, passes but reports invalid + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + QueryConfig query = QueryConfig.fromXContent(parser, true); + assertFalse(query.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(ParsingException.class, () -> QueryConfig.fromXContent(parser, false)); + } + } + + public void testDeprecation() throws IOException { + String source = "{\"" + MockDeprecatedQueryBuilder.NAME + "\" : {}}"; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + QueryConfig query = QueryConfig.fromXContent(parser, false); + assertTrue(query.isValid()); + assertWarnings(MockDeprecatedQueryBuilder.DEPRECATION_MESSAGE); + } + } +} From d23ac76d2104ebea9938bf95842a22ccaf3f5c5e Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 1 Feb 2019 17:07:26 +0100 Subject: [PATCH 42/49] [ML-DataFrame] Feature/fib lenient aggs (#38122) adds lenient parsing for pivot aggregations and allow deprecation warnings to be forwarded --- .../core/dataframe/DataFrameMessages.java | 2 + .../action/GetDataFrameTransformsAction.java | 19 ++-- .../transforms/DataFrameTransformConfig.java | 9 +- .../transforms/pivot/AggregationConfig.java | 57 +++++++++--- .../transforms/pivot/GroupConfig.java | 4 +- .../transforms/pivot/PivotConfig.java | 18 ++-- .../transforms/pivot/TermsGroupSource.java | 10 +- ...ataFrameTransformsActionResponseTests.java | 46 ++++++++++ .../AbstractSerializingDataFrameTestCase.java | 6 ++ .../DataFrameTransformConfigTests.java | 9 ++ .../MockDeprecatedAggregationBuilder.java | 92 +++++++++++++++++++ .../pivot/AggregationConfigTests.java | 86 ++++++++++++++--- .../transforms/pivot/PivotConfigTests.java | 10 ++ .../transforms/pivot/PivotTests.java | 2 +- 14 files changed, 324 insertions(+), 46 deletions(-) create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionResponseTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedAggregationBuilder.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index 22c927e6196d5..e1b94425c3b06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -40,6 +40,8 @@ public class DataFrameMessages { public static final String LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY = "Failed to parse query for data frame transform"; + public static final String 
LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION = + "Failed to parse aggregation for data frame pivot transform"; private DataFrameMessages() { } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java index 8f1d2ff206fc1..5e10454207c21 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -39,7 +40,6 @@ public class GetDataFrameTransformsAction extends Action transformConfigurations; public Response(List transformConfigs) { @@ -168,8 +171,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - int invalidTransforms = 0; - + List invalidTransforms = new ArrayList<>(); builder.startObject(); builder.field(DataFrameField.COUNT.getPreferredName(), transformConfigurations.size()); // XContentBuilder does not support passing the params object for Iterables @@ -178,13 +180,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (DataFrameTransformConfig configResponse : transformConfigurations) { configResponse.toXContent(builder, params); if (configResponse.isValid() == false) { - ++invalidTransforms; + invalidTransforms.add(configResponse.getId()); } } builder.endArray(); - if (invalidTransforms != 0) { - builder.field(INVALID_TRANSFORMS.getPreferredName(), invalidTransforms); - deprecationLogger.deprecated("Found [{}] invalid transforms", invalidTransforms); + if (invalidTransforms.isEmpty() == false) { + builder.startObject(INVALID_TRANSFORMS.getPreferredName()); + builder.field(DataFrameField.COUNT.getPreferredName(), invalidTransforms.size()); + builder.field(DataFrameField.TRANSFORMS.getPreferredName(), invalidTransforms); + builder.endObject(); + deprecationLogger.deprecated(INVALID_TRANSFORMS_DEPRECATION_WARNING, invalidTransforms.size()); } builder.endObject(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java index 9c27ea51f1aeb..268b975e5e23b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java @@ -129,6 +129,11 @@ public boolean isValid() { if (queryConfig != null && queryConfig.isValid() == false) { return false; } + + if (pivotConfig != null && pivotConfig.isValid() == false) { + return false; + } + return true; } @@ -187,8 +192,8 @@ public String toString() { } public static DataFrameTransformConfig fromXContent(final XContentParser parser, @Nullable final String optionalTransformId, - boolean ignoreUnknownFields) throws IOException { + boolean lenient) throws IOException { - return ignoreUnknownFields ? 
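
[Aside, illustration only — not part of the patch series.] With the Response change above, invalid configurations are reported as a structured "invalid_transforms" object plus a deprecation warning instead of a bare counter. The snippet condenses GetDataFrameTransformsActionResponseTests further down and assumes it runs inside an ESTestCase-derived test (createParser, assertWarnings and the random config helpers come from the test classes in this patch).

    // One of two configs is invalid; its id shows up under invalid_transforms.
    List<DataFrameTransformConfig> transforms = Arrays.asList(
            DataFrameTransformConfigTests.randomDataFrameTransformConfig(),
            DataFrameTransformConfigTests.randomInvalidDataFrameTransformConfig());
    Response response = new Response(transforms);
    XContentBuilder builder = XContentFactory.jsonBuilder();
    response.toXContent(builder, ToXContent.EMPTY_PARAMS);
    Map<String, Object> asMap = createParser(builder).map();
    assertEquals(1, XContentMapValues.extractValue("invalid_transforms.count", asMap));
    assertEquals(Collections.singletonList(transforms.get(1).getId()),
            XContentMapValues.extractValue("invalid_transforms.transforms", asMap));
    assertWarnings(LoggerMessageFormat.format(Response.INVALID_TRANSFORMS_DEPRECATION_WARNING, 1));
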
LENIENT_PARSER.apply(parser, optionalTransformId) : STRICT_PARSER.apply(parser, optionalTransformId); + return lenient ? LENIENT_PARSER.apply(parser, optionalTransformId) : STRICT_PARSER.apply(parser, optionalTransformId); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java index 02b999abd703f..d74b0cd36ffbb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java @@ -6,17 +6,26 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import java.io.IOException; import java.util.Collection; +import java.util.Map; import java.util.Objects; /* @@ -26,38 +35,60 @@ * */ public class AggregationConfig implements Writeable, ToXContentObject { + private static final Logger logger = LogManager.getLogger(AggregationConfig.class); - private final AggregatorFactories.Builder aggregatorFactoryBuilder; + // we store the query in 2 formats: the raw format and the parsed format + private final Map source; + private final AggregatorFactories.Builder aggregations; - public AggregationConfig(AggregatorFactories.Builder aggregatorFactoryBuilder) { - this.aggregatorFactoryBuilder = aggregatorFactoryBuilder; + public AggregationConfig(final Map source, AggregatorFactories.Builder aggregations) { + this.source = source; + this.aggregations = aggregations; } public AggregationConfig(final StreamInput in) throws IOException { - aggregatorFactoryBuilder = new AggregatorFactories.Builder(in); + source = in.readMap(); + aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return aggregatorFactoryBuilder.toXContent(builder, params); + return builder.map(source); } @Override public void writeTo(StreamOutput out) throws IOException { - aggregatorFactoryBuilder.writeTo(out); + out.writeMap(source); + out.writeOptionalWriteable(aggregations); } public Collection getAggregatorFactories() { - return aggregatorFactoryBuilder.getAggregatorFactories(); + return aggregations.getAggregatorFactories(); } - public static AggregationConfig fromXContent(final XContentParser parser) throws IOException { - return new AggregationConfig(AggregatorFactories.parseAggregators(parser)); + public static AggregationConfig 
fromXContent(final XContentParser parser, boolean lenient) throws IOException { + NamedXContentRegistry registry = parser.getXContentRegistry(); + Map source = parser.mapOrdered(); + AggregatorFactories.Builder aggregations = null; + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); + XContentParser sourceParser = XContentType.JSON.xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(xContentBuilder).streamInput())) { + sourceParser.nextToken(); + aggregations = AggregatorFactories.parseAggregators(sourceParser); + } catch (Exception e) { + if (lenient) { + logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION, e); + } else { + throw e; + } + } + + return new AggregationConfig(source, aggregations); } @Override public int hashCode() { - return Objects.hash(aggregatorFactoryBuilder); + return Objects.hash(source, aggregations); } @Override @@ -72,6 +103,10 @@ public boolean equals(Object other) { final AggregationConfig that = (AggregationConfig) other; - return Objects.equals(this.aggregatorFactoryBuilder, that.aggregatorFactoryBuilder); + return Objects.equals(this.source, that.source) && Objects.equals(this.aggregations, that.aggregations); + } + + public boolean isValid() { + return this.aggregations != null; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java index c8f0fa6b601ab..e674a809a2281 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java @@ -95,7 +95,7 @@ public int hashCode() { return Objects.hash(destinationFieldName, groupType, groupSource); } - public static GroupConfig fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { + public static GroupConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { String destinationFieldName; Type groupType; SingleGroupSource groupSource; @@ -124,7 +124,7 @@ public static GroupConfig fromXContent(final XContentParser parser, boolean igno switch (groupType) { case TERMS: - groupSource = TermsGroupSource.fromXContent(parser, ignoreUnknownFields); + groupSource = TermsGroupSource.fromXContent(parser, lenient); break; default: throw new ParsingException(parser.getTokenLocation(), "invalid grouping type: " + groupType); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java index a869a28fa775c..d6f6fe2c604ce 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java @@ -32,11 +32,11 @@ public class PivotConfig implements Writeable, ToXContentObject { private final List groups; private final AggregationConfig aggregationConfig; - private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); - private 
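
[Aside, illustration only — not part of the patch series.] The effect of the lenient flag added to AggregationConfig.fromXContent above: on a parse failure the raw source is kept and the config is merely flagged invalid, while strict parsing rethrows. The sketch assumes an ESTestCase context, matching AggregationConfigTests later in this patch.

    // Unknown aggregation type: lenient parsing keeps the source, strict parsing throws.
    String json = "{\"avg_rating\": {\"some_removed_agg\": {\"field\": \"rating\"}}}";
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
        AggregationConfig lenient = AggregationConfig.fromXContent(parser, true);
        assertFalse(lenient.isValid());   // source retained, parsed aggregations are null
    }
    try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
        expectThrows(Exception.class, () -> AggregationConfig.fromXContent(parser, false));
    }
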
static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, args -> { @SuppressWarnings("unchecked") List groups = (List) args[0]; @@ -45,9 +45,9 @@ private static ConstructingObjectParser createParser(boolean }); parser.declareObjectArray(constructorArg(), - (p, c) -> (GroupConfig.fromXContent(p, ignoreUnknownFields)), GROUP_BY); + (p, c) -> (GroupConfig.fromXContent(p, lenient)), GROUP_BY); - parser.declareObject(constructorArg(), (p, c) -> AggregationConfig.fromXContent(p), AGGREGATIONS); + parser.declareObject(constructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGREGATIONS); return parser; } @@ -116,7 +116,11 @@ public int hashCode() { return Objects.hash(groups, aggregationConfig); } - public static PivotConfig fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { - return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : PARSER.apply(parser, null); + public boolean isValid() { + return aggregationConfig.isValid(); + } + + public static PivotConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java index c4de273403af8..5518c8eb5052f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java @@ -18,11 +18,11 @@ public class TermsGroupSource extends SingleGroupSource { private static final String NAME = "data_frame_terms_group"; - private static final ConstructingObjectParser PARSER = createParser(false); + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); - private static ConstructingObjectParser createParser(boolean ignoreUnknownFields) { - ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, ignoreUnknownFields, (args) -> { + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { String field = (String) args[0]; return new TermsGroupSource(field); }); @@ -39,7 +39,7 @@ public TermsGroupSource(StreamInput in) throws IOException { super(in); } - public static TermsGroupSource fromXContent(final XContentParser parser, boolean ignoreUnknownFields) throws IOException { - return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : PARSER.apply(parser, null); + public static TermsGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionResponseTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionResponseTests.java new file mode 100644 index 0000000000000..0872eb3d7bd3e --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/action/GetDataFrameTransformsActionResponseTests.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.action; + +import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.watcher.watch.Payload.XContent; +import org.elasticsearch.xpack.dataframe.action.GetDataFrameTransformsAction.Response; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; +import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfigTests; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class GetDataFrameTransformsActionResponseTests extends ESTestCase { + + public void testInvalidTransforms() throws IOException { + List transforms = new ArrayList<>(); + + transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + transforms.add(DataFrameTransformConfigTests.randomInvalidDataFrameTransformConfig()); + transforms.add(DataFrameTransformConfigTests.randomDataFrameTransformConfig()); + transforms.add(DataFrameTransformConfigTests.randomInvalidDataFrameTransformConfig()); + + Response r = new Response(transforms); + XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + r.toXContent(builder, XContent.EMPTY_PARAMS); + Map responseAsMap = createParser(builder).map(); + assertEquals(2, XContentMapValues.extractValue("invalid_transforms.count", responseAsMap)); + List expectedInvalidTransforms = new ArrayList<>(); + expectedInvalidTransforms.add(transforms.get(1).getId()); + expectedInvalidTransforms.add(transforms.get(3).getId()); + assertEquals(expectedInvalidTransforms, XContentMapValues.extractValue("invalid_transforms.transforms", responseAsMap)); + assertWarnings(LoggerMessageFormat.format(Response.INVALID_TRANSFORMS_DEPRECATION_WARNING, 2)); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java index 8b3ad3d7794e6..0b7697c7e4cc7 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/AbstractSerializingDataFrameTestCase.java @@ -14,6 +14,8 @@ import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.BaseAggregationBuilder; import org.elasticsearch.test.AbstractSerializingTestCase; import org.junit.Before; @@ -38,10 +40,14 @@ public void registerAggregationNamedObjects() throws Exception { List namedWriteables = searchModule.getNamedWriteables(); namedWriteables.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, MockDeprecatedQueryBuilder.NAME, MockDeprecatedQueryBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(AggregationBuilder.class, MockDeprecatedAggregationBuilder.NAME, + MockDeprecatedAggregationBuilder::new)); List namedXContents = searchModule.getNamedXContents(); namedXContents.add(new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MockDeprecatedQueryBuilder.NAME), (p, c) -> MockDeprecatedQueryBuilder.fromXContent(p))); + namedXContents.add(new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, + new ParseField(MockDeprecatedAggregationBuilder.NAME), (p, c) -> MockDeprecatedAggregationBuilder.fromXContent(p))); namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); namedXContentRegistry = new NamedXContentRegistry(namedXContents); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java index 9ac86a565ba5a..48686ead113dc 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java @@ -22,6 +22,15 @@ public static DataFrameTransformConfig randomDataFrameTransformConfig() { randomAlphaOfLengthBetween(1, 10), QueryConfigTests.randomQueryConfig(), PivotConfigTests.randomPivotConfig()); } + public static DataFrameTransformConfig randomInvalidDataFrameTransformConfig() { + if (randomBoolean()) { + return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), QueryConfigTests.randomInvalidQueryConfig(), PivotConfigTests.randomPivotConfig()); + } // else + return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), + randomAlphaOfLengthBetween(1, 10), QueryConfigTests.randomQueryConfig(), PivotConfigTests.randomInvalidPivotConfig()); + } + @Before public void setUpOptionalId() { transformId = randomAlphaOfLengthBetween(1, 10); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedAggregationBuilder.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedAggregationBuilder.java new file mode 100644 index 0000000000000..d9d546942401b --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/MockDeprecatedAggregationBuilder.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Map; + +public class MockDeprecatedAggregationBuilder extends ValuesSourceAggregationBuilder { + + public static final String NAME = "deprecated_agg"; + public static final String DEPRECATION_MESSAGE = "expected deprecation message from MockDeprecatedAggregationBuilder"; + + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(MockDeprecatedAggregationBuilder.class)); + + protected MockDeprecatedAggregationBuilder(MockDeprecatedAggregationBuilder clone, Builder factoriesBuilder, + Map metaData) { + super(clone, factoriesBuilder, metaData); + } + + @Override + protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + return new MockDeprecatedAggregationBuilder(this, factoriesBuilder, metaData); + } + + public MockDeprecatedAggregationBuilder() { + super(NAME, ValuesSourceType.NUMERIC, ValueType.NUMERIC); + } + + /** + * Read from a stream. 
+ */ + protected MockDeprecatedAggregationBuilder(StreamInput in) throws IOException { + super(in, null, null); + } + + @Override + public String getType() { + return NAME; + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + } + + @Override + protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, + AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { + return null; + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + return null; + } + + @Override + protected int innerHashCode() { + return 0; + } + + @Override + protected boolean innerEquals(Object obj) { + return false; + } + + public static MockDeprecatedAggregationBuilder fromXContent(XContentParser p) { + deprecationLogger.deprecatedAndMaybeLog("deprecated_mock", DEPRECATION_MESSAGE); + return new MockDeprecatedAggregationBuilder(); + } +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java index c6cb6fbd07035..9328a11f049f6 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java @@ -6,23 +6,37 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.NamedObjectNotFoundException; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; +import org.elasticsearch.xpack.dataframe.transforms.MockDeprecatedAggregationBuilder; +import org.junit.Before; import java.io.IOException; import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; import java.util.Set; -import static org.hamcrest.Matchers.equalTo; - public class AggregationConfigTests extends AbstractSerializingDataFrameTestCase { + private boolean lenient; + public static AggregationConfig randomAggregationConfig() { + AggregatorFactories.Builder builder = new AggregatorFactories.Builder(); + Map source = null; // ensure that the unlikely does not happen: 2 aggs share the same name Set names = new HashSet<>(); @@ -33,19 +47,40 @@ public static AggregationConfig randomAggregationConfig() { } } - return new AggregationConfig(builder); + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + + XContentBuilder content = builder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + source = XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON).v2(); + } catch 
(IOException e) { + fail("failed to create random aggregation config: " + e.getMessage()); + } + + return new AggregationConfig(source, builder); + } + + public static AggregationConfig randomInvalidAggregationConfig() { + // create something broken but with a source + Map source = new LinkedHashMap<>(); + for (String key : randomUnique(() -> randomAlphaOfLengthBetween(1, 20), randomIntBetween(1, 10))) { + source.put(key, randomAlphaOfLengthBetween(1, 20)); + } + + return new AggregationConfig(source, null); + } + + @Before + public void setRandomFeatures() { + lenient = randomBoolean(); } @Override protected AggregationConfig doParseInstance(XContentParser parser) throws IOException { - // parseAggregators expects to be already inside the xcontent object - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - return AggregationConfig.fromXContent(parser); + return AggregationConfig.fromXContent(parser, lenient); } @Override protected AggregationConfig createTestInstance() { - return randomAggregationConfig(); + return lenient ? randomBoolean() ? randomAggregationConfig() : randomInvalidAggregationConfig() : randomAggregationConfig(); } @Override @@ -53,17 +88,46 @@ protected Reader instanceReader() { return AggregationConfig::new; } + public void testFailOnStrictPassOnLenient() throws IOException { + String source = "{\n" + + " \"avg_rating\": { \"some_removed_agg\": { \"field\": \"rating\" } }\n" + + " },\n" + + " {\n" + + " \"max_rating\": { \"max_rating\" : { \"field\" : \"rating\" } }\n" + + " }"; + + // lenient, passes but reports invalid + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + AggregationConfig aggregationConfig = AggregationConfig.fromXContent(parser, true); + assertFalse(aggregationConfig.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(NamedObjectNotFoundException.class, () -> AggregationConfig.fromXContent(parser, false)); + } + } + + public void testDeprecation() throws IOException { + String source = "{\"dep_agg\": {\"" + MockDeprecatedAggregationBuilder.NAME + "\" : {}}}"; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + AggregationConfig agg = AggregationConfig.fromXContent(parser, false); + assertTrue(agg.isValid()); + assertWarnings(MockDeprecatedAggregationBuilder.DEPRECATION_MESSAGE); + } + } + private static AggregationBuilder getRandomSupportedAggregation() { final int numberOfSupportedAggs = 4; switch (randomIntBetween(1, numberOfSupportedAggs)) { case 1: - return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)); + return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10)); case 2: - return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)); + return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10)); case 3: - return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)); + return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10)); case 4: - return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)); + return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10)); } return null; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java 
b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java index c6d835c420d7b..7e1205aa08b9a 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java @@ -26,6 +26,16 @@ public static PivotConfig randomPivotConfig() { return new PivotConfig(groups, AggregationConfigTests.randomAggregationConfig()); } + public static PivotConfig randomInvalidPivotConfig() { + List groups = new ArrayList<>(); + + for (int i = 0; i < randomIntBetween(1, 10); ++i) { + groups.add(GroupConfigTests.randomGroupConfig()); + } + + return new PivotConfig(groups, AggregationConfigTests.randomInvalidAggregationConfig()); + } + @Override protected PivotConfig doParseInstance(XContentParser parser) throws IOException { return PivotConfig.fromXContent(parser, false); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index 74bbe6f5acb73..a5c5fc526b350 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -186,7 +186,7 @@ private AggregationConfig parseAggregations(String json) throws IOException { DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); // parseAggregators expects to be already inside the xcontent object assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - return AggregationConfig.fromXContent(parser); + return AggregationConfig.fromXContent(parser, false); } private static void assertValidTransform(Client client, Pivot pivot) throws Exception { From cd7292c76c659af19e9de9a065f01fef2029cb83 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Sat, 9 Feb 2019 08:08:53 +0100 Subject: [PATCH 43/49] add support for max, value_count, cardinality and sum (#38569) add support for max, value_count, cardinality and sum --- .../integration/DataFramePivotRestIT.java | 74 +++++++++++++++++++ .../integration/DataFrameRestTestCase.java | 20 +++-- .../transforms/pivot/Aggregations.java | 6 +- .../transforms/pivot/PivotTests.java | 12 ++- 4 files changed, 97 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 71e5349bd4461..cf2bb34e942ec 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -75,6 +75,80 @@ public void testSimplePivotWithQuery() throws Exception { assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); } + public void testBiggerPivot() throws Exception { + String transformId = "biggerPivot"; + String dataFrameIndex = "bigger_pivot_reviews"; + + final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); + + String config = "{" + + " \"source\": 
\"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\","; + + + config += " \"pivot\": {" + + " \"group_by\": [ {" + + " \"reviewer\": {" + + " \"terms\": {" + + " \"field\": \"user_id\"" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } }," + + " \"sum_rating\": {" + + " \"sum\": {" + + " \"field\": \"stars\"" + + " } }," + + " \"cardinality_business\": {" + + " \"cardinality\": {" + + " \"field\": \"business_id\"" + + " } }," + + " \"min_rating\": {" + + " \"min\": {" + + " \"field\": \"stars\"" + + " } }," + + " \"max_rating\": {" + + " \"max\": {" + + " \"field\": \"stars\"" + + " } }," + + " \"count\": {" + + " \"value_count\": {" + + " \"field\": \"business_id\"" + + " } }" + + " } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect 27 documents as there shall be 27 user_id's + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(27, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + + // get and check some users + Map searchResult = getAsMap(dataFrameIndex + "/_search?q=reviewer:user_4"); + + assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); + Number actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.avg_rating", searchResult)).get(0); + assertEquals(3.878048780, actual.doubleValue(), 0.000001); + actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.sum_rating", searchResult)).get(0); + assertEquals(159, actual.longValue()); + actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.cardinality_business", searchResult)).get(0); + assertEquals(6, actual.longValue()); + actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.min_rating", searchResult)).get(0); + assertEquals(1, actual.longValue()); + actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.max_rating", searchResult)).get(0); + assertEquals(5, actual.longValue()); + actual = (Number) ((List) XContentMapValues.extractValue("hits.hits._source.count", searchResult)).get(0); + assertEquals(41, actual.longValue()); + } + private void startAndWaitForTransform(String transformId, String dataFrameIndex) throws IOException, Exception { // start the transform final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 1845d6c0f8a27..7bb5bee90400a 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -44,17 +44,15 @@ protected void createReviewsIndex() throws IOException { builder.startObject(); { builder.startObject("mappings") - .startObject("_doc") - .startObject("properties") - 
.startObject("user_id") - .field("type", "keyword") - .endObject() - .startObject("business_id") - .field("type", "keyword") - .endObject() - .startObject("stars") - .field("type", "integer") - .endObject() + .startObject("properties") + .startObject("user_id") + .field("type", "keyword") + .endObject() + .startObject("business_id") + .field("type", "keyword") + .endObject() + .startObject("stars") + .field("type", "integer") .endObject() .endObject() .endObject(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java index b59955cd84ba4..555deae36745f 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Aggregations.java @@ -25,7 +25,11 @@ private Aggregations() {} */ enum AggregationType { AVG("avg", "double"), - MAX("max", null); + CARDINALITY("cardinality", "long"), + VALUE_COUNT("value_count", "long"), + MAX("max", null), + MIN("min", null), + SUM("sum", null); private final String aggregationType; private final String targetMapping; diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index a5c5fc526b350..c25d42cf07261 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -28,15 +28,19 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.xpack.dataframe.transforms.pivot.Aggregations.AggregationType; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -47,8 +51,10 @@ public class PivotTests extends ESTestCase { private NamedXContentRegistry namedXContentRegistry; private Client client; - private final String[] supportedAggregations = { "avg", "max" }; - private final String[] unsupportedAggregations = { "min" }; + + private final Set supportedAggregations = Stream.of(AggregationType.values()).map(AggregationType::getName) + .collect(Collectors.toSet()); + private final String[] unsupportedAggregations = { "stats" }; @Before public void registerAggregationNamedObjects() throws Exception { @@ -173,7 +179,7 @@ private PivotConfig getValidPivotConfig(AggregationConfig aggregationConfig) thr } private AggregationConfig getValidAggregationConfig() throws IOException { - return getAggregationConfig(supportedAggregations[randomIntBetween(0, supportedAggregations.length - 1)]); + return getAggregationConfig(randomFrom(supportedAggregations)); } private AggregationConfig getAggregationConfig(String agg) throws IOException { From cedd78c3c94fb8f4803b1779d2b419f14e70481e Mon Sep 17 00:00:00 2001 From: Benjamin Trent 
Date: Mon, 11 Feb 2019 11:52:26 -0600 Subject: [PATCH 44/49] [ML-DataFrame] Add support for (date) histogram pivots (#38725) * [FEATURE][DATA_FRAME] Adding (date) histogram group_by support for pivot * adjusting format for merge * Update DataFramePivotRestIT.java --- .../integration/DataFramePivotRestIT.java | 77 +++++++- .../integration/DataFrameRestTestCase.java | 16 +- .../pivot/DateHistogramGroupSource.java | 177 ++++++++++++++++++ .../transforms/pivot/GroupConfig.java | 12 ++ .../pivot/HistogramGroupSource.java | 97 ++++++++++ .../transforms/pivot/SingleGroupSource.java | 8 +- .../pivot/DateHistogramGroupSourceTests.java | 51 +++++ .../pivot/HistogramGroupSourceTests.java | 38 ++++ 8 files changed, 469 insertions(+), 7 deletions(-) create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java create mode 100644 x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java create mode 100644 x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index cf2bb34e942ec..6cf07fd88e0c2 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ -75,6 +75,44 @@ public void testSimplePivotWithQuery() throws Exception { assertOnePivotValue(dataFrameIndex + "/_search?q=reviewer:user_26", 3.918918918); } + public void testHistogramPivot() throws Exception { + String transformId = "simpleHistogramPivot"; + String dataFrameIndex = "pivot_reviews_via_histogram"; + + final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); + + String config = "{" + + " \"source\": \"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\","; + + + config += " \"pivot\": {" + + " \"group_by\": [ {" + + " \"every_2\": {" + + " \"histogram\": {" + + " \"interval\": 2,\"field\":\"stars\"" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect 3 documents as there shall be 5 unique star values and we are bucketing every 2 starting at 0 + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(3, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + assertOnePivotValue(dataFrameIndex + "/_search?q=every_2:0.0", 1.0); + } + public void testBiggerPivot() throws Exception { String transformId = "biggerPivot"; String dataFrameIndex = "bigger_pivot_reviews"; @@ -149,6 
+187,43 @@ public void testBiggerPivot() throws Exception { assertEquals(41, actual.longValue()); } + public void testDateHistogramPivot() throws Exception { + String transformId = "simpleDateHistogramPivot"; + String dataFrameIndex = "pivot_reviews_via_date_histogram"; + + final Request createDataframeTransformRequest = new Request("PUT", DATAFRAME_ENDPOINT + transformId); + + String config = "{" + + " \"source\": \"reviews\"," + + " \"dest\": \"" + dataFrameIndex + "\","; + + + config += " \"pivot\": {" + + " \"group_by\": [ {" + + " \"by_day\": {" + + " \"date_histogram\": {" + + " \"interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-DD\"" + + " } } } ]," + + " \"aggregations\": {" + + " \"avg_rating\": {" + + " \"avg\": {" + + " \"field\": \"stars\"" + + " } } } }" + + "}"; + + createDataframeTransformRequest.setJsonEntity(config); + Map createDataframeTransformResponse = entityAsMap(client().performRequest(createDataframeTransformRequest)); + assertThat(createDataframeTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + assertTrue(indexExists(dataFrameIndex)); + + startAndWaitForTransform(transformId, dataFrameIndex); + + // we expect 21 documents as there shall be 21 days worth of docs + Map indexStats = getAsMap(dataFrameIndex + "/_stats"); + assertEquals(21, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); + assertOnePivotValue(dataFrameIndex + "/_search?q=by_day:2017-01-15", 3.82); + } + private void startAndWaitForTransform(String transformId, String dataFrameIndex) throws IOException, Exception { // start the transform final Request startTransformRequest = new Request("POST", DATAFRAME_ENDPOINT + transformId + "/_start"); @@ -160,8 +235,6 @@ private void startAndWaitForTransform(String transformId, String dataFrameIndex) refreshIndex(dataFrameIndex); } - - private void waitForDataFrameGeneration(String transformId) throws Exception { assertBusy(() -> { long generation = getDataFrameGeneration(transformId); diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index 7bb5bee90400a..d31c63de54279 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -45,6 +45,9 @@ protected void createReviewsIndex() throws IOException { { builder.startObject("mappings") .startObject("properties") + .startObject("timestamp") + .field("type", "date") + .endObject() .startObject("user_id") .field("type", "keyword") .endObject() @@ -66,11 +69,17 @@ protected void createReviewsIndex() throws IOException { // create index final StringBuilder bulk = new StringBuilder(); + int day = 10; for (int i = 0; i < numDocs; i++) { bulk.append("{\"index\":{\"_index\":\"reviews\"}}\n"); long user = Math.round(Math.pow(i * 31 % 1000, distributionTable[i % distributionTable.length]) % 27); int stars = distributionTable[(i * 33) % distributionTable.length]; long business = Math.round(Math.pow(user * stars, distributionTable[i % distributionTable.length]) % 13); + int hour = randomIntBetween(10, 20); + int min = randomIntBetween(30, 59); + int sec = randomIntBetween(30, 59); + + String date_string = "2017-01-" + day + "T" + 
hour + ":" + min + ":" + sec + "Z"; bulk.append("{\"user_id\":\"") .append("user_") .append(user) @@ -79,7 +88,9 @@ protected void createReviewsIndex() throws IOException { .append(business) .append("\",\"stars\":") .append(stars) - .append("}\n"); + .append(",\"timestamp\":\"") + .append(date_string) + .append("\"}\n"); if (i % 50 == 0) { bulk.append("\r\n"); @@ -89,6 +100,7 @@ protected void createReviewsIndex() throws IOException { client().performRequest(bulkRequest); // clear the builder bulk.setLength(0); + day += 1; } } bulk.append("\r\n"); @@ -209,4 +221,4 @@ protected static void wipeIndices() throws IOException { } } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java new file mode 100644 index 0000000000000..539b4d221304b --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; + +import java.io.IOException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.Objects; + + +public class DateHistogramGroupSource extends SingleGroupSource { + + private static final String NAME = "data_frame_date_histogram_group"; + private static final ParseField TIME_ZONE = new ParseField("time_zone"); + private static final ParseField FORMAT = new ParseField("format"); + + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private long interval = 0; + private DateHistogramInterval dateHistogramInterval; + private String format; + private ZoneId timeZone; + + public DateHistogramGroupSource(String field) { + super(field); + } + + public DateHistogramGroupSource(StreamInput in) throws IOException { + super(in); + this.interval = in.readLong(); + this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new); + this.timeZone = in.readOptionalZoneId(); + this.format = in.readOptionalString(); + } + + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { + String field = (String) args[0]; + return new DateHistogramGroupSource(field); + }); + + SingleGroupSource.declareValuesSourceFields(parser, null); + + parser.declareField((histogram, interval) -> { + if (interval instanceof Long) { + histogram.setInterval((long) interval); + } else { + histogram.setDateHistogramInterval((DateHistogramInterval) interval); + } + }, p -> 
{ + if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { + return p.longValue(); + } else { + return new DateHistogramInterval(p.text()); + } + }, HistogramGroupSource.INTERVAL, ObjectParser.ValueType.LONG); + + parser.declareField(DateHistogramGroupSource::setTimeZone, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ZoneId.of(p.text()); + } else { + return ZoneOffset.ofHours(p.intValue()); + } + }, TIME_ZONE, ObjectParser.ValueType.LONG); + + parser.declareString(DateHistogramGroupSource::setFormat, FORMAT); + return parser; + } + + public static DateHistogramGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + public long getInterval() { + return interval; + } + + public void setInterval(long interval) { + if (interval < 1) { + throw new IllegalArgumentException("[interval] must be greater than or equal to 1."); + } + this.interval = interval; + } + + public DateHistogramInterval getDateHistogramInterval() { + return dateHistogramInterval; + } + + public void setDateHistogramInterval(DateHistogramInterval dateHistogramInterval) { + if (dateHistogramInterval == null) { + throw new IllegalArgumentException("[dateHistogramInterval] must not be null"); + } + this.dateHistogramInterval = dateHistogramInterval; + } + + public String getFormat() { + return format; + } + + public void setFormat(String format) { + this.format = format; + } + + public ZoneId getTimeZone() { + return timeZone; + } + + public void setTimeZone(ZoneId timeZone) { + this.timeZone = timeZone; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(field); + out.writeLong(interval); + out.writeOptionalWriteable(dateHistogramInterval); + out.writeOptionalZoneId(timeZone); + out.writeOptionalString(format); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (field != null) { + builder.field(FIELD.getPreferredName(), field); + } + if (dateHistogramInterval == null) { + builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), interval); + } else { + builder.field(HistogramGroupSource.INTERVAL.getPreferredName(), dateHistogramInterval.toString()); + } + if (timeZone != null) { + builder.field(TIME_ZONE.getPreferredName(), timeZone.toString()); + } + if (format != null) { + builder.field(FORMAT.getPreferredName(), format); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final DateHistogramGroupSource that = (DateHistogramGroupSource) other; + + return Objects.equals(this.field, that.field) && + Objects.equals(interval, that.interval) && + Objects.equals(dateHistogramInterval, that.dateHistogramInterval) && + Objects.equals(timeZone, that.timeZone) && + Objects.equals(format, that.format); + } + + @Override + public int hashCode() { + return Objects.hash(field, interval, dateHistogramInterval, timeZone, format); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java index e674a809a2281..4792d59cdac59 100644 --- 
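For reference, a sketch of a single date_histogram group entry showing the optional time_zone and format settings handled by the parser above; the concrete values are illustrative, not taken from the patch:

    "by_hour": {
      "date_histogram": {
        "field": "timestamp",
        "interval": "1h",
        "time_zone": "UTC",
        "format": "yyyy-MM-dd HH"
      }
    }

At this point in the series such an entry still sits inside the group_by array, e.g. "group_by": [ { "by_hour": { ... } } ], as in testDateHistogramPivot above.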
a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java @@ -43,6 +43,12 @@ public GroupConfig(StreamInput in) throws IOException { case TERMS: groupSource = in.readOptionalWriteable(TermsGroupSource::new); break; + case HISTOGRAM: + groupSource = in.readOptionalWriteable(HistogramGroupSource::new); + break; + case DATE_HISTOGRAM: + groupSource = in.readOptionalWriteable(DateHistogramGroupSource::new); + break; default: throw new IOException("Unknown group type"); } @@ -126,6 +132,12 @@ public static GroupConfig fromXContent(final XContentParser parser, boolean leni case TERMS: groupSource = TermsGroupSource.fromXContent(parser, lenient); break; + case HISTOGRAM: + groupSource = HistogramGroupSource.fromXContent(parser, lenient); + break; + case DATE_HISTOGRAM: + groupSource = DateHistogramGroupSource.fromXContent(parser, lenient); + break; default: throw new ParsingException(parser.getTokenLocation(), "invalid grouping type: " + groupType); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java new file mode 100644 index 0000000000000..2e6101368619e --- /dev/null +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class HistogramGroupSource extends SingleGroupSource { + + static final ParseField INTERVAL = new ParseField("interval"); + private static final String NAME = "data_frame_histogram_group"; + private static final ConstructingObjectParser STRICT_PARSER = createParser(false); + private static final ConstructingObjectParser LENIENT_PARSER = createParser(true); + private final double interval; + + public HistogramGroupSource(String field, double interval) { + super(field); + if (interval <= 0) { + throw new IllegalArgumentException("[interval] must be greater than 0."); + } + this.interval = interval; + } + + public HistogramGroupSource(StreamInput in) throws IOException { + super(in); + interval = in.readDouble(); + } + + private static ConstructingObjectParser createParser(boolean lenient) { + ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, (args) -> { + String field = (String) args[0]; + double interval = (double) args[1]; + return new HistogramGroupSource(field, interval); + }); + SingleGroupSource.declareValuesSourceFields(parser, null); + parser.declareDouble(optionalConstructorArg(), INTERVAL); + return parser; + } + + public static HistogramGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { + return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(field); + out.writeDouble(interval); + } + + public double getInterval() { + return interval; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (field != null) { + builder.field(FIELD.getPreferredName(), field); + } + builder.field(INTERVAL.getPreferredName(), interval); + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + final HistogramGroupSource that = (HistogramGroupSource) other; + + return Objects.equals(this.field, that.field) && + Objects.equals(this.interval, that.interval); + } + + @Override + public int hashCode() { + return Objects.hash(field, interval); + } +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java index b049666ea8db8..5cd65124f0650 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java @@ -27,7 +27,9 @@ public abstract class SingleGroupSource> implements Writeable, ToXContentObject { public enum Type { - TERMS(0); + TERMS(0), + HISTOGRAM(1), + DATE_HISTOGRAM(2); private final byte id; @@ -53,10 +55,10 @@ public String value() { } } - private static final ParseField FIELD = new ParseField("field"); + protected static final ParseField FIELD = new ParseField("field"); // TODO: add script - private final String field; + protected final String field; static , T> void declareValuesSourceFields(AbstractObjectParser parser, ValueType targetValueType) { diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java new file mode 100644 index 0000000000000..8e7c6028af5ba --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSourceTests.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class DateHistogramGroupSourceTests extends AbstractSerializingTestCase { + + public static DateHistogramGroupSource randomDateHistogramGroupSource() { + String field = randomAlphaOfLengthBetween(1, 20); + DateHistogramGroupSource dateHistogramGroupSource = new DateHistogramGroupSource(field); + if (randomBoolean()) { + dateHistogramGroupSource.setInterval(randomLongBetween(1, 10_000)); + } else { + dateHistogramGroupSource.setDateHistogramInterval(randomFrom(DateHistogramInterval.days(10), + DateHistogramInterval.minutes(1), DateHistogramInterval.weeks(1))); + } + if (randomBoolean()) { + dateHistogramGroupSource.setTimeZone(randomZone()); + } + if (randomBoolean()) { + dateHistogramGroupSource.setFormat(randomAlphaOfLength(10)); + } + return dateHistogramGroupSource; + } + + @Override + protected DateHistogramGroupSource doParseInstance(XContentParser parser) throws IOException { + return DateHistogramGroupSource.fromXContent(parser, false); + } + + @Override + protected DateHistogramGroupSource createTestInstance() { + return randomDateHistogramGroupSource(); + } + + @Override + protected Reader instanceReader() { + return DateHistogramGroupSource::new; + } + +} diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java new file mode 100644 index 0000000000000..3e2581fca5249 --- /dev/null +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSourceTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.dataframe.transforms.pivot; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class HistogramGroupSourceTests extends AbstractSerializingTestCase { + + public static HistogramGroupSource randomHistogramGroupSource() { + String field = randomAlphaOfLengthBetween(1, 20); + double interval = randomDoubleBetween(Math.nextUp(0), Double.MAX_VALUE, false); + return new HistogramGroupSource(field, interval); + } + + @Override + protected HistogramGroupSource doParseInstance(XContentParser parser) throws IOException { + return HistogramGroupSource.fromXContent(parser, false); + } + + @Override + protected HistogramGroupSource createTestInstance() { + return randomHistogramGroupSource(); + } + + @Override + protected Reader instanceReader() { + return HistogramGroupSource::new; + } + +} From dd1943ce81ab6ac728915e26f207eca94bf37f65 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Tue, 12 Feb 2019 07:00:11 +0100 Subject: [PATCH 45/49] [ML-DataFrame] allow aggs as abbreviation for aggregations (#38706) allow aggs as abbreviation for aggregations --- .../elasticsearch/client/IndicesClientIT.java | 4 +- .../transforms/pivot/PivotConfig.java | 25 +++++++- .../transforms/pivot/PivotConfigTests.java | 61 +++++++++++++++++++ 3 files changed, 86 insertions(+), 4 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 686092f6a4712..0f152551ddc3e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -1697,7 +1697,7 @@ public void testCRUDIndexTemplateWithTypes() throws Exception { assertTrue(template2.mappings().containsKey("custom_doc_type")); List names = randomBoolean() - ? Arrays.asList("*-1", "template-2") + ? Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); GetIndexTemplatesRequest getBothRequest = new GetIndexTemplatesRequest(names); org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse getBoth = execute( @@ -1834,7 +1834,7 @@ public void testIndexTemplatesExist() throws Exception { { final List templateNames = randomBoolean() - ? Arrays.asList("*-1", "template-2") + ? 
Arrays.asList("*plate-1", "template-2") : Arrays.asList("template-*"); final IndexTemplatesExistRequest bothRequest = new IndexTemplatesExistRequest(templateNames); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java index d6f6fe2c604ce..06fca1eea2d3d 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java @@ -22,12 +22,15 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; public class PivotConfig implements Writeable, ToXContentObject { private static final String NAME = "data_frame_transform_pivot"; private static final ParseField GROUP_BY = new ParseField("group_by"); private static final ParseField AGGREGATIONS = new ParseField("aggregations"); + private static final ParseField AGGS = new ParseField("aggs"); + private final List groups; private final AggregationConfig aggregationConfig; @@ -40,14 +43,32 @@ private static ConstructingObjectParser createParser(boolean args -> { @SuppressWarnings("unchecked") List groups = (List) args[0]; - AggregationConfig aggregationConfig = (AggregationConfig) args[1]; + + // allow "aggs" and "aggregations" but require one to be specified + // if somebody specifies both: throw + AggregationConfig aggregationConfig = null; + if (args[1] != null) { + aggregationConfig = (AggregationConfig) args[1]; + } + + if (args[2] != null) { + if (aggregationConfig != null) { + throw new IllegalArgumentException("Found two aggregation definitions: [aggs] and [aggregations]"); + } + aggregationConfig = (AggregationConfig) args[2]; + } + if (aggregationConfig == null) { + throw new IllegalArgumentException("Required [aggregations]"); + } + return new PivotConfig(groups, aggregationConfig); }); parser.declareObjectArray(constructorArg(), (p, c) -> (GroupConfig.fromXContent(p, lenient)), GROUP_BY); - parser.declareObject(constructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGREGATIONS); + parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGREGATIONS); + parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGS); return parser; } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java index 7e1205aa08b9a..0ae59315e69f6 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java @@ -7,7 +7,9 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; import 
java.io.IOException; @@ -50,4 +52,63 @@ protected PivotConfig createTestInstance() { protected Reader instanceReader() { return PivotConfig::new; } + + public void testAggsAbbreviations() throws IOException { + String pivotAggs = "{" + + " \"group_by\": [ {" + + " \"id\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } } ]," + + " \"aggs\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } } }"; + + PivotConfig p1 = createPivotConfigFromString(pivotAggs); + String pivotAggregations = pivotAggs.replace("aggs", "aggregations"); + assertNotEquals(pivotAggs, pivotAggregations); + PivotConfig p2 = createPivotConfigFromString(pivotAggregations); + assertEquals(p1,p2); + } + + public void testMissingAggs() throws IOException { + String pivot = "{" + + " \"group_by\": [ {" + + " \"id\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } } ] }"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot)); + } + + public void testDoubleAggs() throws IOException { + String pivot = "{" + + " \"group_by\": [ {" + + " \"id\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } } ]," + + " \"aggs\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } }," + + " \"aggregations\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } }" + + "}"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot)); + } + + private PivotConfig createPivotConfigFromString(String json) throws IOException { + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + return PivotConfig.fromXContent(parser, false); + } } From 3122884858e15833cf3c6d5577bcd83fc0afb19e Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 14 Feb 2019 07:47:14 +0100 Subject: [PATCH 46/49] [ML-DataFrame] set meta data on data frame index (#38766) store some useful information in the `_meta` field of mappings --- .../xpack/core/dataframe/DataFrameField.java | 9 +++ .../integration/DataFrameMetaDataIT.java | 59 +++++++++++++++++++ .../dataframe/persistence/DataframeIndex.java | 22 ++++++- 3 files changed, 88 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index 41852aa972900..d2956e6559b0e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -29,6 +29,15 @@ public final class DataFrameField { // note: this is used to match tasks public static final String PERSISTENT_TASK_DESCRIPTION_PREFIX = "data_frame_"; + // strings for meta information + public static final String META_FIELDNAME = "_data_frame"; + public static final String CREATION_DATE_MILLIS = "creation_date_in_millis"; + public static final String VERSION = "version"; + public static final String CREATED = "created"; + public static final String CREATED_BY = "created_by"; + public static final String TRANSFORM = "transform"; + public static final String DATA_FRAME_SIGNATURE = "data-frame-transform"; + private DataFrameField() { } } diff --git 
a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java new file mode 100644 index 0000000000000..d278c78842c39 --- /dev/null +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameMetaDataIT.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.dataframe.integration; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.junit.Before; + +import java.io.IOException; +import java.util.Map; + +public class DataFrameMetaDataIT extends DataFrameRestTestCase { + + private boolean indicesCreated = false; + + // preserve indices in order to reuse source indices in several test cases + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Before + public void createIndexes() throws IOException { + + // it's not possible to run it as @BeforeClass as clients aren't initialized then, so we need this little hack + if (indicesCreated) { + return; + } + + createReviewsIndex(); + indicesCreated = true; + } + + public void testMetaData() throws IOException { + long testStarted = System.currentTimeMillis(); + createPivotReviewsTransform("test_meta", "pivot_reviews", null); + + Response mappingResponse = client().performRequest(new Request("GET", "pivot_reviews/_mapping")); + + Map mappingAsMap = entityAsMap(mappingResponse); + assertEquals(Version.CURRENT.toString(), + XContentMapValues.extractValue("pivot_reviews.mappings._meta._data_frame.version.created", mappingAsMap)); + assertTrue((Long) XContentMapValues.extractValue("pivot_reviews.mappings._meta._data_frame.creation_date_in_millis", + mappingAsMap) < System.currentTimeMillis()); + assertTrue((Long) XContentMapValues.extractValue("pivot_reviews.mappings._meta._data_frame.creation_date_in_millis", + mappingAsMap) > testStarted); + assertEquals("test_meta", + XContentMapValues.extractValue("pivot_reviews.mappings._meta._data_frame.transform", mappingAsMap)); + assertEquals("data-frame-transform", + XContentMapValues.extractValue("pivot_reviews.mappings._meta.created_by", mappingAsMap)); + } + +} diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java index 758027694f458..6605269475c79 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/persistence/DataframeIndex.java @@ -8,12 +8,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.client.Client; import 
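Pieced together from the assertions in DataFrameMetaDataIT above and the addMetaData builder that follows, the destination index mapping is expected to carry a _meta section roughly of this shape (the timestamp and version values are illustrative):

    "_meta": {
      "created_by": "data-frame-transform",
      "_data_frame": {
        "creation_date_in_millis": 1550000000000,
        "version": { "created": "7.1.0" },
        "transform": "test_meta"
      }
    }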
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.dataframe.transforms.DataFrameTransformConfig; @@ -29,6 +31,7 @@ public final class DataframeIndex { public static final String DOC_TYPE = "_doc"; private static final String PROPERTIES = "properties"; private static final String TYPE = "type"; + private static final String META = "_meta"; private DataframeIndex() { } @@ -41,7 +44,7 @@ public static void createDestinationIndex(Client client, DataFrameTransformConfi request.settings(Settings.builder() // <1> .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)); - request.mapping(DOC_TYPE, createMappingXContent(mappings)); + request.mapping(DOC_TYPE, createMappingXContent(mappings, transformConfig.getId())); client.execute(CreateIndexAction.INSTANCE, request, ActionListener.wrap(createIndexResponse -> { listener.onResponse(true); @@ -53,10 +56,11 @@ public static void createDestinationIndex(Client client, DataFrameTransformConfi })); } - private static XContentBuilder createMappingXContent(Map mappings) { + private static XContentBuilder createMappingXContent(Map mappings, String id) { try { XContentBuilder builder = jsonBuilder().startObject(); builder.startObject(DOC_TYPE); + addMetaData(builder, id); builder.startObject(PROPERTIES); for (Entry field : mappings.entrySet()) { builder.startObject(field.getKey()).field(TYPE, field.getValue()).endObject(); @@ -68,4 +72,18 @@ private static XContentBuilder createMappingXContent(Map mapping throw new RuntimeException(e); } } + + private static XContentBuilder addMetaData(XContentBuilder builder, String id) throws IOException { + builder.startObject(META); + builder.field(DataFrameField.CREATED_BY, DataFrameField.DATA_FRAME_SIGNATURE); + builder.startObject(DataFrameField.META_FIELDNAME); + builder.field(DataFrameField.CREATION_DATE_MILLIS, System.currentTimeMillis()); + builder.startObject(DataFrameField.VERSION); + builder.field(DataFrameField.CREATED, Version.CURRENT); + builder.endObject(); + builder.field(DataFrameField.TRANSFORM, id); + builder.endObject(); // META_FIELDNAME + builder.endObject(); // META + return builder; + } } From 2dabf4a88ee7d80ac39e4f5b91f0d3bdfcb2435b Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 14 Feb 2019 07:47:49 +0100 Subject: [PATCH 47/49] default to match_all if query is not given (#38865) add a default "match_all" query to make query support more visible --- .../transforms/DataFrameIndexer.java | 4 +- .../transforms/DataFrameTransformConfig.java | 15 ++++++- .../DataFrameTransformConfigTests.java | 42 +++++++++++++++++++ .../transforms/QueryConfigTests.java | 30 +++++++++++++ 4 files changed, 86 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java index 117b830c1532b..771e513f05047 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameIndexer.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.xpack.core.dataframe.transform.DataFrameIndexerTransformStats; @@ -46,8 +45,7 @@ public DataFrameIndexer(Executor executor, AtomicReference initial @Override protected void onStartJob(long now) { - QueryConfig queryConfig = getConfig().getQueryConfig(); - QueryBuilder queryBuilder = queryConfig != null ? queryConfig.getQuery() : new MatchAllQueryBuilder(); + QueryBuilder queryBuilder = getConfig().getQueryConfig().getQuery(); pivot = new Pivot(getConfig().getSource(), queryBuilder, getConfig().getPivotConfig()); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java index 268b975e5e23b..b5bd22f3a5e62 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfig.java @@ -17,12 +17,14 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.dataframe.transforms.pivot.PivotConfig; import java.io.IOException; +import java.util.Collections; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -57,7 +59,16 @@ private static ConstructingObjectParser create String id = args[0] != null ? 
(String) args[0] : optionalId; String source = (String) args[1]; String dest = (String) args[2]; - QueryConfig queryConfig = (QueryConfig) args[3]; + + // default handling: if the user does not specify a query, we default to match_all + QueryConfig queryConfig = null; + if (args[3] == null) { + queryConfig = new QueryConfig(Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap()), + new MatchAllQueryBuilder()); + } else { + queryConfig = (QueryConfig) args[3]; + } + PivotConfig pivotConfig = (PivotConfig) args[4]; return new DataFrameTransformConfig(id, source, dest, queryConfig, pivotConfig); }); @@ -83,7 +94,7 @@ public DataFrameTransformConfig(final String id, this.id = ExceptionsHelper.requireNonNull(id, DataFrameField.ID.getPreferredName()); this.source = ExceptionsHelper.requireNonNull(source, SOURCE.getPreferredName()); this.dest = ExceptionsHelper.requireNonNull(dest, DESTINATION.getPreferredName()); - this.queryConfig = queryConfig; + this.queryConfig = ExceptionsHelper.requireNonNull(queryConfig, QUERY.getPreferredName()); this.pivotConfig = pivotConfig; // at least one function must be defined diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java index 48686ead113dc..0bac3de558c2d 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java @@ -6,13 +6,21 @@ package org.elasticsearch.xpack.dataframe.transforms; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.dataframe.transforms.pivot.PivotConfigTests; import org.junit.Before; import java.io.IOException; +import static org.elasticsearch.test.TestMatchers.matchesPattern; + public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameTestCase { private String transformId; @@ -54,4 +62,38 @@ protected DataFrameTransformConfig createTestInstance() { protected Reader instanceReader() { return DataFrameTransformConfig::new; } + + public void testDefaultMatchAll( ) throws IOException { + String pivotTransform = "{" + + " \"source\" : \"src\"," + + " \"dest\" : \"dest\"," + + " \"pivot\" : {" + + " \"group_by\": [ {" + + " \"id\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } } ]," + + " \"aggs\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } } } }"; + + DataFrameTransformConfig dataFrameTransformConfig = createDataFrameTransformConfigFromString(pivotTransform, "test_match_all"); + assertNotNull(dataFrameTransformConfig.getQueryConfig()); + assertTrue(dataFrameTransformConfig.getQueryConfig().isValid()); + + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + XContentBuilder content = dataFrameTransformConfig.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + String pivotTransformWithIdAndDefaults = 
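In other words, a transform configuration that omits "query" entirely, for example this hypothetical minimal body:

    {
      "source": "src",
      "dest": "dest",
      "pivot": { ... }
    }

is now parsed as if it contained "query": { "match_all": {} }, and that default is also written back out on serialization, which is what the surrounding testDefaultMatchAll asserts via the match_all pattern.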
Strings.toString(content); + + assertThat(pivotTransformWithIdAndDefaults, matchesPattern(".*\"match_all\"\\s*:\\s*\\{\\}.*")); + } + } + + private DataFrameTransformConfig createDataFrameTransformConfigFromString(String json, String id) throws IOException { + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + return DataFrameTransformConfig.fromXContent(parser, id, false); + } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java index 8da8e75e5e85d..8d64eae9b44cc 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/QueryConfigTests.java @@ -101,6 +101,36 @@ public void testFailOnStrictPassOnLenient() throws IOException { } } + public void testFailOnEmptyQuery() throws IOException { + String source = ""; + + // lenient, passes but reports invalid + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + QueryConfig query = QueryConfig.fromXContent(parser, true); + assertFalse(query.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(IllegalArgumentException.class, () -> QueryConfig.fromXContent(parser, false)); + } + } + + public void testFailOnEmptyQueryClause() throws IOException { + String source = "{}"; + + // lenient, passes but reports invalid + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + QueryConfig query = QueryConfig.fromXContent(parser, true); + assertFalse(query.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(IllegalArgumentException.class, () -> QueryConfig.fromXContent(parser, false)); + } + } + public void testDeprecation() throws IOException { String source = "{\"" + MockDeprecatedQueryBuilder.NAME + "\" : {}}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { From 5758a11b81ec10fe063c4a0854d2aeca86db86ae Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 15 Feb 2019 07:03:58 +0100 Subject: [PATCH 48/49] [ML-DataFrame] remove array arguments for group_by (#38895) remove array arguments for group_by --- .../xpack/core/dataframe/DataFrameField.java | 3 + .../core/dataframe/DataFrameMessages.java | 6 + .../integration/DataFramePivotRestIT.java | 12 +- .../integration/DataFrameRestTestCase.java | 4 +- .../transforms/pivot/AggregationConfig.java | 26 ++- .../pivot/AggregationResultUtils.java | 10 +- .../pivot/DateHistogramGroupSource.java | 5 + .../transforms/pivot/GroupConfig.java | 161 +++++++++++------- .../pivot/HistogramGroupSource.java | 5 + .../dataframe/transforms/pivot/Pivot.java | 5 +- .../transforms/pivot/PivotConfig.java | 50 +++--- .../transforms/pivot/SchemaUtil.java | 4 +- .../transforms/pivot/SingleGroupSource.java | 6 + .../transforms/pivot/TermsGroupSource.java | 5 + .../DataFrameTransformConfigTests.java | 4 +- .../pivot/AggregationConfigTests.java | 17 +- .../pivot/AggregationResultUtilsTests.java | 46 +++-- .../transforms/pivot/GroupConfigTests.java | 71 +++++++- .../transforms/pivot/PivotConfigTests.java | 86 +++++++--- .../transforms/pivot/PivotTests.java | 16 +- 20 files 
changed, 363 insertions(+), 179 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java index d2956e6559b0e..9749cd915b54e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameField.java @@ -14,9 +14,12 @@ public final class DataFrameField { // common parse fields + public static final ParseField AGGREGATIONS = new ParseField("aggregations"); + public static final ParseField AGGS = new ParseField("aggs"); public static final ParseField ID = new ParseField("id"); public static final ParseField TRANSFORMS = new ParseField("transforms"); public static final ParseField COUNT = new ParseField("count"); + public static final ParseField GROUP_BY = new ParseField("group_by"); public static final ParseField TIMEOUT = new ParseField("timeout"); public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); public static final ParseField STATS_FIELD = new ParseField("stats"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java index e1b94425c3b06..a395dcdb3dfd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/dataframe/DataFrameMessages.java @@ -33,6 +33,10 @@ public class DataFrameMessages { "Failed to parse transform configuration for data frame transform [{0}]"; public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_NO_TRANSFORM = "Data frame transform configuration must specify exactly 1 function"; + public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY = + "Data frame pivot transform configuration must specify at least 1 group_by"; + public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION = + "Data frame pivot transform configuration must specify at least 1 aggregation"; public static final String DATA_FRAME_TRANSFORM_PIVOT_FAILED_TO_CREATE_COMPOSITE_AGGREGATION = "Failed to create composite aggregation from pivot function"; public static final String DATA_FRAME_TRANSFORM_CONFIGURATION_INVALID = @@ -40,6 +44,8 @@ public class DataFrameMessages { public static final String LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_QUERY = "Failed to parse query for data frame transform"; + public static final String LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_GROUP_BY = + "Failed to parse group_by for data frame pivot transform"; public static final String LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION = "Failed to parse aggregation for data frame pivot transform"; diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java index 6cf07fd88e0c2..eb8203e1dd2e2 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFramePivotRestIT.java @@ 
-87,11 +87,11 @@ public void testHistogramPivot() throws Exception { config += " \"pivot\": {" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"every_2\": {" + " \"histogram\": {" + " \"interval\": 2,\"field\":\"stars\"" - + " } } } ]," + + " } } }," + " \"aggregations\": {" + " \"avg_rating\": {" + " \"avg\": {" @@ -125,11 +125,11 @@ public void testBiggerPivot() throws Exception { config += " \"pivot\": {" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"reviewer\": {" + " \"terms\": {" + " \"field\": \"user_id\"" - + " } } } ]," + + " } } }," + " \"aggregations\": {" + " \"avg_rating\": {" + " \"avg\": {" @@ -199,11 +199,11 @@ public void testDateHistogramPivot() throws Exception { config += " \"pivot\": {" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"by_day\": {" + " \"date_histogram\": {" + " \"interval\": \"1d\",\"field\":\"timestamp\",\"format\":\"yyyy-MM-DD\"" - + " } } } ]," + + " } } }," + " \"aggregations\": {" + " \"avg_rating\": {" + " \"avg\": {" diff --git a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java index d31c63de54279..bd6812ae4896d 100644 --- a/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java +++ b/x-pack/plugin/data-frame/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/dataframe/integration/DataFrameRestTestCase.java @@ -125,11 +125,11 @@ protected void createPivotReviewsTransform(String transformId, String dataFrameI } config += " \"pivot\": {" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"reviewer\": {" + " \"terms\": {" + " \"field\": \"user_id\"" - + " } } } ]," + + " } } }," + " \"aggregations\": {" + " \"avg_rating\": {" + " \"avg\": {" diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java index d74b0cd36ffbb..54b6109520a5b 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfig.java @@ -70,19 +70,27 @@ public static AggregationConfig fromXContent(final XContentParser parser, boolea NamedXContentRegistry registry = parser.getXContentRegistry(); Map source = parser.mapOrdered(); AggregatorFactories.Builder aggregations = null; - try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); - XContentParser sourceParser = XContentType.JSON.xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, - BytesReference.bytes(xContentBuilder).streamInput())) { - sourceParser.nextToken(); - aggregations = AggregatorFactories.parseAggregators(sourceParser); - } catch (Exception e) { + + if (source.isEmpty()) { if (lenient) { - logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION, e); + logger.warn(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION); } else { - throw e; + throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_AGGREGATION); + } + } else { + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); + 
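Because group_by is now an object keyed by destination field name rather than an array, several groupings can be combined in a single pivot; a sketch using the field names from the REST tests above:

    "group_by": {
      "reviewer": { "terms": { "field": "user_id" } },
      "by_day": { "date_histogram": { "field": "timestamp", "interval": "1d" } }
    }

The reworked GroupConfig parser further down reads these entries in order into a LinkedHashMap, so each destination field name maps to exactly one group source.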
XContentParser sourceParser = XContentType.JSON.xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(xContentBuilder).streamInput())) { + sourceParser.nextToken(); + aggregations = AggregatorFactories.parseAggregators(sourceParser); + } catch (Exception e) { + if (lenient) { + logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_AGGREGATION, e); + } else { + throw e; + } } } - return new AggregationConfig(source, aggregations); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java index 234bbb8626c1a..f301e64053664 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtils.java @@ -27,22 +27,22 @@ final class AggregationResultUtils { * Extracts aggregation results from a composite aggregation and puts it into a map. * * @param agg The aggregation result - * @param sources The original sources used for querying + * @param groups The original groupings used for querying * @param aggregationBuilders the aggregation used for querying * @param dataFrameIndexerTransformStats stats collector * @return a map containing the results of the aggregation in a consumable way */ public static Stream> extractCompositeAggregationResults(CompositeAggregation agg, - Iterable sources, Collection aggregationBuilders, + GroupConfig groups, Collection aggregationBuilders, DataFrameIndexerTransformStats dataFrameIndexerTransformStats) { return agg.getBuckets().stream().map(bucket -> { dataFrameIndexerTransformStats.incrementNumDocuments(bucket.getDocCount()); Map document = new HashMap<>(); - for (GroupConfig source : sources) { - String destinationFieldName = source.getDestinationFieldName(); + groups.getGroups().keySet().forEach(destinationFieldName -> { document.put(destinationFieldName, bucket.getKey().get(destinationFieldName)); - } + }); + for (AggregationBuilder aggregationBuilder : aggregationBuilders) { String aggName = aggregationBuilder.getName(); diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java index 539b4d221304b..59efac481d4d1 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/DateHistogramGroupSource.java @@ -83,6 +83,11 @@ public static DateHistogramGroupSource fromXContent(final XContentParser parser, return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } + @Override + public Type getType() { + return Type.DATE_HISTOGRAM; + } + public long getInterval() { return interval; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java index 4792d59cdac59..8ace9d64d9737 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfig.java @@ -6,17 +6,29 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; +import org.elasticsearch.xpack.core.dataframe.DataFrameMessages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type; import java.io.IOException; +import java.util.LinkedHashMap; import java.util.Locale; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; @@ -26,58 +38,53 @@ */ public class GroupConfig implements Writeable, ToXContentObject { - private final String destinationFieldName; - private final SingleGroupSource.Type groupType; - private final SingleGroupSource groupSource; + private static final Logger logger = LogManager.getLogger(GroupConfig.class); - public GroupConfig(final String destinationFieldName, final SingleGroupSource.Type groupType, final SingleGroupSource groupSource) { - this.destinationFieldName = Objects.requireNonNull(destinationFieldName); - this.groupType = Objects.requireNonNull(groupType); - this.groupSource = Objects.requireNonNull(groupSource); + private final Map source; + private final Map> groups; + + public GroupConfig(final Map source, final Map> groups) { + this.source = ExceptionsHelper.requireNonNull(source, DataFrameField.GROUP_BY.getPreferredName()); + this.groups = groups; } public GroupConfig(StreamInput in) throws IOException { - destinationFieldName = in.readString(); - groupType = Type.fromId(in.readByte()); - switch (groupType) { - case TERMS: - groupSource = in.readOptionalWriteable(TermsGroupSource::new); - break; - case HISTOGRAM: - groupSource = in.readOptionalWriteable(HistogramGroupSource::new); - break; - case DATE_HISTOGRAM: - groupSource = in.readOptionalWriteable(DateHistogramGroupSource::new); - break; - default: - throw new IOException("Unknown group type"); - } + source = in.readMap(); + groups = in.readMap(StreamInput::readString, (stream) -> { + Type groupType = 
Type.fromId(stream.readByte()); + switch (groupType) { + case TERMS: + return new TermsGroupSource(stream); + case HISTOGRAM: + return new HistogramGroupSource(stream); + case DATE_HISTOGRAM: + return new DateHistogramGroupSource(stream); + default: + throw new IOException("Unknown group type"); + } + }); } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(destinationFieldName); - out.writeByte(groupType.getId()); - out.writeOptionalWriteable(groupSource); + public Map > getGroups() { + return groups; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.startObject(destinationFieldName); - - builder.field(groupType.value(), groupSource); - builder.endObject(); - builder.endObject(); - return builder; + public boolean isValid() { + return this.groups != null; } - public String getDestinationFieldName() { - return destinationFieldName; + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(source); + out.writeMap(groups, StreamOutput::writeString, (stream, value) -> { + stream.writeByte(value.getType().getId()); + value.writeTo(stream); + }); } - public SingleGroupSource getGroupSource() { - return groupSource; + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.map(source); } @Override @@ -92,19 +99,44 @@ public boolean equals(Object other) { final GroupConfig that = (GroupConfig) other; - return Objects.equals(this.destinationFieldName, that.destinationFieldName) && Objects.equals(this.groupType, that.groupType) - && Objects.equals(this.groupSource, that.groupSource); + return Objects.equals(this.source, that.source) && Objects.equals(this.groups, that.groups); } @Override public int hashCode() { - return Objects.hash(destinationFieldName, groupType, groupSource); + return Objects.hash(source, groups); } public static GroupConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { - String destinationFieldName; - Type groupType; - SingleGroupSource groupSource; + NamedXContentRegistry registry = parser.getXContentRegistry(); + Map source = parser.mapOrdered(); + Map> groups = null; + + if (source.isEmpty()) { + if (lenient) { + logger.warn(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY); + } else { + throw new IllegalArgumentException(DataFrameMessages.DATA_FRAME_TRANSFORM_CONFIGURATION_PIVOT_NO_GROUP_BY); + } + } else { + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); + XContentParser sourceParser = XContentType.JSON.xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(xContentBuilder).streamInput())) { + groups = parseGroupConfig(sourceParser, lenient); + } catch (Exception e) { + if (lenient) { + logger.warn(DataFrameMessages.LOG_DATA_FRAME_TRANSFORM_CONFIGURATION_BAD_GROUP_BY, e); + } else { + throw e; + } + } + } + return new GroupConfig(source, groups); + } + + private static Map> parseGroupConfig(final XContentParser parser, + boolean lenient) throws IOException { + LinkedHashMap> groups = new LinkedHashMap<>(); // be parsing friendly, whether the token needs to be advanced or not (similar to what ObjectParser does) XContentParser.Token token; @@ -116,19 +148,21 @@ public static GroupConfig fromXContent(final XContentParser parser, boolean leni throw new ParsingException(parser.getTokenLocation(), "Failed to parse object: 
Expected START_OBJECT but was: " + token); } } - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); - destinationFieldName = parser.currentName(); - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); - groupType = SingleGroupSource.Type.valueOf(parser.currentName().toUpperCase(Locale.ROOT)); - - token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); - - switch (groupType) { + + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String destinationFieldName = parser.currentName(); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + Type groupType = SingleGroupSource.Type.valueOf(parser.currentName().toUpperCase(Locale.ROOT)); + + token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + SingleGroupSource groupSource; + switch (groupType) { case TERMS: groupSource = TermsGroupSource.fromXContent(parser, lenient); break; @@ -140,11 +174,12 @@ public static GroupConfig fromXContent(final XContentParser parser, boolean leni break; default: throw new ParsingException(parser.getTokenLocation(), "invalid grouping type: " + groupType); - } + } - parser.nextToken(); - parser.nextToken(); + parser.nextToken(); - return new GroupConfig(destinationFieldName, groupType, groupSource); + groups.put(destinationFieldName, groupSource); + } + return groups; } } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java index 2e6101368619e..3c75dcdedc1b2 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/HistogramGroupSource.java @@ -49,6 +49,11 @@ private static ConstructingObjectParser createParser return parser; } + @Override + public Type getType() { + return Type.HISTOGRAM; + } + public static HistogramGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java index 26760d6f167cf..ca4a7ec8eb4fb 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/Pivot.java @@ -76,10 +76,11 @@ public SearchRequest buildSearchRequest(Map position) { public Stream> extractResults(CompositeAggregation agg, DataFrameIndexerTransformStats dataFrameIndexerTransformStats) { - Iterable sources = config.getGroups(); + + GroupConfig groups = config.getGroupConfig(); Collection aggregationBuilders = config.getAggregationConfig().getAggregatorFactories(); - return AggregationResultUtils.extractCompositeAggregationResults(agg, sources, aggregationBuilders, dataFrameIndexerTransformStats); + return AggregationResultUtils.extractCompositeAggregationResults(agg, groups, aggregationBuilders, dataFrameIndexerTransformStats); } private void runTestQuery(Client client, final ActionListener listener) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java index 06fca1eea2d3d..086268b169fbf 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfig.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -15,10 +14,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.xpack.core.dataframe.DataFrameField; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; -import java.util.List; +import java.util.Map.Entry; import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -27,12 +27,7 @@ public class PivotConfig implements Writeable, ToXContentObject { private static final String NAME = "data_frame_transform_pivot"; - private static final ParseField GROUP_BY = new ParseField("group_by"); - private static final ParseField AGGREGATIONS = new ParseField("aggregations"); - private static final ParseField AGGS = new ParseField("aggs"); - - - private final List groups; + private final GroupConfig groups; private final AggregationConfig aggregationConfig; private static final ConstructingObjectParser STRICT_PARSER = createParser(false); @@ -41,8 +36,7 @@ public class PivotConfig implements Writeable, ToXContentObject { private static ConstructingObjectParser createParser(boolean lenient) { ConstructingObjectParser parser = new ConstructingObjectParser<>(NAME, lenient, args -> { - @SuppressWarnings("unchecked") - List groups = (List) args[0]; + GroupConfig groups = (GroupConfig) args[0]; // allow "aggs" and 
"aggregations" but require one to be specified // if somebody specifies both: throw @@ -64,30 +58,30 @@ private static ConstructingObjectParser createParser(boolean return new PivotConfig(groups, aggregationConfig); }); - parser.declareObjectArray(constructorArg(), - (p, c) -> (GroupConfig.fromXContent(p, lenient)), GROUP_BY); + parser.declareObject(constructorArg(), + (p, c) -> (GroupConfig.fromXContent(p, lenient)), DataFrameField.GROUP_BY); - parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGREGATIONS); - parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), AGGS); + parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGREGATIONS); + parser.declareObject(optionalConstructorArg(), (p, c) -> AggregationConfig.fromXContent(p, lenient), DataFrameField.AGGS); return parser; } - public PivotConfig(final List groups, final AggregationConfig aggregationConfig) { - this.groups = ExceptionsHelper.requireNonNull(groups, GROUP_BY.getPreferredName()); - this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, AGGREGATIONS.getPreferredName()); + public PivotConfig(final GroupConfig groups, final AggregationConfig aggregationConfig) { + this.groups = ExceptionsHelper.requireNonNull(groups, DataFrameField.GROUP_BY.getPreferredName()); + this.aggregationConfig = ExceptionsHelper.requireNonNull(aggregationConfig, DataFrameField.AGGREGATIONS.getPreferredName()); } public PivotConfig(StreamInput in) throws IOException { - this.groups = in.readList(GroupConfig::new); + this.groups = new GroupConfig(in); this.aggregationConfig = new AggregationConfig(in); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(GROUP_BY.getPreferredName(), groups); - builder.field(AGGREGATIONS.getPreferredName(), aggregationConfig); + builder.field(DataFrameField.GROUP_BY.getPreferredName(), groups); + builder.field(DataFrameField.AGGREGATIONS.getPreferredName(), aggregationConfig); builder.endObject(); return builder; } @@ -96,16 +90,22 @@ public void toCompositeAggXContent(XContentBuilder builder, Params params) throw builder.startObject(); builder.field(CompositeAggregationBuilder.SOURCES_FIELD_NAME.getPreferredName()); builder.startArray(); - for (GroupConfig group : groups) { - group.toXContent(builder, params); + + for (Entry> groupBy : groups.getGroups().entrySet()) { + builder.startObject(); + builder.startObject(groupBy.getKey()); + builder.field(groupBy.getValue().getType().value(), groupBy.getValue()); + builder.endObject(); + builder.endObject(); } + builder.endArray(); builder.endObject(); // sources } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeList(groups); + groups.writeTo(out); aggregationConfig.writeTo(out); } @@ -113,7 +113,7 @@ public AggregationConfig getAggregationConfig() { return aggregationConfig; } - public Iterable getGroups() { + public GroupConfig getGroupConfig() { return groups; } @@ -138,7 +138,7 @@ public int hashCode() { } public boolean isValid() { - return aggregationConfig.isValid(); + return groups.isValid() && aggregationConfig.isValid(); } public static PivotConfig fromXContent(final XContentParser parser, boolean lenient) throws IOException { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java 
b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java index 55df001d6cc63..619e4514d7674 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SchemaUtil.java @@ -34,8 +34,8 @@ public static void deduceMappings(final Client client, final PivotConfig config, // collects the fieldnames and target fieldnames used for grouping Map fieldNamesForGrouping = new HashMap<>(); - config.getGroups().forEach(group -> { - fieldNamesForGrouping.put(group.getDestinationFieldName(), group.getGroupSource().getField()); + config.getGroupConfig().getGroups().forEach((destinationFieldName, group) -> { + fieldNamesForGrouping.put(destinationFieldName, group.getField()); }); for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java index 5cd65124f0650..9b309e59af4c3 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/SingleGroupSource.java @@ -45,6 +45,10 @@ public static Type fromId(byte id) { switch (id) { case 0: return TERMS; + case 1: + return HISTOGRAM; + case 2: + return DATE_HISTOGRAM; default: throw new IllegalArgumentException("unknown type"); } @@ -89,6 +93,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(field); } + public abstract Type getType(); + public String getField() { return field; } diff --git a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java index 5518c8eb5052f..b3073f0e1de21 100644 --- a/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java +++ b/x-pack/plugin/data-frame/src/main/java/org/elasticsearch/xpack/dataframe/transforms/pivot/TermsGroupSource.java @@ -39,6 +39,11 @@ public TermsGroupSource(StreamInput in) throws IOException { super(in); } + @Override + public Type getType() { + return Type.TERMS; + } + public static TermsGroupSource fromXContent(final XContentParser parser, boolean lenient) throws IOException { return lenient ? 
LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null); } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java index 0bac3de558c2d..daabe1cccaa39 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/DataFrameTransformConfigTests.java @@ -68,11 +68,11 @@ public void testDefaultMatchAll( ) throws IOException { + " \"source\" : \"src\"," + " \"dest\" : \"dest\"," + " \"pivot\" : {" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"id\": {" + " \"terms\": {" + " \"field\": \"id\"" - + "} } } ]," + + "} } }," + " \"aggs\": {" + " \"avg\": {" + " \"avg\": {" diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java index 9328a11f049f6..ccf9090182349 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationConfigTests.java @@ -40,7 +40,7 @@ public static AggregationConfig randomAggregationConfig() { // ensure that the unlikely does not happen: 2 aggs share the same name Set names = new HashSet<>(); - for (int i = 1; i < randomIntBetween(1, 20); ++i) { + for (int i = 0; i < randomIntBetween(1, 20); ++i) { AggregationBuilder aggBuilder = getRandomSupportedAggregation(); if (names.add(aggBuilder.getName())) { builder.addAggregator(aggBuilder); @@ -88,6 +88,21 @@ protected Reader instanceReader() { return AggregationConfig::new; } + public void testEmptyAggregation() throws IOException { + String source = "{}"; + + // lenient, passes but reports invalid + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + AggregationConfig aggregationConfig = AggregationConfig.fromXContent(parser, true); + assertFalse(aggregationConfig.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(IllegalArgumentException.class, () -> AggregationConfig.fromXContent(parser, false)); + } + } + public void testFailOnStrictPassOnLenient() throws IOException { String source = "{\n" + " \"avg_rating\": { \"some_removed_agg\": { \"field\": \"rating\" } }\n" + diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java index a0d5c4851212e..49829750e954a 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/AggregationResultUtilsTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ContextParser; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -54,7 +55,6 @@ import java.util.stream.Collectors; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; public class AggregationResultUtilsTests extends ESTestCase { @@ -93,9 +93,10 @@ protected NamedXContentRegistry xContentRegistry() { public void testExtractCompositeAggregationResults() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); - List sources = Collections.singletonList( - new GroupConfig(targetField, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")) - ); + GroupConfig groupBy = parseGroupConfig("{ \"" + targetField + "\" : {" + + "\"terms\" : {" + + " \"field\" : \"doesn't_matter_for_this_test\"" + + "} } }"); String aggName = randomAlphaOfLengthBetween(5, 10); String aggTypedName = "avg#" + aggName; @@ -139,17 +140,23 @@ aggTypedName, asMap( ) ); - executeTest(sources, aggregationBuilders, input, expected, 20); + executeTest(groupBy, aggregationBuilders, input, expected, 20); } public void testExtractCompositeAggregationResultsMultiSources() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); String targetField2 = randomAlphaOfLengthBetween(5, 10) + "_2"; - List sources = asList( - new GroupConfig(targetField, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")), - new GroupConfig(targetField2, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")) - ); + GroupConfig groupBy = parseGroupConfig("{" + + "\"" + targetField + "\" : {" + + " \"terms\" : {" + + " \"field\" : \"doesn't_matter_for_this_test\"" + + " } }," + + "\"" + targetField2 + "\" : {" + + " \"terms\" : {" + + " \"field\" : \"doesn't_matter_for_this_test\"" + + " } }" + + "}"); String aggName = randomAlphaOfLengthBetween(5, 10); String aggTypedName = "avg#" + aggName; @@ -214,15 +221,16 @@ aggTypedName, asMap( aggName, 12.55 ) ); - executeTest(sources, aggregationBuilders, input, expected, 10); + executeTest(groupBy, aggregationBuilders, input, expected, 10); } public void testExtractCompositeAggregationResultsMultiAggregations() throws IOException { String targetField = randomAlphaOfLengthBetween(5, 10); - List sources = Collections.singletonList( - new GroupConfig(targetField, TERMS, new TermsGroupSource("doesn't_matter_for_this_test")) - ); + GroupConfig groupBy = parseGroupConfig("{\"" + targetField + "\" : {" + + "\"terms\" : {" + + " \"field\" : \"doesn't_matter_for_this_test\"" + + "} } }"); String aggName = randomAlphaOfLengthBetween(5, 10); String aggTypedName = "avg#" + aggName; @@ -278,10 +286,10 @@ aggTypedName2, asMap( aggName2, -2.44 ) ); - executeTest(sources, aggregationBuilders, input, expected, 200); + executeTest(groupBy, aggregationBuilders, input, expected, 200); } - private void executeTest(Iterable sources, Collection aggregationBuilders, Map input, + private void executeTest(GroupConfig groups, Collection aggregationBuilders, Map input, List> expected, long expectedDocCounts) throws IOException { DataFrameIndexerTransformStats stats = new DataFrameIndexerTransformStats(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); @@ -290,13 +298,19 @@ private void executeTest(Iterable sources, Collection> result = AggregationResultUtils - .extractCompositeAggregationResults(agg, sources, aggregationBuilders, stats).collect(Collectors.toList()); + .extractCompositeAggregationResults(agg, 
groups, aggregationBuilders, stats).collect(Collectors.toList()); assertEquals(expected, result); assertEquals(expectedDocCounts, stats.getNumDocuments()); } } + private GroupConfig parseGroupConfig(String json) throws IOException { + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); + return GroupConfig.fromXContent(parser, false); + } + static Map asMap(Object... fields) { assert fields.length % 2 == 0; final Map map = new HashMap<>(); diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java index e3d1ed3901558..72b0af31c6d81 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/GroupConfigTests.java @@ -6,19 +6,56 @@ package org.elasticsearch.xpack.dataframe.transforms.pivot; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type; import java.io.IOException; - -import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; +import java.util.Collections; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; public class GroupConfigTests extends AbstractSerializingTestCase { public static GroupConfig randomGroupConfig() { - String targetFieldName = randomAlphaOfLengthBetween(1, 20); - return new GroupConfig(targetFieldName, TERMS, TermsGroupSourceTests.randomTermsGroupSource()); + Map source = new LinkedHashMap<>(); + Map> groups = new LinkedHashMap<>(); + + // ensure that the unlikely does not happen: 2 group_by's share the same name + Set names = new HashSet<>(); + for (int i = 0; i < randomIntBetween(1, 20); ++i) { + String targetFieldName = randomAlphaOfLengthBetween(1, 20); + if (names.add(targetFieldName)) { + SingleGroupSource groupBy; + Type type = randomFrom(SingleGroupSource.Type.values()); + switch (type) { + case TERMS: + groupBy = TermsGroupSourceTests.randomTermsGroupSource(); + break; + case HISTOGRAM: + groupBy = HistogramGroupSourceTests.randomHistogramGroupSource(); + break; + case DATE_HISTOGRAM: + default: + groupBy = DateHistogramGroupSourceTests.randomDateHistogramGroupSource(); + } + + source.put(targetFieldName, Collections.singletonMap(type.value(), getSource(groupBy))); + groups.put(targetFieldName, groupBy); + } + } + + return new GroupConfig(source, groups); } @Override @@ -35,4 +72,30 @@ protected GroupConfig createTestInstance() { protected Reader instanceReader() { return GroupConfig::new; } + + public void testEmptyGroupBy() throws IOException { + String source = "{}"; + + // lenient, passes but reports invalid + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, source)) { + GroupConfig groupConfig = GroupConfig.fromXContent(parser, true); + assertFalse(groupConfig.isValid()); + } + + // strict throws + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + expectThrows(IllegalArgumentException.class, () -> GroupConfig.fromXContent(parser, false)); + } + } + + private static Map getSource(SingleGroupSource groupSource) { + try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) { + XContentBuilder content = groupSource.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + return XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON).v2(); + } catch (IOException e) { + // should not happen + fail("failed to create random single group source"); + } + return null; + } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java index 0ae59315e69f6..2397c088293f4 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotConfigTests.java @@ -13,29 +13,15 @@ import org.elasticsearch.xpack.dataframe.transforms.AbstractSerializingDataFrameTestCase; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; public class PivotConfigTests extends AbstractSerializingDataFrameTestCase { public static PivotConfig randomPivotConfig() { - List groups = new ArrayList<>(); - - for (int i = 0; i < randomIntBetween(1, 10); ++i) { - groups.add(GroupConfigTests.randomGroupConfig()); - } - - return new PivotConfig(groups, AggregationConfigTests.randomAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomAggregationConfig()); } public static PivotConfig randomInvalidPivotConfig() { - List groups = new ArrayList<>(); - - for (int i = 0; i < randomIntBetween(1, 10); ++i) { - groups.add(GroupConfigTests.randomGroupConfig()); - } - - return new PivotConfig(groups, AggregationConfigTests.randomInvalidAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), AggregationConfigTests.randomInvalidAggregationConfig()); } @Override @@ -55,42 +41,86 @@ protected Reader instanceReader() { public void testAggsAbbreviations() throws IOException { String pivotAggs = "{" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"id\": {" + " \"terms\": {" + " \"field\": \"id\"" - + "} } } ]," + + "} } }," + " \"aggs\": {" + " \"avg\": {" + " \"avg\": {" + " \"field\": \"points\"" + "} } } }"; - PivotConfig p1 = createPivotConfigFromString(pivotAggs); + PivotConfig p1 = createPivotConfigFromString(pivotAggs, false); String pivotAggregations = pivotAggs.replace("aggs", "aggregations"); assertNotEquals(pivotAggs, pivotAggregations); - PivotConfig p2 = createPivotConfigFromString(pivotAggregations); + PivotConfig p2 = createPivotConfigFromString(pivotAggregations, false); assertEquals(p1,p2); } public void testMissingAggs() throws IOException { String pivot = "{" - + " \"group_by\": [ {" + + " \"group_by\": {" + + " \"id\": {" + + " \"terms\": {" + + " \"field\": \"id\"" + + "} } } }"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); + } + + public void testEmptyAggs() throws IOException { 
+ String pivot = "{" + + " \"group_by\": {" + " \"id\": {" + " \"terms\": {" + " \"field\": \"id\"" - + "} } } ] }"; + + "} } }," + + "\"aggs\": {}" + + " }"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); + + // lenient passes but reports invalid + PivotConfig pivotConfig = createPivotConfigFromString(pivot, true); + assertFalse(pivotConfig.isValid()); + } + + public void testEmptyGroupBy() throws IOException { + String pivot = "{" + + " \"group_by\": {}," + + " \"aggs\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } } }"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); + + // lenient passes but reports invalid + PivotConfig pivotConfig = createPivotConfigFromString(pivot, true); + assertFalse(pivotConfig.isValid()); + } - expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot)); + public void testMissingGroupBy() throws IOException { + String pivot = "{" + + " \"aggs\": {" + + " \"avg\": {" + + " \"avg\": {" + + " \"field\": \"points\"" + + "} } } }"; + + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); } public void testDoubleAggs() throws IOException { String pivot = "{" - + " \"group_by\": [ {" + + " \"group_by\": {" + " \"id\": {" + " \"terms\": {" + " \"field\": \"id\"" - + "} } } ]," + + "} } }," + " \"aggs\": {" + " \"avg\": {" + " \"avg\": {" @@ -103,12 +133,12 @@ public void testDoubleAggs() throws IOException { + "} } }" + "}"; - expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot)); + expectThrows(IllegalArgumentException.class, () -> createPivotConfigFromString(pivot, false)); } - private PivotConfig createPivotConfigFromString(String json) throws IOException { + private PivotConfig createPivotConfigFromString(String json, boolean lenient) throws IOException { final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json); - return PivotConfig.fromXContent(parser, false); + return PivotConfig.fromXContent(parser, lenient); } } diff --git a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java index c25d42cf07261..4845085eba337 100644 --- a/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/data-frame/src/test/java/org/elasticsearch/xpack/dataframe/transforms/pivot/PivotTests.java @@ -42,9 +42,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Arrays.asList; import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.dataframe.transforms.pivot.SingleGroupSource.Type.TERMS; import static org.hamcrest.Matchers.equalTo; public class PivotTests extends ESTestCase { @@ -161,21 +159,11 @@ protected void } private PivotConfig getValidPivotConfig() throws IOException { - List sources = asList( - new GroupConfig("terms", TERMS, new TermsGroupSource("terms")), - new GroupConfig("terms", TERMS, new TermsGroupSource("terms")) - ); - - return new PivotConfig(sources, getValidAggregationConfig()); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), getValidAggregationConfig()); } private PivotConfig getValidPivotConfig(AggregationConfig aggregationConfig) 
throws IOException { - List sources = asList( - new GroupConfig("terms", TERMS, new TermsGroupSource("terms")), - new GroupConfig("terms", TERMS, new TermsGroupSource("terms")) - ); - - return new PivotConfig(sources, aggregationConfig); + return new PivotConfig(GroupConfigTests.randomGroupConfig(), aggregationConfig); } private AggregationConfig getValidAggregationConfig() throws IOException { From 614cb0f408ff56d10f7bfa56e49b9221ef0993df Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 15 Feb 2019 09:22:22 +0100 Subject: [PATCH 49/49] remove superflous import --- x-pack/plugin/data-frame/build.gradle | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/data-frame/build.gradle b/x-pack/plugin/data-frame/build.gradle index caba438ffaf75..bff8118bfc425 100644 --- a/x-pack/plugin/data-frame/build.gradle +++ b/x-pack/plugin/data-frame/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.BuildPlugin - evaluationDependsOn(xpackModule('core')) apply plugin: 'elasticsearch.esplugin'
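
For reference, a minimal sketch (not part of the patch series above) of how the reworked object-style "group_by" is consumed: "group_by" is now a JSON object keyed by destination field name, each value naming exactly one group source (terms, histogram or date_histogram), instead of the former array of single-entry objects. The sketch mirrors the parseGroupConfig helper added to AggregationResultUtilsTests; the class name GroupConfigParsingSketch and the method parseGroupBy are illustrative only, and NamedXContentRegistry.EMPTY is assumed to be sufficient here because the individual group sources are parsed with plain object parsers rather than named XContent.

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.dataframe.transforms.pivot.GroupConfig;

import java.io.IOException;

public class GroupConfigParsingSketch {

    // Parse an object-style group_by definition into a GroupConfig.
    // Each key is a destination field name, each value holds exactly one
    // group source: terms, histogram or date_histogram.
    static GroupConfig parseGroupBy(String json, boolean lenient) throws IOException {
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            return GroupConfig.fromXContent(parser, lenient);
        }
    }

    public static void main(String[] args) throws IOException {
        // Two terms groups keyed by their destination field names; previously
        // this would have been written as an array of single-entry objects.
        String groupBy = "{"
                + " \"id\":   { \"terms\": { \"field\": \"id\" } },"
                + " \"user\": { \"terms\": { \"field\": \"user.keyword\" } }"
                + "}";

        GroupConfig config = parseGroupBy(groupBy, false); // strict parsing
        System.out.println(config.isValid());              // true
        System.out.println(config.getGroups().keySet());   // [id, user]
    }
}

Because GroupConfig builds its groups as a LinkedHashMap from parser.mapOrdered(), the map form keeps the declared order of the groups and allows direct lookup by destination field name, which SchemaUtil.deduceMappings and PivotConfig.toCompositeAggXContent above iterate over directly.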