diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index 30e79d1dce2fa..8a04c229de261 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -28,10 +28,12 @@
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.client.ml.FlushJobRequest;
import java.io.IOException;
@@ -126,6 +128,36 @@ static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOExceptio
return request;
}
+ static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(flushJobRequest.getJobId())
+ .addPathPartAsIs("_flush")
+ .build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
+ static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
+ String endpoint = new EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("ml")
+ .addPathPartAsIs("anomaly_detectors")
+ .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
+ .addPathPartAsIs("_stats")
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ RequestConverters.Params params = new RequestConverters.Params(request);
+ if (getJobStatsRequest.isAllowNoJobs() != null) {
+ params.putParam("allow_no_jobs", Boolean.toString(getJobStatsRequest.isAllowNoJobs()));
+ }
+ return request;
+ }
+
static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
index a972f760d2fde..ac44f16b80b16 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
@@ -19,6 +19,11 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
@@ -288,6 +293,101 @@ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, A
Collections.emptySet());
}
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ */
+ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Flushes internally buffered data for the given Machine Learning Job asynchronously ensuring all data sent to the job has been processed.
+ * This may cause new results to be calculated depending on the contents of the buffer
+ *
+ * Both flush and close operations are similar,
+ * however the flush is more efficient if you are expecting to send more data for analysis.
+ *
+ * When flushing, the job remains open and is available to continue analyzing data.
+ * A close operation additionally prunes and persists the model state to disk and the
+ * job must be opened again before analyzing further data.
+ *
+ *
+ * For additional info
+ * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html">Flush ML job documentation</a>
+ *
+ * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified upon request completion
+ */
+ public void flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::flushJob,
+ options,
+ FlushJobResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs
+ *
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return {@link GetJobStatsResponse} response object containing
+ * the {@link JobStats} objects and the number of jobs found
+ * @throws IOException when there is a serialization issue sending the request or receiving the response
+ */
+ public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ Collections.emptySet());
+ }
+
+ /**
+ * Gets usage statistics for one or more Machine Learning jobs, asynchronously.
+ *
+ *
+ * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options
+ * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion
+ */
+ public void getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener<GetJobStatsResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request,
+ MLRequestConverters::getJobStats,
+ options,
+ GetJobStatsResponse::fromXContent,
+ listener,
+ Collections.emptySet());
+ }
+
/**
* Gets the records for a Machine Learning Job.
*
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index edb18b89d1c11..26322e7e6589d 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -107,6 +107,7 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
+import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.XPackUsageRequest;
import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
@@ -832,6 +833,21 @@ static Request clusterHealth(ClusterHealthRequest healthRequest) {
return request;
}
+ static Request reindex(ReindexRequest reindexRequest) throws IOException {
+ String endpoint = new EndpointBuilder().addPathPart("_reindex").build();
+ Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+ Params params = new Params(request)
+ .withRefresh(reindexRequest.isRefresh())
+ .withTimeout(reindexRequest.getTimeout())
+ .withWaitForActiveShards(reindexRequest.getWaitForActiveShards(), ActiveShardCount.DEFAULT);
+
+ if (reindexRequest.getScrollTime() != null) {
+ params.putParam("scroll", reindexRequest.getScrollTime());
+ }
+ request.setEntity(createEntity(reindexRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
@@ -1140,10 +1156,10 @@ static Request xPackInfo(XPackInfoRequest infoRequest) {
static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException {
String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
- request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
+ request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE));
return request;
- }
-
+ }
+
static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) {
String endpoint = new EndpointBuilder()
.addPathPartAsIs("_xpack")
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index 3145c2c771c66..d82d44d7e4634 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -65,6 +65,8 @@
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.index.rankeval.RankEvalResponse;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;
@@ -323,7 +325,7 @@ public final XPackClient xpack() {
* Watcher APIs on elastic.co for more information.
*/
public WatcherClient watcher() { return watcherClient; }
-
+
/**
* Provides methods for accessing the Elastic Licensed Graph explore API that
* is shipped with the default distribution of Elasticsearch. All of
@@ -332,7 +334,7 @@ public final XPackClient xpack() {
* See the
* Graph API on elastic.co for more information.
*/
- public GraphClient graph() { return graphClient; }
+ public GraphClient graph() { return graphClient; }
/**
* Provides methods for accessing the Elastic Licensed Licensing APIs that
@@ -415,6 +417,33 @@ public final void bulkAsync(BulkRequest bulkRequest, ActionListener<BulkResponse> listener
+ /**
+ * Executes a reindex request.
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html">Reindex API on elastic.co</a>
+ * @param reindexRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return the response
+ * @throws IOException in case there is a problem sending the request or parsing back the response
+ */
+ public final BulkByScrollResponse reindex(ReindexRequest reindexRequest, RequestOptions options) throws IOException {
+ return performRequestAndParseEntity(
+ reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, emptySet()
+ );
+ }
+
+ /**
+ * Asynchronously executes a reindex request.
+ * See Reindex API on elastic.co
+ * @param reindexRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon request completion
+ */
+ public final void reindexAsync(ReindexRequest reindexRequest, RequestOptions options, ActionListener<BulkByScrollResponse> listener) {
+ performRequestAsyncAndParseEntity(
+ reindexRequest, RequestConverters::reindex, options, BulkByScrollResponse::fromXContent, listener, emptySet()
+ );
+ }
+
/**
* Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
new file mode 100644
index 0000000000000..067851d452666
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Request object to flush a given Machine Learning job.
+ */
+public class FlushJobRequest extends ActionRequest implements ToXContentObject {
+
+ public static final ParseField CALC_INTERIM = new ParseField("calc_interim");
+ public static final ParseField START = new ParseField("start");
+ public static final ParseField END = new ParseField("end");
+ public static final ParseField ADVANCE_TIME = new ParseField("advance_time");
+ public static final ParseField SKIP_TIME = new ParseField("skip_time");
+
+ public static final ConstructingObjectParser<FlushJobRequest, Void> PARSER =
+ new ConstructingObjectParser<>("flush_job_request", (a) -> new FlushJobRequest((String) a[0]));
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+ PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM);
+ PARSER.declareString(FlushJobRequest::setStart, START);
+ PARSER.declareString(FlushJobRequest::setEnd, END);
+ PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME);
+ PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME);
+ }
+
+ private final String jobId;
+ private Boolean calcInterim;
+ private String start;
+ private String end;
+ private String advanceTime;
+ private String skipTime;
+
+ /**
+ * Create new Flush job request
+ *
+ * @param jobId The job ID of the job to flush
+ */
+ public FlushJobRequest(String jobId) {
+ this.jobId = jobId;
+ }
+
+ public String getJobId() {
+ return jobId;
+ }
+
+ public boolean getCalcInterim() {
+ return calcInterim;
+ }
+
+ /**
+ * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period.
+ *
+ * @param calcInterim defaults to {@code false}.
+ */
+ public void setCalcInterim(boolean calcInterim) {
+ this.calcInterim = calcInterim;
+ }
+
+ public String getStart() {
+ return start;
+ }
+
+ /**
+ * When used in conjunction with {@link FlushJobRequest#calcInterim},
+ * specifies the start of the range of buckets on which to calculate interim results.
+ *
+ * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setStart(String start) {
+ this.start = start;
+ }
+
+ public String getEnd() {
+ return end;
+ }
+
+ /**
+ * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range
+ * of buckets on which to calculate interim results
+ *
+ * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setEnd(String end) {
+ this.end = end;
+ }
+
+ public String getAdvanceTime() {
+ return advanceTime;
+ }
+
+ /**
+ * Specifies to advance to a particular time value.
+ * Results are generated and the model is updated for data from the specified time interval.
+ *
+ * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setAdvanceTime(String advanceTime) {
+ this.advanceTime = advanceTime;
+ }
+
+ public String getSkipTime() {
+ return skipTime;
+ }
+
+ /**
+ * Specifies to skip to a particular time value.
+ * Results are not generated and the model is not updated for data from the specified time interval.
+ *
+ * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string
+ */
+ public void setSkipTime(String skipTime) {
+ this.skipTime = skipTime;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ FlushJobRequest other = (FlushJobRequest) obj;
+ return Objects.equals(jobId, other.jobId) &&
+ calcInterim == other.calcInterim &&
+ Objects.equals(start, other.start) &&
+ Objects.equals(end, other.end) &&
+ Objects.equals(advanceTime, other.advanceTime) &&
+ Objects.equals(skipTime, other.skipTime);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), jobId);
+ if (calcInterim != null) {
+ builder.field(CALC_INTERIM.getPreferredName(), calcInterim);
+ }
+ if (start != null) {
+ builder.field(START.getPreferredName(), start);
+ }
+ if (end != null) {
+ builder.field(END.getPreferredName(), end);
+ }
+ if (advanceTime != null) {
+ builder.field(ADVANCE_TIME.getPreferredName(), advanceTime);
+ }
+ if (skipTime != null) {
+ builder.field(SKIP_TIME.getPreferredName(), skipTime);
+ }
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
new file mode 100644
index 0000000000000..048b07b504ae0
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Objects;
+
+/**
+ * Response object containing flush acknowledgement and additional data
+ */
+public class FlushJobResponse extends ActionResponse implements ToXContentObject {
+
+ public static final ParseField FLUSHED = new ParseField("flushed");
+ public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end");
+
+ public static final ConstructingObjectParser<FlushJobResponse, Void> PARSER =
+ new ConstructingObjectParser<>("flush_job_response",
+ true,
+ (a) -> {
+ boolean flushed = (boolean) a[0];
+ Date date = a[1] == null ? null : new Date((long) a[1]);
+ return new FlushJobResponse(flushed, date);
+ });
+
+ static {
+ PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED);
+ PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END);
+ }
+
+ public static FlushJobResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ private final boolean flushed;
+ private final Date lastFinalizedBucketEnd;
+
+ public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) {
+ this.flushed = flushed;
+ this.lastFinalizedBucketEnd = lastFinalizedBucketEnd;
+ }
+
+ /**
+ * Was the job successfully flushed or not
+ */
+ public boolean isFlushed() {
+ return flushed;
+ }
+
+ /**
+ * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
+ */
+ @Nullable
+ public Date getLastFinalizedBucketEnd() {
+ return lastFinalizedBucketEnd;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(flushed, lastFinalizedBucketEnd);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ FlushJobResponse that = (FlushJobResponse) other;
+ return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(FLUSHED.getPreferredName(), flushed);
+ if (lastFinalizedBucketEnd != null) {
+ builder.timeField(LAST_FINALIZED_BUCKET_END.getPreferredName(),
+ LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", lastFinalizedBucketEnd.getTime());
+ }
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
new file mode 100644
index 0000000000000..d8eb350755dcb
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+
+/**
+ * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds
+ *
+ * `_all` explicitly gets all the jobs' statistics in the cluster
+ * An empty request (no `jobId`s) implicitly gets all the jobs' statistics in the cluster
+ */
+public class GetJobStatsRequest extends ActionRequest implements ToXContentObject {
+
+ public static final ParseField ALLOW_NO_JOBS = new ParseField("allow_no_jobs");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetJobStatsRequest, Void> PARSER = new ConstructingObjectParser<>(
+ "get_jobs_stats_request", a -> new GetJobStatsRequest((List<String>) a[0]));
+
+ static {
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
+ Job.ID, ObjectParser.ValueType.STRING_ARRAY);
+ PARSER.declareBoolean(GetJobStatsRequest::setAllowNoJobs, ALLOW_NO_JOBS);
+ }
+
+ private static final String ALL_JOBS = "_all";
+
+ private final List<String> jobIds;
+ private Boolean allowNoJobs;
+
+ /**
+ * Explicitly gets all jobs statistics
+ *
+ * @return a {@link GetJobStatsRequest} for all existing jobs
+ */
+ public static GetJobStatsRequest getAllJobStatsRequest(){
+ return new GetJobStatsRequest(ALL_JOBS);
+ }
+
+ GetJobStatsRequest(List<String> jobIds) {
+ if (jobIds.stream().anyMatch(Objects::isNull)) {
+ throw new NullPointerException("jobIds must not contain null values");
+ }
+ this.jobIds = new ArrayList<>(jobIds);
+ }
+
+ /**
+ * Get the specified Job's statistics via their unique jobIds
+ *
+ * @param jobIds must be non-null and each jobId must be non-null
+ */
+ public GetJobStatsRequest(String... jobIds) {
+ this(Arrays.asList(jobIds));
+ }
+
+ /**
+ * All the jobIds for which to get statistics
+ */
+ public List<String> getJobIds() {
+ return jobIds;
+ }
+
+ public Boolean isAllowNoJobs() {
+ return this.allowNoJobs;
+ }
+
+ /**
+ * Whether to ignore if a wildcard expression matches no jobs.
+ *
+ * This includes `_all` string or when no jobs have been specified
+ *
+ * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
+ */
+ public void setAllowNoJobs(boolean allowNoJobs) {
+ this.allowNoJobs = allowNoJobs;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobIds, allowNoJobs);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ GetJobStatsRequest that = (GetJobStatsRequest) other;
+ return Objects.equals(jobIds, that.jobIds) &&
+ Objects.equals(allowNoJobs, that.allowNoJobs);
+ }
+
+ @Override
+ public ActionRequestValidationException validate() {
+ return null;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
+ if (allowNoJobs != null) {
+ builder.field(ALLOW_NO_JOBS.getPreferredName(), allowNoJobs);
+ }
+ builder.endObject();
+ return builder;
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
new file mode 100644
index 0000000000000..2e3ba113d193c
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Contains a {@link List} of the found {@link JobStats} objects and the total count found
+ */
+public class GetJobStatsResponse extends AbstractResultResponse<JobStats> {
+
+ public static final ParseField RESULTS_FIELD = new ParseField("jobs");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser<GetJobStatsResponse, Void> PARSER =
+ new ConstructingObjectParser<>("jobs_stats_response", true,
+ a -> new GetJobStatsResponse((List<JobStats>) a[0], (long) a[1]));
+
+ static {
+ PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD);
+ PARSER.declareLong(constructorArg(), COUNT);
+ }
+
+ GetJobStatsResponse(List<JobStats> jobStats, long count) {
+ super(RESULTS_FIELD, jobStats, count);
+ }
+
+ /**
+ * The collection of {@link JobStats} objects found in the query
+ */
+ public List<JobStats> jobStats() {
+ return results;
+ }
+
+ public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(results, count);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ GetJobStatsResponse other = (GetJobStatsResponse) obj;
+ return Objects.equals(results, other.results) && count == other.count;
+ }
+
+ @Override
+ public final String toString() {
+ return Strings.toString(this);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
new file mode 100644
index 0000000000000..892df340abd6b
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A POJO class containing an Elasticsearch node's attributes.
+ */
+public class NodeAttributes implements ToXContentObject {
+
+ public static final ParseField ID = new ParseField("id");
+ public static final ParseField NAME = new ParseField("name");
+ public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id");
+ public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address");
+ public static final ParseField ATTRIBUTES = new ParseField("attributes");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser PARSER =
+ new ConstructingObjectParser<>("node", true,
+ (a) -> {
+ int i = 0;
+ String id = (String) a[i++];
+ String name = (String) a[i++];
+ String ephemeralId = (String) a[i++];
+ String transportAddress = (String) a[i++];
+ Map attributes = (Map) a[i];
+ return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID);
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS);
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ (p, c) -> p.mapStrings(),
+ ATTRIBUTES,
+ ObjectParser.ValueType.OBJECT);
+ }
+
+ private final String id;
+ private final String name;
+ private final String ephemeralId;
+ private final String transportAddress;
+ private final Map attributes;
+
+ public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map attributes) {
+ this.id = id;
+ this.name = name;
+ this.ephemeralId = ephemeralId;
+ this.transportAddress = transportAddress;
+ this.attributes = Collections.unmodifiableMap(attributes);
+ }
+
+ /**
+ * The unique identifier of the node.
+ */
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * The node name.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * The ephemeral id of the node.
+ */
+ public String getEphemeralId() {
+ return ephemeralId;
+ }
+
+ /**
+ * The host and port where transport connections are accepted.
+ */
+ public String getTransportAddress() {
+ return transportAddress;
+ }
+
+ /**
+ * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}.
+ */
+ public Map getAttributes() {
+ return attributes;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(ID.getPreferredName(), id);
+ builder.field(NAME.getPreferredName(), name);
+ builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId);
+ builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress);
+ builder.field(ATTRIBUTES.getPreferredName(), attributes);
+ builder.endObject();
+ return builder;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, name, ephemeralId, transportAddress, attributes);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+
+ NodeAttributes that = (NodeAttributes) other;
+ return Objects.equals(id, that.id) &&
+ Objects.equals(name, that.name) &&
+ Objects.equals(ephemeralId, that.ephemeralId) &&
+ Objects.equals(transportAddress, that.transportAddress) &&
+ Objects.equals(attributes, that.attributes);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
new file mode 100644
index 0000000000000..32684bd7e62b4
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.config;
+
+import java.util.Locale;
+
+/**
+ * Jobs, whether running or complete, are in one of these states.
+ * When a job is created it is initialised in the closed state,
+ * i.e. it is not running.
+ */
+public enum JobState {
+
+ CLOSING, CLOSED, OPENED, FAILED, OPENING;
+
+ public static JobState fromString(String name) {
+ return valueOf(name.trim().toUpperCase(Locale.ROOT));
+ }
+
+ public String value() {
+ return name().toLowerCase(Locale.ROOT);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
new file mode 100644
index 0000000000000..a6b41beca8366
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java
@@ -0,0 +1,174 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A class to hold statistics about forecasts.
+ */
+public class ForecastStats implements ToXContentObject {
+
+ public static final ParseField TOTAL = new ParseField("total");
+ public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs");
+ public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes");
+ public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
+ public static final ParseField RECORDS = new ParseField("records");
+ public static final ParseField STATUS = new ParseField("status");
+
+ @SuppressWarnings("unchecked")
+ public static final ConstructingObjectParser PARSER =
+ new ConstructingObjectParser<>("forecast_stats",
+ true,
+ (a) -> {
+ int i = 0;
+ long total = (long)a[i++];
+ SimpleStats memoryStats = (SimpleStats)a[i++];
+ SimpleStats recordStats = (SimpleStats)a[i++];
+ SimpleStats runtimeStats = (SimpleStats)a[i++];
+ Map statusCounts = (Map)a[i];
+ return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts);
+ });
+
+ static {
+ PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS);
+ PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+ p -> {
+ Map counts = new HashMap<>();
+ p.map().forEach((key, value) -> counts.put(key, ((Number)value).longValue()));
+ return counts;
+ }, STATUS, ObjectParser.ValueType.OBJECT);
+ }
+
+ private final long total;
+ private final long forecastedJobs;
+ private SimpleStats memoryStats;
+ private SimpleStats recordStats;
+ private SimpleStats runtimeStats;
+ private Map statusCounts;
+
+ public ForecastStats(long total,
+ SimpleStats memoryStats,
+ SimpleStats recordStats,
+ SimpleStats runtimeStats,
+ Map statusCounts) {
+ this.total = total;
+ this.forecastedJobs = total > 0 ? 1 : 0;
+ if (total > 0) {
+ this.memoryStats = Objects.requireNonNull(memoryStats);
+ this.recordStats = Objects.requireNonNull(recordStats);
+ this.runtimeStats = Objects.requireNonNull(runtimeStats);
+ this.statusCounts = Collections.unmodifiableMap(statusCounts);
+ }
+ }
+
+ /**
+ * The number of forecasts currently available for this model.
+ */
+ public long getTotal() {
+ return total;
+ }
+
+ /**
+ * The number of jobs that have at least one forecast.
+ */
+ public long getForecastedJobs() {
+ return forecastedJobs;
+ }
+
+ /**
+ * Statistics about the memory usage: minimum, maximum, average and total.
+ */
+ public SimpleStats getMemoryStats() {
+ return memoryStats;
+ }
+
+ /**
+ * Statistics about the number of forecast records: minimum, maximum, average and total.
+ */
+ public SimpleStats getRecordStats() {
+ return recordStats;
+ }
+
+ /**
+ * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total.
+ */
+ public SimpleStats getRuntimeStats() {
+ return runtimeStats;
+ }
+
+ /**
+ * Counts per forecast status, for example: {"finished" : 2}.
+ */
+ public Map getStatusCounts() {
+ return statusCounts;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(TOTAL.getPreferredName(), total);
+ builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs);
+
+ if (total > 0) {
+ builder.field(MEMORY_BYTES.getPreferredName(), memoryStats);
+ builder.field(RECORDS.getPreferredName(), recordStats);
+ builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats);
+ builder.field(STATUS.getPreferredName(), statusCounts);
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ ForecastStats other = (ForecastStats) obj;
+ return Objects.equals(total, other.total) &&
+ Objects.equals(forecastedJobs, other.forecastedJobs) &&
+ Objects.equals(memoryStats, other.memoryStats) &&
+ Objects.equals(recordStats, other.recordStats) &&
+ Objects.equals(runtimeStats, other.runtimeStats) &&
+ Objects.equals(statusCounts, other.statusCounts);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
new file mode 100644
index 0000000000000..df5be4aa4c5cc
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.client.ml.job.config.Job;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.process.DataCounts;
+import org.elasticsearch.client.ml.job.process.ModelSizeStats;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.client.ml.NodeAttributes;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Class containing the statistics for a Machine Learning job.
+ *
+ */
+public class JobStats implements ToXContentObject {
+
+ private static final ParseField DATA_COUNTS = new ParseField("data_counts");
+ private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats");
+ private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats");
+ private static final ParseField STATE = new ParseField("state");
+ private static final ParseField NODE = new ParseField("node");
+ private static final ParseField OPEN_TIME = new ParseField("open_time");
+ private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
+
+ public static final ConstructingObjectParser PARSER =
+ new ConstructingObjectParser<>("job_stats",
+ true,
+ (a) -> {
+ int i = 0;
+ String jobId = (String) a[i++];
+ DataCounts dataCounts = (DataCounts) a[i++];
+ JobState jobState = (JobState) a[i++];
+ ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++];
+ ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? null : modelSizeStatsBuilder.build();
+ ForecastStats forecastStats = (ForecastStats) a[i++];
+ NodeAttributes node = (NodeAttributes) a[i++];
+ String assignmentExplanation = (String) a[i++];
+ TimeValue openTime = (TimeValue) a[i];
+ return new JobStats(jobId,
+ dataCounts,
+ jobState,
+ modelSizeStats,
+ forecastStats,
+ node,
+ assignmentExplanation,
+ openTime);
+ });
+
+ static {
+ PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS);
+ PARSER.declareField(ConstructingObjectParser.constructorArg(),
+ (p) -> JobState.fromString(p.text()),
+ STATE,
+ ObjectParser.ValueType.VALUE);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS);
+ PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE);
+ PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
+ PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(),
+ (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()),
+ OPEN_TIME,
+ ObjectParser.ValueType.STRING_OR_NULL);
+ }
+
+
+ private final String jobId;
+ private final DataCounts dataCounts;
+ private final JobState state;
+ private final ModelSizeStats modelSizeStats;
+ private final ForecastStats forecastStats;
+ private final NodeAttributes node;
+ private final String assignmentExplanation;
+ private final TimeValue openTime;
+
+ JobStats(String jobId, DataCounts dataCounts, JobState state, @Nullable ModelSizeStats modelSizeStats,
+ @Nullable ForecastStats forecastStats, @Nullable NodeAttributes node,
+ @Nullable String assignmentExplanation, @Nullable TimeValue opentime) {
+ this.jobId = Objects.requireNonNull(jobId);
+ this.dataCounts = Objects.requireNonNull(dataCounts);
+ this.state = Objects.requireNonNull(state);
+ this.modelSizeStats = modelSizeStats;
+ this.forecastStats = forecastStats;
+ this.node = node;
+ this.assignmentExplanation = assignmentExplanation;
+ this.openTime = opentime;
+ }
+
+ /**
+ * The jobId referencing the job for these statistics.
+ */
+ public String getJobId() {
+ return jobId;
+ }
+
+ /**
+ * An object that describes the number of records processed and any related error counts
+ * See {@link DataCounts}
+ */
+ public DataCounts getDataCounts() {
+ return dataCounts;
+ }
+
+ /**
+ * An object that provides information about the size and contents of the model.
+ * See {@link ModelSizeStats}
+ */
+ public ModelSizeStats getModelSizeStats() {
+ return modelSizeStats;
+ }
+
+ /**
+ * An object that provides statistical information about forecasts of this job.
+ * See {@link ForecastStats}
+ */
+ public ForecastStats getForecastStats() {
+ return forecastStats;
+ }
+
+ /**
+ * The status of the job.
+ * See {@link JobState}
+ */
+ public JobState getState() {
+ return state;
+ }
+
+ /**
+ * For open jobs only, contains information about the node where the job runs
+ * See {@link NodeAttributes}
+ */
+ public NodeAttributes getNode() {
+ return node;
+ }
+
+ /**
+ * For open jobs only, contains messages relating to the selection of a node to run the job.
+ */
+ public String getAssignmentExplanation() {
+ return assignmentExplanation;
+ }
+
+ /**
+ * For open jobs only, the elapsed time for which the job has been open.
+ */
+ public TimeValue getOpenTime() {
+ return openTime;
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(Job.ID.getPreferredName(), jobId);
+ builder.field(DATA_COUNTS.getPreferredName(), dataCounts);
+ builder.field(STATE.getPreferredName(), state.toString());
+ if (modelSizeStats != null) {
+ builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats);
+ }
+ if (forecastStats != null) {
+ builder.field(FORECASTS_STATS.getPreferredName(), forecastStats);
+ }
+ if (node != null) {
+ builder.field(NODE.getPreferredName(), node);
+ }
+ if (assignmentExplanation != null) {
+ builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation);
+ }
+ if (openTime != null) {
+ builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep());
+ }
+ return builder.endObject();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(jobId, dataCounts, modelSizeStats, forecastStats, state, node, assignmentExplanation, openTime);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ JobStats other = (JobStats) obj;
+ return Objects.equals(jobId, other.jobId) &&
+ Objects.equals(this.dataCounts, other.dataCounts) &&
+ Objects.equals(this.modelSizeStats, other.modelSizeStats) &&
+ Objects.equals(this.forecastStats, other.forecastStats) &&
+ Objects.equals(this.state, other.state) &&
+ Objects.equals(this.node, other.node) &&
+ Objects.equals(this.assignmentExplanation, other.assignmentExplanation) &&
+ Objects.equals(this.openTime, other.openTime);
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
new file mode 100644
index 0000000000000..f4c8aa0fa3b29
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Helper class for min, max, avg and total statistics for a quantity.
+ */
+public class SimpleStats implements ToXContentObject {
+
+ public static final ParseField MIN = new ParseField("min");
+ public static final ParseField MAX = new ParseField("max");
+ public static final ParseField AVG = new ParseField("avg");
+ public static final ParseField TOTAL = new ParseField("total");
+
+ public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("simple_stats", true,
+ (a) -> {
+ int i = 0;
+ double total = (double)a[i++];
+ double min = (double)a[i++];
+ double max = (double)a[i++];
+ double avg = (double)a[i++];
+ return new SimpleStats(total, min, max, avg);
+ });
+
+ static {
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
+ PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG);
+ }
+
+ private final double total;
+ private final double min;
+ private final double max;
+ private final double avg;
+
+ SimpleStats(double total, double min, double max, double avg) {
+ this.total = total;
+ this.min = min;
+ this.max = max;
+ this.avg = avg;
+ }
+
+ public double getMin() {
+ return min;
+ }
+
+ public double getMax() {
+ return max;
+ }
+
+ public double getAvg() {
+ return avg;
+ }
+
+ public double getTotal() {
+ return total;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(total, min, max, avg);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+
+ SimpleStats other = (SimpleStats) obj;
+ return Objects.equals(total, other.total) &&
+ Objects.equals(min, other.min) &&
+ Objects.equals(avg, other.avg) &&
+ Objects.equals(max, other.max);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+ builder.field(MIN.getPreferredName(), min);
+ builder.field(MAX.getPreferredName(), max);
+ builder.field(AVG.getPreferredName(), avg);
+ builder.field(TOTAL.getPreferredName(), total);
+ builder.endObject();
+ return builder;
+ }
+}
+
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
index 7ed791e1a837a..72480fcbc49d5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
@@ -41,12 +41,16 @@
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.query.IdsQueryBuilder;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
@@ -689,6 +693,69 @@ public void testBulk() throws IOException {
validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest);
}
+ public void testReindex() throws IOException {
+ final String sourceIndex = "source1";
+ final String destinationIndex = "dest";
+ {
+ // Prepare
+ Settings settings = Settings.builder()
+ .put("number_of_shards", 1)
+ .put("number_of_replicas", 0)
+ .build();
+ createIndex(sourceIndex, settings);
+ createIndex(destinationIndex, settings);
+ assertEquals(
+ RestStatus.OK,
+ highLevelClient().bulk(
+ new BulkRequest()
+ .add(new IndexRequest(sourceIndex, "type", "1")
+ .source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
+ .add(new IndexRequest(sourceIndex, "type", "2")
+ .source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
+ .setRefreshPolicy(RefreshPolicy.IMMEDIATE),
+ RequestOptions.DEFAULT
+ ).status()
+ );
+ }
+ {
+ // test1: create one doc in dest
+ ReindexRequest reindexRequest = new ReindexRequest();
+ reindexRequest.setSourceIndices(sourceIndex);
+ reindexRequest.setDestIndex(destinationIndex);
+ reindexRequest.setSourceQuery(new IdsQueryBuilder().addIds("1").types("type"));
+ reindexRequest.setRefresh(true);
+ BulkByScrollResponse bulkResponse = execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync);
+ assertEquals(1, bulkResponse.getCreated());
+ assertEquals(1, bulkResponse.getTotal());
+ assertEquals(0, bulkResponse.getDeleted());
+ assertEquals(0, bulkResponse.getNoops());
+ assertEquals(0, bulkResponse.getVersionConflicts());
+ assertEquals(1, bulkResponse.getBatches());
+ assertTrue(bulkResponse.getTook().getMillis() > 0);
+ assertEquals(1, bulkResponse.getBatches());
+ assertEquals(0, bulkResponse.getBulkFailures().size());
+ assertEquals(0, bulkResponse.getSearchFailures().size());
+ }
+ {
+ // test2: create 1 and update 1
+ ReindexRequest reindexRequest = new ReindexRequest();
+ reindexRequest.setSourceIndices(sourceIndex);
+ reindexRequest.setDestIndex(destinationIndex);
+ BulkByScrollResponse bulkResponse = execute(reindexRequest, highLevelClient()::reindex, highLevelClient()::reindexAsync);
+ assertEquals(1, bulkResponse.getCreated());
+ assertEquals(2, bulkResponse.getTotal());
+ assertEquals(1, bulkResponse.getUpdated());
+ assertEquals(0, bulkResponse.getDeleted());
+ assertEquals(0, bulkResponse.getNoops());
+ assertEquals(0, bulkResponse.getVersionConflicts());
+ assertEquals(1, bulkResponse.getBatches());
+ assertTrue(bulkResponse.getTook().getMillis() > 0);
+ assertEquals(1, bulkResponse.getBatches());
+ assertEquals(0, bulkResponse.getBulkFailures().size());
+ assertEquals(0, bulkResponse.getSearchFailures().size());
+ }
+ }
+
public void testBulkProcessorIntegration() throws IOException {
int nbItems = randomIntBetween(10, 100);
boolean[] errors = new boolean[nbItems];
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
index 43f3ef41a8d73..d84099d9a3c40 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java
@@ -36,6 +36,8 @@
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
@@ -139,6 +141,44 @@ public void testGetBuckets() throws IOException {
}
}
+ public void testFlushJob() throws Exception {
+ String jobId = randomAlphaOfLength(10);
+ FlushJobRequest flushJobRequest = new FlushJobRequest(jobId);
+
+ Request request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
+ assertEquals("{\"job_id\":\"" + jobId + "\"}", requestEntityToString(request));
+
+ flushJobRequest.setSkipTime("1000");
+ flushJobRequest.setStart("105");
+ flushJobRequest.setEnd("200");
+ flushJobRequest.setAdvanceTime("100");
+ flushJobRequest.setCalcInterim(true);
+ request = MLRequestConverters.flushJob(flushJobRequest);
+ assertEquals(
+ "{\"job_id\":\"" + jobId + "\",\"calc_interim\":true,\"start\":\"105\"," +
+ "\"end\":\"200\",\"advance_time\":\"100\",\"skip_time\":\"1000\"}",
+ requestEntityToString(request));
+ }
+
+ public void testGetJobStats() {
+ GetJobStatsRequest getJobStatsRequestRequest = new GetJobStatsRequest();
+
+ Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+ assertFalse(request.getParameters().containsKey("allow_no_jobs"));
+
+ getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
+ getJobStatsRequestRequest.setAllowNoJobs(true);
+ request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);
+
+ assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+ assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
+ }
+
private static Job createValidJob(String jobId) {
AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList(
Detector.builder().setFunction("count").build()));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index cb9dbea129d2b..cd4b6ffc7691f 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -19,6 +19,12 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteJobRequest;
@@ -34,6 +40,8 @@
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
import org.junit.After;
import java.io.IOException;
@@ -41,6 +49,7 @@
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
+import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
@@ -138,6 +147,77 @@ public void testCloseJob() throws Exception {
assertTrue(response.isClosed());
}
+ public void testFlushJob() throws Exception {
+ String jobId = randomValidJobId();
+ Job job = buildJob(jobId);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT);
+
+ FlushJobResponse response = execute(new FlushJobRequest(jobId),
+ machineLearningClient::flushJob,
+ machineLearningClient::flushJobAsync);
+ assertTrue(response.isFlushed());
+ }
+
+ public void testGetJobStats() throws Exception {
+ String jobId1 = "ml-get-job-stats-test-id-1";
+ String jobId2 = "ml-get-job-stats-test-id-2";
+
+ Job job1 = buildJob(jobId1);
+ Job job2 = buildJob(jobId2);
+ MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+ machineLearningClient.putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
+ machineLearningClient.putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
+
+ machineLearningClient.openJob(new OpenJobRequest(jobId1), RequestOptions.DEFAULT);
+
+ GetJobStatsRequest request = new GetJobStatsRequest(jobId1, jobId2);
+
+ // Test getting specific
+ GetJobStatsResponse response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertEquals(2, response.count());
+ assertThat(response.jobStats(), hasSize(2));
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), containsInAnyOrder(jobId1, jobId2));
+ for (JobStats stats : response.jobStats()) {
+ if (stats.getJobId().equals(jobId1)) {
+ assertEquals(JobState.OPENED, stats.getState());
+ } else {
+ assertEquals(JobState.CLOSED, stats.getState());
+ }
+ }
+
+ // Test getting all explicitly
+ request = GetJobStatsRequest.getAllJobStatsRequest();
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all implicitly
+ response = execute(new GetJobStatsRequest(), machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test getting all with wildcard
+ request = new GetJobStatsRequest("ml-get-job-stats-test-id-*");
+ response = execute(request, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync);
+ assertTrue(response.count() >= 2L);
+ assertTrue(response.jobStats().size() >= 2L);
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()), hasItems(jobId1, jobId2));
+
+ // Test when allow_no_jobs is false
+ final GetJobStatsRequest erroredRequest = new GetJobStatsRequest("jobs-that-do-not-exist*");
+ erroredRequest.setAllowNoJobs(false);
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(erroredRequest, machineLearningClient::getJobStats, machineLearningClient::getJobStatsAsync));
+ assertThat(exception.status().getStatus(), equalTo(404));
+ }
+
public static String randomValidJobId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index c589ce7d8c9a0..41f280cd1ba80 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -117,6 +117,7 @@
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.QueryBuilder;
@@ -127,6 +128,8 @@
import org.elasticsearch.index.rankeval.RankEvalSpec;
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RestRankEvalAction;
+import org.elasticsearch.index.reindex.ReindexRequest;
+import org.elasticsearch.index.reindex.RemoteInfo;
import org.elasticsearch.protocol.xpack.XPackInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
@@ -173,6 +176,7 @@
import java.util.function.Supplier;
import java.util.stream.Collectors;
+import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.enforceSameContentType;
@@ -180,6 +184,7 @@
import static org.elasticsearch.index.RandomCreateIndexGenerator.randomCreateIndexRequest;
import static org.elasticsearch.index.RandomCreateIndexGenerator.randomIndexSettings;
import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAliasAction;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -417,6 +422,64 @@ public void testUpdateAliases() throws IOException {
assertToXContentBody(indicesAliasesRequest, request.getEntity());
}
+ public void testReindex() throws IOException {
+ ReindexRequest reindexRequest = new ReindexRequest();
+ reindexRequest.setSourceIndices("source_idx");
+ reindexRequest.setDestIndex("dest_idx");
+ Map<String, String> expectedParams = new HashMap<>();
+ if (randomBoolean()) {
+ XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
+ RemoteInfo remoteInfo = new RemoteInfo("http", "remote-host", 9200, null,
+ BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS)),
+ "user",
+ "pass",
+ emptyMap(),
+ RemoteInfo.DEFAULT_SOCKET_TIMEOUT,
+ RemoteInfo.DEFAULT_CONNECT_TIMEOUT
+ );
+ reindexRequest.setRemoteInfo(remoteInfo);
+ }
+ if (randomBoolean()) {
+ reindexRequest.setSourceDocTypes("doc", "tweet");
+ }
+ if (randomBoolean()) {
+ reindexRequest.setSourceBatchSize(randomInt(100));
+ }
+ if (randomBoolean()) {
+ reindexRequest.setDestDocType("tweet_and_doc");
+ }
+ if (randomBoolean()) {
+ reindexRequest.setDestOpType("create");
+ }
+ if (randomBoolean()) {
+ reindexRequest.setDestPipeline("my_pipeline");
+ }
+ if (randomBoolean()) {
+ reindexRequest.setDestRouting("=cat");
+ }
+ if (randomBoolean()) {
+ reindexRequest.setSize(randomIntBetween(100, 1000));
+ }
+ if (randomBoolean()) {
+ reindexRequest.setAbortOnVersionConflict(false);
+ }
+ if (randomBoolean()) {
+ String ts = randomTimeValue();
+ reindexRequest.setScroll(TimeValue.parseTimeValue(ts, "scroll"));
+ }
+ if (reindexRequest.getRemoteInfo() == null && randomBoolean()) {
+ reindexRequest.setSourceQuery(new TermQueryBuilder("foo", "fooval"));
+ }
+ setRandomTimeout(reindexRequest::setTimeout, ReplicationRequest.DEFAULT_TIMEOUT, expectedParams);
+ setRandomWaitForActiveShards(reindexRequest::setWaitForActiveShards, ActiveShardCount.DEFAULT, expectedParams);
+ expectedParams.put("scroll", reindexRequest.getScrollTime().getStringRep());
+ Request request = RequestConverters.reindex(reindexRequest);
+ assertEquals("/_reindex", request.getEndpoint());
+ assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+ assertEquals(expectedParams, request.getParameters());
+ assertToXContentBody(reindexRequest, request.getEntity());
+ }
+
public void testPutMapping() throws IOException {
PutMappingRequest putMappingRequest = new PutMappingRequest();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index e82a1922b1344..d9a5359056f7f 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -678,7 +678,6 @@ public void testApiNamingConventions() throws Exception {
"indices.put_alias",
"mtermvectors",
"put_script",
- "reindex",
"reindex_rethrottle",
"render_search_template",
"scripts_painless_execute",
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index 97f8d37850405..90b44f5a8eb8b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -50,6 +50,8 @@
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -59,13 +61,22 @@
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.index.query.MatchAllQueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.reindex.BulkByScrollResponse;
+import org.elasticsearch.index.reindex.ReindexRequest;
+import org.elasticsearch.index.reindex.RemoteInfo;
+import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.search.sort.SortOrder;
+import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -760,6 +771,144 @@ public void onFailure(Exception e) {
}
}
+ public void testReindex() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+ {
+ String mapping =
+ "\"doc\": {\n" +
+ " \"properties\": {\n" +
+ " \"user\": {\n" +
+ " \"type\": \"text\"\n" +
+ " },\n" +
+ " \"field1\": {\n" +
+ " \"type\": \"integer\"\n" +
+ " },\n" +
+ " \"field2\": {\n" +
+ " \"type\": \"integer\"\n" +
+ " }\n" +
+ " }\n" +
+ " }";
+ createIndex("source1", Settings.EMPTY, mapping);
+ createIndex("source2", Settings.EMPTY, mapping);
+ createPipeline("my_pipeline");
+ }
+ {
+ // tag::reindex-request
+ ReindexRequest request = new ReindexRequest(); // <1>
+ request.setSourceIndices("source1", "source2"); // <2>
+ request.setDestIndex("dest"); // <3>
+ // end::reindex-request
+ // tag::reindex-request-versionType
+ request.setDestVersionType(VersionType.EXTERNAL); // <1>
+ // end::reindex-request-versionType
+ // tag::reindex-request-opType
+ request.setDestOpType("create"); // <1>
+ // end::reindex-request-opType
+ // tag::reindex-request-conflicts
+ request.setConflicts("proceed"); // <1>
+ // end::reindex-request-conflicts
+ // tag::reindex-request-typeOrQuery
+ request.setSourceDocTypes("doc"); // <1>
+ request.setSourceQuery(new TermQueryBuilder("user", "kimchy")); // <2>
+ // end::reindex-request-typeOrQuery
+ // tag::reindex-request-size
+ request.setSize(10); // <1>
+ // end::reindex-request-size
+ // tag::reindex-request-sourceSize
+ request.setSourceBatchSize(100); // <1>
+ // end::reindex-request-sourceSize
+ // tag::reindex-request-pipeline
+ request.setDestPipeline("my_pipeline"); // <1>
+ // end::reindex-request-pipeline
+ // tag::reindex-request-sort
+ request.addSortField("field1", SortOrder.DESC); // <1>
+ request.addSortField("field2", SortOrder.ASC); // <2>
+ // end::reindex-request-sort
+ // tag::reindex-request-script
+ request.setScript(
+ new Script(
+ ScriptType.INLINE, "painless",
+ "if (ctx._source.user == 'kimchy') {ctx._source.likes++;}",
+ Collections.emptyMap())); // <1>
+ // end::reindex-request-script
+ // tag::reindex-request-remote
+ request.setRemoteInfo(
+ new RemoteInfo(
+ "https", "localhost", 9002, null, new BytesArray(new MatchAllQueryBuilder().toString()),
+ "user", "pass", Collections.emptyMap(), new TimeValue(100, TimeUnit.MILLISECONDS),
+ new TimeValue(100, TimeUnit.SECONDS)
+ )
+ ); // <1>
+ // end::reindex-request-remote
+ request.setRemoteInfo(null); // Remove it for tests
+ // tag::reindex-request-timeout
+ request.setTimeout(TimeValue.timeValueMinutes(2)); // <1>
+ // end::reindex-request-timeout
+ // tag::reindex-request-refresh
+ request.setRefresh(true); // <1>
+ // end::reindex-request-refresh
+ // tag::reindex-request-slices
+ request.setSlices(2); // <1>
+ // end::reindex-request-slices
+ // tag::reindex-request-scroll
+ request.setScroll(TimeValue.timeValueMinutes(10)); // <1>
+ // end::reindex-request-scroll
+
+
+ // tag::reindex-execute
+ BulkByScrollResponse bulkResponse = client.reindex(request, RequestOptions.DEFAULT);
+ // end::reindex-execute
+ assertSame(0, bulkResponse.getSearchFailures().size());
+ assertSame(0, bulkResponse.getBulkFailures().size());
+ // tag::reindex-response
+ TimeValue timeTaken = bulkResponse.getTook(); // <1>
+ boolean timedOut = bulkResponse.isTimedOut(); // <2>
+ long totalDocs = bulkResponse.getTotal(); // <3>
+ long updatedDocs = bulkResponse.getUpdated(); // <4>
+ long createdDocs = bulkResponse.getCreated(); // <5>
+ long deletedDocs = bulkResponse.getDeleted(); // <6>
+ long batches = bulkResponse.getBatches(); // <7>
+ long noops = bulkResponse.getNoops(); // <8>
+ long versionConflicts = bulkResponse.getVersionConflicts(); // <9>
+ long bulkRetries = bulkResponse.getBulkRetries(); // <10>
+ long searchRetries = bulkResponse.getSearchRetries(); // <11>
+ TimeValue throttledMillis = bulkResponse.getStatus().getThrottled(); // <12>
+ TimeValue throttledUntilMillis = bulkResponse.getStatus().getThrottledUntil(); // <13>
+ List<ScrollableHitSource.SearchFailure> searchFailures = bulkResponse.getSearchFailures(); // <14>
+ List<BulkItemResponse.Failure> bulkFailures = bulkResponse.getBulkFailures(); // <15>
+ // end::reindex-response
+ }
+ {
+ ReindexRequest request = new ReindexRequest();
+ request.setSourceIndices("source1");
+ request.setDestIndex("dest");
+
+ // tag::reindex-execute-listener
+ ActionListener<BulkByScrollResponse> listener = new ActionListener<BulkByScrollResponse>() {
+ @Override
+ public void onResponse(BulkByScrollResponse bulkResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::reindex-execute-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::reindex-execute-async
+ client.reindexAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::reindex-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testGet() throws Exception {
RestHighLevelClient client = highLevelClient();
{
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
index 94793f0ab7911..f92f01f6bad19 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -35,6 +35,8 @@
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
+import org.elasticsearch.client.ml.GetJobStatsRequest;
+import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
@@ -50,6 +52,9 @@
import org.elasticsearch.client.ml.job.util.PageParams;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.client.ml.FlushJobRequest;
+import org.elasticsearch.client.ml.FlushJobResponse;
+import org.elasticsearch.client.ml.job.stats.JobStats;
import org.junit.After;
import java.io.IOException;
@@ -458,6 +463,127 @@ public void onFailure(Exception e) {
}
}
+ public void testFlushJob() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
+ client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+ client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
+
+ Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
+ client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+ client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);
+
+ {
+ //tag::x-pack-ml-flush-job-request
+ FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); //<1>
+ //end::x-pack-ml-flush-job-request
+
+ //tag::x-pack-ml-flush-job-request-options
+ flushJobRequest.setCalcInterim(true); //<1>
+ flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); //<2>
+ flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); //<3>
+ flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); //<4>
+ flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); //<5>
+ //end::x-pack-ml-flush-job-request-options
+
+ //tag::x-pack-ml-flush-job-execute
+ FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
+ //end::x-pack-ml-flush-job-execute
+
+ //tag::x-pack-ml-flush-job-response
+ boolean isFlushed = flushJobResponse.isFlushed(); //<1>
+ Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); //<2>
+ //end::x-pack-ml-flush-job-response
+
+ }
+ {
+ //tag::x-pack-ml-flush-job-listener
+ ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
+ @Override
+ public void onResponse(FlushJobResponse flushJobResponse) {
+ //<1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ //end::x-pack-ml-flush-job-listener
+ FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::x-pack-ml-flush-job-execute-async
+ client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); //<1>
+ // end::x-pack-ml-flush-job-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
+
+ public void testGetJobStats() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
+ client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
+
+ Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
+ client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
+
+ {
+ //tag::x-pack-ml-get-job-stats-request
+ GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); //<1>
+ request.setAllowNoJobs(true); //<2>
+ //end::x-pack-ml-get-job-stats-request
+
+ //tag::x-pack-ml-get-job-stats-execute
+ GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
+ //end::x-pack-ml-get-job-stats-execute
+
+ //tag::x-pack-ml-get-job-stats-response
+ long numberOfJobStats = response.count(); //<1>
+ List<JobStats> jobStats = response.jobStats(); //<2>
+ //end::x-pack-ml-get-job-stats-response
+
+ assertEquals(2, response.count());
+ assertThat(response.jobStats(), hasSize(2));
+ assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
+ containsInAnyOrder(job.getId(), secondJob.getId()));
+ }
+ {
+ GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");
+
+ // tag::x-pack-ml-get-job-stats-listener
+ ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
+ @Override
+ public void onResponse(GetJobStatsResponse response) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::x-pack-ml-get-job-stats-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::x-pack-ml-get-job-stats-execute-async
+ client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::x-pack-ml-get-job-stats-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testGetRecords() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java
new file mode 100644
index 0000000000000..c2bddd436ccd5
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobRequestTests.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class FlushJobRequestTests extends AbstractXContentTestCase<FlushJobRequest> {
+
+ @Override
+ protected FlushJobRequest createTestInstance() {
+ FlushJobRequest request = new FlushJobRequest(randomAlphaOfLengthBetween(1, 20));
+
+ if (randomBoolean()) {
+ request.setCalcInterim(randomBoolean());
+ }
+ if (randomBoolean()) {
+ request.setAdvanceTime(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setStart(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setEnd(String.valueOf(randomLong()));
+ }
+ if (randomBoolean()) {
+ request.setSkipTime(String.valueOf(randomLong()));
+ }
+ return request;
+ }
+
+ @Override
+ protected FlushJobRequest doParseInstance(XContentParser parser) throws IOException {
+ return FlushJobRequest.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java
new file mode 100644
index 0000000000000..bc968ff4564ab
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/FlushJobResponseTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.Date;
+
+public class FlushJobResponseTests extends AbstractXContentTestCase<FlushJobResponse> {
+
+ @Override
+ protected FlushJobResponse createTestInstance() {
+ return new FlushJobResponse(randomBoolean(),
+ randomBoolean() ? null : new Date(randomNonNegativeLong()));
+ }
+
+ @Override
+ protected FlushJobResponse doParseInstance(XContentParser parser) throws IOException {
+ return FlushJobResponse.PARSER.apply(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
index 181804c9676fe..8cc990730f78e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobResponseTests.java
@@ -26,6 +26,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.Predicate;
public class GetJobResponseTests extends AbstractXContentTestCase<GetJobResponse> {
@@ -46,8 +47,13 @@ protected GetJobResponse doParseInstance(XContentParser parser) throws IOExcepti
return GetJobResponse.fromXContent(parser);
}
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
@Override
protected boolean supportsUnknownFields() {
- return false;
+ return true;
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java
new file mode 100644
index 0000000000000..690e582976656
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsRequestTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetJobStatsRequestTests extends AbstractXContentTestCase<GetJobStatsRequest> {
+
+ public void testAllJobsRequest() {
+ GetJobStatsRequest request = GetJobStatsRequest.getAllJobStatsRequest();
+
+ assertEquals(request.getJobIds().size(), 1);
+ assertEquals(request.getJobIds().get(0), "_all");
+ }
+
+ public void testNewWithJobId() {
+ Exception exception = expectThrows(NullPointerException.class, () -> new GetJobStatsRequest("job", null));
+ assertEquals(exception.getMessage(), "jobIds must not contain null values");
+ }
+
+ @Override
+ protected GetJobStatsRequest createTestInstance() {
+ int jobCount = randomIntBetween(0, 10);
+ List<String> jobIds = new ArrayList<>(jobCount);
+
+ for (int i = 0; i < jobCount; i++) {
+ jobIds.add(randomAlphaOfLength(10));
+ }
+
+ GetJobStatsRequest request = new GetJobStatsRequest(jobIds);
+
+ if (randomBoolean()) {
+ request.setAllowNoJobs(randomBoolean());
+ }
+
+ return request;
+ }
+
+ @Override
+ protected GetJobStatsRequest doParseInstance(XContentParser parser) throws IOException {
+ return GetJobStatsRequest.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java
new file mode 100644
index 0000000000000..23f7bcc042b4a
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetJobStatsResponseTests.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.stats.JobStats;
+import org.elasticsearch.client.ml.job.stats.JobStatsTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GetJobStatsResponseTests extends AbstractXContentTestCase<GetJobStatsResponse> {
+
+ @Override
+ protected GetJobStatsResponse createTestInstance() {
+
+ int count = randomIntBetween(1, 5);
+ List<JobStats> results = new ArrayList<>(count);
+ for(int i = 0; i < count; i++) {
+ results.add(JobStatsTests.createRandomInstance());
+ }
+
+ return new GetJobStatsResponse(results, count);
+ }
+
+ @Override
+ protected GetJobStatsResponse doParseInstance(XContentParser parser) throws IOException {
+ return GetJobStatsResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java
new file mode 100644
index 0000000000000..cee1710a62232
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/NodeAttributesTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class NodeAttributesTests extends AbstractXContentTestCase<NodeAttributes> {
+
+ public static NodeAttributes createRandom() {
+ int numberOfAttributes = randomIntBetween(1, 10);
+ Map<String, String> attributes = new HashMap<>(numberOfAttributes);
+ for(int i = 0; i < numberOfAttributes; i++) {
+ String val = randomAlphaOfLength(10);
+ attributes.put("key-"+i, val);
+ }
+ return new NodeAttributes(randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ randomAlphaOfLength(10),
+ attributes);
+ }
+
+ @Override
+ protected NodeAttributes createTestInstance() {
+ return createRandom();
+ }
+
+ @Override
+ protected NodeAttributes doParseInstance(XContentParser parser) throws IOException {
+ return NodeAttributes.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java
new file mode 100644
index 0000000000000..16dfa305479be
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/ForecastStatsTests.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class ForecastStatsTests extends AbstractXContentTestCase<ForecastStats> {
+
+ @Override
+ public ForecastStats createTestInstance() {
+ if (randomBoolean()) {
+ return createRandom(1, 22);
+ }
+ return new ForecastStats(0, null,null,null,null);
+ }
+
+ @Override
+ protected ForecastStats doParseInstance(XContentParser parser) throws IOException {
+ return ForecastStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ public static ForecastStats createRandom(long minTotal, long maxTotal) {
+ return new ForecastStats(
+ randomLongBetween(minTotal, maxTotal),
+ SimpleStatsTests.createRandom(),
+ SimpleStatsTests.createRandom(),
+ SimpleStatsTests.createRandom(),
+ createCountStats());
+ }
+
+ private static Map<String, Long> createCountStats() {
+ Map<String, Long> countStats = new HashMap<>();
+ for (int i = 0; i < randomInt(10); ++i) {
+ countStats.put(randomAlphaOfLengthBetween(1, 20), randomLongBetween(1L, 100L));
+ }
+ return countStats;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java
new file mode 100644
index 0000000000000..5d00f879140e0
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/JobStatsTests.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.client.ml.NodeAttributes;
+import org.elasticsearch.client.ml.NodeAttributesTests;
+import org.elasticsearch.client.ml.job.process.DataCounts;
+import org.elasticsearch.client.ml.job.process.DataCountsTests;
+import org.elasticsearch.client.ml.job.process.ModelSizeStats;
+import org.elasticsearch.client.ml.job.process.ModelSizeStatsTests;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.client.ml.job.config.JobState;
+import org.elasticsearch.client.ml.job.config.JobTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+
+public class JobStatsTests extends AbstractXContentTestCase<JobStats> {
+
+ public static JobStats createRandomInstance() {
+ String jobId = JobTests.randomValidJobId();
+ JobState state = randomFrom(JobState.CLOSING, JobState.CLOSED, JobState.OPENED, JobState.FAILED, JobState.OPENING);
+ DataCounts dataCounts = DataCountsTests.createTestInstance(jobId);
+
+ ModelSizeStats modelSizeStats = randomBoolean() ? ModelSizeStatsTests.createRandomized() : null;
+ ForecastStats forecastStats = randomBoolean() ? ForecastStatsTests.createRandom(1, 22) : null;
+ NodeAttributes nodeAttributes = randomBoolean() ? NodeAttributesTests.createRandom() : null;
+ String assigmentExplanation = randomBoolean() ? randomAlphaOfLength(10) : null;
+ TimeValue openTime = randomBoolean() ? TimeValue.timeValueMillis(randomIntBetween(1, 10000)) : null;
+
+ return new JobStats(jobId, dataCounts, state, modelSizeStats, forecastStats, nodeAttributes, assigmentExplanation, openTime);
+ }
+
+ @Override
+ protected JobStats createTestInstance() {
+ return createRandomInstance();
+ }
+
+ @Override
+ protected JobStats doParseInstance(XContentParser parser) throws IOException {
+ return JobStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ return field -> !field.isEmpty();
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java
new file mode 100644
index 0000000000000..eb9e47af9ba28
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/stats/SimpleStatsTests.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml.job.stats;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+
+public class SimpleStatsTests extends AbstractXContentTestCase<SimpleStats> {
+
+ @Override
+ protected SimpleStats createTestInstance() {
+ return createRandom();
+ }
+
+ @Override
+ protected SimpleStats doParseInstance(XContentParser parser) throws IOException {
+ return SimpleStats.PARSER.parse(parser, null);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ public static SimpleStats createRandom() {
+ return new SimpleStats(randomDouble(), randomDouble(), randomDouble(), randomDouble());
+ }
+}
diff --git a/docs/build.gradle b/docs/build.gradle
index 78d3aac48ce95..c444d2c5b5899 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -19,10 +19,20 @@
apply plugin: 'elasticsearch.docs-test'
-/* List of files that have snippets that require a gold or platinum licence
-and therefore cannot be tested yet... */
+/* List of files that have snippets that will not work until platinum tests can occur ... */
buildRestTests.expectedUnconvertedCandidates = [
'reference/ml/transforms.asciidoc',
+ 'reference/ml/apis/delete-calendar-event.asciidoc',
+ 'reference/ml/apis/get-bucket.asciidoc',
+ 'reference/ml/apis/get-category.asciidoc',
+ 'reference/ml/apis/get-influencer.asciidoc',
+ 'reference/ml/apis/get-job-stats.asciidoc',
+ 'reference/ml/apis/get-overall-buckets.asciidoc',
+ 'reference/ml/apis/get-record.asciidoc',
+ 'reference/ml/apis/get-snapshot.asciidoc',
+ 'reference/ml/apis/post-data.asciidoc',
+ 'reference/ml/apis/revert-snapshot.asciidoc',
+ 'reference/ml/apis/update-snapshot.asciidoc',
]
integTestCluster {
@@ -863,3 +873,224 @@ buildRestTests.setups['sensor_prefab_data'] = '''
{"node.terms.value":"c","temperature.sum.value":202.0,"temperature.max.value":202.0,"timestamp.date_histogram.time_zone":"UTC","temperature.min.value":202.0,"timestamp.date_histogram._count":1,"timestamp.date_histogram.interval":"1h","_rollup.computed":["temperature.sum","temperature.min","voltage.avg","temperature.max","node.terms","timestamp.date_histogram"],"voltage.avg.value":4.0,"node.terms._count":1,"_rollup.version":1,"timestamp.date_histogram.timestamp":1516294800000,"voltage.avg._count":1.0,"_rollup.id":"sensor"}
'''
+buildRestTests.setups['sample_job'] = '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "sample_job"
+ body: >
+ {
+ "description" : "Very basic job",
+ "analysis_config" : {
+ "bucket_span":"10m",
+ "detectors" :[
+ {
+ "function": "count"
+ }
+ ]},
+ "data_description" : {
+ "time_field":"timestamp",
+ "time_format": "epoch_ms"
+ }
+ }
+'''
+buildRestTests.setups['farequote_index'] = '''
+ - do:
+ indices.create:
+ index: farequote
+ body:
+ settings:
+ number_of_shards: 1
+ number_of_replicas: 0
+ mappings:
+ metric:
+ properties:
+ time:
+ type: date
+ responsetime:
+ type: float
+ airline:
+ type: keyword
+ doc_count:
+ type: integer
+'''
+buildRestTests.setups['farequote_data'] = buildRestTests.setups['farequote_index'] + '''
+ - do:
+ bulk:
+ index: farequote
+ type: metric
+ refresh: true
+ body: |
+ {"index": {"_id":"1"}}
+ {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5}
+ {"index": {"_id":"2"}}
+ {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23}
+ {"index": {"_id":"3"}}
+ {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42}
+'''
+buildRestTests.setups['farequote_job'] = buildRestTests.setups['farequote_data'] + '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "farequote"
+ body: >
+ {
+ "analysis_config": {
+ "bucket_span": "60m",
+ "detectors": [{
+ "function": "mean",
+ "field_name": "responsetime",
+ "by_field_name": "airline"
+ }],
+ "summary_count_field_name": "doc_count"
+ },
+ "data_description": {
+ "time_field": "time"
+ }
+ }
+'''
+buildRestTests.setups['farequote_datafeed'] = buildRestTests.setups['farequote_job'] + '''
+ - do:
+ xpack.ml.put_datafeed:
+ datafeed_id: "datafeed-farequote"
+ body: >
+ {
+ "job_id":"farequote",
+ "indexes":"farequote"
+ }
+'''
+buildRestTests.setups['server_metrics_index'] = '''
+ - do:
+ indices.create:
+ index: server-metrics
+ body:
+ settings:
+ number_of_shards: 1
+ number_of_replicas: 0
+ mappings:
+ metric:
+ properties:
+ timestamp:
+ type: date
+ total:
+ type: long
+'''
+buildRestTests.setups['server_metrics_data'] = buildRestTests.setups['server_metrics_index'] + '''
+ - do:
+ bulk:
+ index: server-metrics
+ type: metric
+ refresh: true
+ body: |
+ {"index": {"_id":"1177"}}
+ {"timestamp":"2017-03-23T13:00:00","total":40476}
+ {"index": {"_id":"1178"}}
+ {"timestamp":"2017-03-23T13:00:00","total":15287}
+ {"index": {"_id":"1179"}}
+ {"timestamp":"2017-03-23T13:00:00","total":-776}
+ {"index": {"_id":"1180"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11366}
+ {"index": {"_id":"1181"}}
+ {"timestamp":"2017-03-23T13:00:00","total":3606}
+ {"index": {"_id":"1182"}}
+ {"timestamp":"2017-03-23T13:00:00","total":19006}
+ {"index": {"_id":"1183"}}
+ {"timestamp":"2017-03-23T13:00:00","total":38613}
+ {"index": {"_id":"1184"}}
+ {"timestamp":"2017-03-23T13:00:00","total":19516}
+ {"index": {"_id":"1185"}}
+ {"timestamp":"2017-03-23T13:00:00","total":-258}
+ {"index": {"_id":"1186"}}
+ {"timestamp":"2017-03-23T13:00:00","total":9551}
+ {"index": {"_id":"1187"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11217}
+ {"index": {"_id":"1188"}}
+ {"timestamp":"2017-03-23T13:00:00","total":22557}
+ {"index": {"_id":"1189"}}
+ {"timestamp":"2017-03-23T13:00:00","total":40508}
+ {"index": {"_id":"1190"}}
+ {"timestamp":"2017-03-23T13:00:00","total":11887}
+ {"index": {"_id":"1191"}}
+ {"timestamp":"2017-03-23T13:00:00","total":31659}
+'''
+buildRestTests.setups['server_metrics_job'] = buildRestTests.setups['server_metrics_data'] + '''
+ - do:
+ xpack.ml.put_job:
+ job_id: "total-requests"
+ body: >
+ {
+ "description" : "Total sum of requests",
+ "analysis_config" : {
+ "bucket_span":"10m",
+ "detectors" :[
+ {
+ "detector_description": "Sum of total",
+ "function": "sum",
+ "field_name": "total"
+ }
+ ]},
+ "data_description" : {
+ "time_field":"timestamp",
+ "time_format": "epoch_ms"
+ }
+ }
+'''
+buildRestTests.setups['server_metrics_datafeed'] = buildRestTests.setups['server_metrics_job'] + '''
+ - do:
+ xpack.ml.put_datafeed:
+ datafeed_id: "datafeed-total-requests"
+ body: >
+ {
+ "job_id":"total-requests",
+ "indexes":"server-metrics"
+ }
+'''
+buildRestTests.setups['server_metrics_openjob'] = buildRestTests.setups['server_metrics_datafeed'] + '''
+ - do:
+ xpack.ml.open_job:
+ job_id: "total-requests"
+'''
+buildRestTests.setups['server_metrics_startdf'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.start_datafeed:
+ datafeed_id: "datafeed-total-requests"
+'''
+buildRestTests.setups['calendar_outages'] = '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages'] + '''
+ - do:
+ xpack.ml.post_calendar_events:
+ calendar_id: "planned-outages"
+ body: >
+ { "description": "event 1", "start_time": "2017-12-01T00:00:00Z", "end_time": "2017-12-02T00:00:00Z", "calendar_id": "planned-outages" }
+
+
+'''
+buildRestTests.setups['calendar_outages_openjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+'''
+buildRestTests.setups['calendar_outages_addjob'] = buildRestTests.setups['server_metrics_openjob'] + '''
+ - do:
+ xpack.ml.put_calendar:
+ calendar_id: "planned-outages"
+ body: >
+ {
+ "job_ids": ["total-requests"]
+ }
+'''
+buildRestTests.setups['calendar_outages_addevent'] = buildRestTests.setups['calendar_outages_addjob'] + '''
+ - do:
+ xpack.ml.post_calendar_events:
+ calendar_id: "planned-outages"
+ body: >
+ { "events" : [
+ { "description": "event 1", "start_time": "1513641600000", "end_time": "1513728000000"},
+ { "description": "event 2", "start_time": "1513814400000", "end_time": "1513900800000"},
+ { "description": "event 3", "start_time": "1514160000000", "end_time": "1514246400000"}
+ ]}
+'''
+
+
diff --git a/docs/java-rest/high-level/document/reindex.asciidoc b/docs/java-rest/high-level/document/reindex.asciidoc
new file mode 100644
index 0000000000000..b6d98b42dc509
--- /dev/null
+++ b/docs/java-rest/high-level/document/reindex.asciidoc
@@ -0,0 +1,215 @@
+[[java-rest-high-document-reindex]]
+=== Reindex API
+
+[[java-rest-high-document-reindex-request]]
+==== Reindex Request
+
+A `ReindexRequest` can be used to copy documents from one or more indexes into a destination index.
+
+It requires an existing source index and a target index which may or may not exist pre-request. Reindex does not attempt
+to set up the destination index. It does not copy the settings of the source index. You should set up the destination
+index prior to running a _reindex action, including setting up mappings, shard counts, replicas, etc.
+
+The simplest form of a `ReindexRequest` looks like follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request]
+--------------------------------------------------
+<1> Creates the `ReindexRequest`
+<2> Adds a list of sources to copy from
+<3> Adds the destination index
+
+The `dest` element can be configured like the index API to control optimistic concurrency control. Just leaving out
+`versionType` (as above) or setting it to internal will cause Elasticsearch to blindly dump documents into the target.
+Setting `versionType` to external will cause Elasticsearch to preserve the version from the source, create any documents
+that are missing, and update any documents that have an older version in the destination index than they do in the
+source index.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-versionType]
+--------------------------------------------------
+<1> Set the versionType to `EXTERNAL`
+
+Setting `opType` to `create` will cause `_reindex` to only create missing documents in the target index. All existing
+documents will cause a version conflict. The default `opType` is `index`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-opType]
+--------------------------------------------------
+<1> Set the opType to `create`
+
+By default version conflicts abort the `_reindex` process but you can just count them by settings it to `proceed`
+in the request body
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-conflicts]
+--------------------------------------------------
+<1> Set `proceed` on version conflict
+
+You can limit the documents by adding a type to the source or by adding a query.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-typeOrQuery]
+--------------------------------------------------
+<1> Only copy `doc` type
+<2> Only copy documents which have field `user` set to `kimchy`
+
+It’s also possible to limit the number of processed documents by setting size.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-size]
+--------------------------------------------------
+<1> Only copy 10 documents
+
+By default `_reindex` uses batches of 1000. You can change the batch size with `sourceBatchSize`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-sourceSize]
+--------------------------------------------------
+<1> Use batches of 100 documents
+
+Reindex can also use the ingest feature by specifying a `pipeline`.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-pipeline]
+--------------------------------------------------
+<1> set pipeline to `my_pipeline`
+
+If you want a particular set of documents from the source index you’ll need to use sort. If possible, prefer a more
+selective query to size and sort.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-sort]
+--------------------------------------------------
+<1> add descending sort to `field1`
+<2> add ascending sort to `field2`
+
+`ReindexRequest` also supports a `script` that modifies the document. It allows you to also change the document's
+metadata. The following example illustrates that.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-script]
+--------------------------------------------------
+<1> `setScript` to increment the `likes` field on all documents with user `kimchy`.
+
+`ReindexRequest` supports reindexing from a remote Elasticsearch cluster. When using a remote cluster the query should be
+specified inside the `RemoteInfo` object and not using `setSourceQuery`. If both the remote info and the source query are
+set it results in a validation error during the request. The reason for this is that the remote Elasticsearch may not
+understand queries built by the modern query builders. The remote cluster support works all the way back to Elasticsearch
+0.90 and the query language has changed since then. When reaching older versions, it is safer to write the query by hand
+in JSON.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-remote]
+--------------------------------------------------
+<1> set remote elastic cluster
+
+`ReindexRequest` also helps in automatically parallelizing using `sliced-scroll` to
+slice on `_uid`. Use `setSlices` to specify the number of slices to use.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-slices]
+--------------------------------------------------
+<1> set number of slices to use
+
+`ReindexRequest` uses the `scroll` parameter to control how long it keeps the "search context" alive.
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-scroll]
+--------------------------------------------------
+<1> set scroll time
+
+
+==== Optional arguments
+In addition to the options above the following arguments can optionally be also provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-timeout]
+--------------------------------------------------
+<1> Timeout to wait for the reindex request to be performed as a `TimeValue`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-request-refresh]
+--------------------------------------------------
+<1> Refresh index after calling reindex
+
+
+[[java-rest-high-document-reindex-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute]
+--------------------------------------------------
+
+[[java-rest-high-document-reindex-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a reindex request requires both the `ReindexRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute-async]
+--------------------------------------------------
+<1> The `ReindexRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `BulkByScrollResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument and contains a list of individual results for each
+operation that was executed. Note that one or more operations might have
+failed while the others have been successfully executed.
+<2> Called when the whole `ReindexRequest` fails. In this case the raised
+exception is provided as an argument and no operation has been executed.
+
+[[java-rest-high-document-reindex-response]]
+==== Reindex Response
+
+The returned `BulkByScrollResponse` contains information about the executed operations and
+ allows to iterate over each result as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/CRUDDocumentationIT.java[reindex-response]
+--------------------------------------------------
+<1> Get total time taken
+<2> Check if the request timed out
+<3> Get total number of docs processed
+<4> Number of docs that were updated
+<5> Number of docs that were created
+<6> Number of docs that were deleted
+<7> Number of batches that were executed
+<8> Number of skipped docs
+<9> Number of version conflicts
+<10> Number of times request had to retry bulk index operations
+<11> Number of times request had to retry search operations
+<12> The total time this request has throttled itself not including the current throttle time if it is currently sleeping
+<13> Remaining delay of any current throttle sleep or 0 if not sleeping
+<14> Failures during search phase
+<15> Failures during bulk index operation
diff --git a/docs/java-rest/high-level/ml/flush-job.asciidoc b/docs/java-rest/high-level/ml/flush-job.asciidoc
new file mode 100644
index 0000000000000..1f815bba0d564
--- /dev/null
+++ b/docs/java-rest/high-level/ml/flush-job.asciidoc
@@ -0,0 +1,83 @@
+[[java-rest-high-x-pack-ml-flush-job]]
+=== Flush Job API
+
+The Flush Job API provides the ability to flush a {ml} job's
+buffered data in the cluster.
+It accepts a `FlushJobRequest` object and responds
+with a `FlushJobResponse` object.
+
+[[java-rest-high-x-pack-ml-flush-job-request]]
+==== Flush Job Request
+
+A `FlushJobRequest` object gets created with an existing non-null `jobId`.
+All other fields are optional for the request.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request]
+--------------------------------------------------
+<1> Constructing a new request referencing an existing `jobId`
+
+==== Optional Arguments
+
+The following arguments are optional.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-request-options]
+--------------------------------------------------
+<1> Set request to calculate the interim results
+<2> Set the advance time to flush up to the particular time value
+<3> Set the start time for the range of buckets on which
+to calculate the interim results (requires `calc_interim` to be `true`)
+<4> Set the end time for the range of buckets on which
+to calculate interim results (requires `calc_interim` to be `true`)
+<5> Set the skip time to skip a particular time value
+
+[[java-rest-high-x-pack-ml-flush-job-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-flush-job-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-execute-async]
+--------------------------------------------------
+<1> The `FlushJobRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion. A typical `ActionListener` for `FlushJobResponse` may
+look like
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
+
+[[java-rest-high-x-pack-ml-flush-job-response]]
+==== Flush Job Response
+
+A `FlushJobResponse` contains an acknowledgement and an optional end date for the
+last finalized bucket.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-flush-job-response]
+--------------------------------------------------
+<1> `isFlushed()` indicates if the job was successfully flushed or not.
+<2> `getLastFinalizedBucketEnd()` provides the timestamp
+(in milliseconds-since-the-epoch) of the end of the last bucket that was processed.
\ No newline at end of file
diff --git a/docs/java-rest/high-level/ml/get-job-stats.asciidoc b/docs/java-rest/high-level/ml/get-job-stats.asciidoc
new file mode 100644
index 0000000000000..90f7794ae765b
--- /dev/null
+++ b/docs/java-rest/high-level/ml/get-job-stats.asciidoc
@@ -0,0 +1,67 @@
+[[java-rest-high-x-pack-ml-get-job-stats]]
+=== Get Job Stats API
+
+The Get Job Stats API provides the ability to get any number of
+{ml} jobs' statistics in the cluster.
+It accepts a `GetJobStatsRequest` object and responds
+with a `GetJobStatsResponse` object.
+
+[[java-rest-high-x-pack-ml-get-job-stats-request]]
+==== Get Job Stats Request
+
+A `GetJobStatsRequest` object can have any number of `jobId`
+entries. However, they all must be non-null. An empty list is the same as
+requesting statistics for all jobs.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-request]
+--------------------------------------------------
+<1> Constructing a new request referencing existing `jobIds`; it can contain wildcards
+<2> Whether to ignore if a wildcard expression matches no jobs.
+ (This includes `_all` string or when no jobs have been specified)
+
+[[java-rest-high-x-pack-ml-get-job-stats-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-get-job-stats-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-execute-async]
+--------------------------------------------------
+<1> The `GetJobStatsRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion. A typical `ActionListener` for `GetJobStatsResponse` may
+look like
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
+
+[[java-rest-high-x-pack-ml-get-job-stats-response]]
+==== Get Job Stats Response
+The returned `GetJobStatsResponse` contains the requested job statistics:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-job-stats-response]
+--------------------------------------------------
+<1> `getCount()` indicates the number of job statistics found
+<2> `getJobStats()` is the collection of {ml} `JobStats` objects found
\ No newline at end of file
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index f970705b4fcd8..68320fbfe9ff3 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -15,6 +15,7 @@ Single document APIs::
Multi-document APIs::
* <>
* <>
+* <>
include::document/index.asciidoc[]
include::document/get.asciidoc[]
@@ -23,6 +24,7 @@ include::document/delete.asciidoc[]
include::document/update.asciidoc[]
include::document/bulk.asciidoc[]
include::document/multi-get.asciidoc[]
+include::document/reindex.asciidoc[]
== Search APIs
@@ -209,6 +211,8 @@ The Java High Level REST Client supports the following Machine Learning APIs:
* <>
* <>
* <>
+* <>
+* <>
* <>
* <>
@@ -217,6 +221,8 @@ include::ml/get-job.asciidoc[]
include::ml/delete-job.asciidoc[]
include::ml/open-job.asciidoc[]
include::ml/close-job.asciidoc[]
+include::ml/flush-job.asciidoc[]
+include::ml/get-job-stats.asciidoc[]
include::ml/get-buckets.asciidoc[]
include::ml/get-records.asciidoc[]
diff --git a/x-pack/docs/en/rest-api/ml/calendarresource.asciidoc b/docs/reference/ml/apis/calendarresource.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/calendarresource.asciidoc
rename to docs/reference/ml/apis/calendarresource.asciidoc
index 8edb43ed7a393..4279102cd35fc 100644
--- a/x-pack/docs/en/rest-api/ml/calendarresource.asciidoc
+++ b/docs/reference/ml/apis/calendarresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-calendar-resource]]
=== Calendar Resources
diff --git a/x-pack/docs/en/rest-api/ml/close-job.asciidoc b/docs/reference/ml/apis/close-job.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/close-job.asciidoc
rename to docs/reference/ml/apis/close-job.asciidoc
index 8e7e8eb0ce850..6dec6402c8766 100644
--- a/x-pack/docs/en/rest-api/ml/close-job.asciidoc
+++ b/docs/reference/ml/apis/close-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-close-job]]
=== Close Jobs API
++++
@@ -80,7 +81,7 @@ The following example closes the `total-requests` job:
POST _xpack/ml/anomaly_detectors/total-requests/_close
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the job is closed, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc b/docs/reference/ml/apis/datafeedresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc
rename to docs/reference/ml/apis/datafeedresource.asciidoc
index 0ffeb6bc89d72..6fe0b35d95185 100644
--- a/x-pack/docs/en/rest-api/ml/datafeedresource.asciidoc
+++ b/docs/reference/ml/apis/datafeedresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-datafeed-resource]]
=== {dfeed-cap} Resources
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc b/docs/reference/ml/apis/delete-calendar-event.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc
rename to docs/reference/ml/apis/delete-calendar-event.asciidoc
index ef8dad39dba70..8961726f57322 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar-event]]
=== Delete Events from Calendar API
++++
@@ -44,7 +45,7 @@ calendar:
DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
--------------------------------------------------
// CONSOLE
-// TEST[catch:missing]
+// TEST[skip:catch:missing]
When the event is removed, you receive the following results:
[source,js]
@@ -53,4 +54,3 @@ When the event is removed, you receive the following results:
"acknowledged": true
}
----
-// NOTCONSOLE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc b/docs/reference/ml/apis/delete-calendar-job.asciidoc
similarity index 93%
rename from x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc
rename to docs/reference/ml/apis/delete-calendar-job.asciidoc
index 94388c0c4b680..4362a82b5cb7e 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar-job.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar-job]]
=== Delete Jobs from Calendar API
++++
@@ -38,7 +39,7 @@ calendar and `total-requests` job:
DELETE _xpack/ml/calendars/planned-outages/jobs/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
When the job is removed from the calendar, you receive the following
results:
@@ -50,4 +51,4 @@ results:
"job_ids": []
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc b/docs/reference/ml/apis/delete-calendar.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc
rename to docs/reference/ml/apis/delete-calendar.asciidoc
index f7673b545748b..9f9f3457f24d2 100644
--- a/x-pack/docs/en/rest-api/ml/delete-calendar.asciidoc
+++ b/docs/reference/ml/apis/delete-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-calendar]]
=== Delete Calendar API
++++
@@ -40,7 +41,7 @@ The following example deletes the `planned-outages` calendar:
DELETE _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages]
+// TEST[skip:setup:calendar_outages]
When the calendar is deleted, you receive the following results:
[source,js]
@@ -49,4 +50,4 @@ When the calendar is deleted, you receive the following results:
"acknowledged": true
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc b/docs/reference/ml/apis/delete-datafeed.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc
rename to docs/reference/ml/apis/delete-datafeed.asciidoc
index db4fd5c177aed..996d2c7dd2eaf 100644
--- a/x-pack/docs/en/rest-api/ml/delete-datafeed.asciidoc
+++ b/docs/reference/ml/apis/delete-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-datafeed]]
=== Delete {dfeeds-cap} API
++++
@@ -47,7 +48,7 @@ The following example deletes the `datafeed-total-requests` {dfeed}:
DELETE _xpack/ml/datafeeds/datafeed-total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
When the {dfeed} is deleted, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/delete-filter.asciidoc b/docs/reference/ml/apis/delete-filter.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/delete-filter.asciidoc
rename to docs/reference/ml/apis/delete-filter.asciidoc
index b58d2980b888a..21e35b66076f6 100644
--- a/x-pack/docs/en/rest-api/ml/delete-filter.asciidoc
+++ b/docs/reference/ml/apis/delete-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-filter]]
=== Delete Filter API
++++
@@ -41,7 +42,7 @@ The following example deletes the `safe_domains` filter:
DELETE _xpack/ml/filters/safe_domains
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
When the filter is deleted, you receive the following results:
[source,js]
@@ -50,4 +51,4 @@ When the filter is deleted, you receive the following results:
"acknowledged": true
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/delete-job.asciidoc b/docs/reference/ml/apis/delete-job.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/delete-job.asciidoc
rename to docs/reference/ml/apis/delete-job.asciidoc
index c01b08545b638..d5ef120ad040b 100644
--- a/x-pack/docs/en/rest-api/ml/delete-job.asciidoc
+++ b/docs/reference/ml/apis/delete-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-job]]
=== Delete Jobs API
++++
@@ -56,7 +57,7 @@ The following example deletes the `total-requests` job:
DELETE _xpack/ml/anomaly_detectors/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job is deleted, you receive the following results:
[source,js]
@@ -65,4 +66,4 @@ When the job is deleted, you receive the following results:
"acknowledged": true
}
----
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc b/docs/reference/ml/apis/delete-snapshot.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc
rename to docs/reference/ml/apis/delete-snapshot.asciidoc
index 2ab0116fe74d9..96a3590054557 100644
--- a/x-pack/docs/en/rest-api/ml/delete-snapshot.asciidoc
+++ b/docs/reference/ml/apis/delete-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-delete-snapshot]]
=== Delete Model Snapshots API
++++
@@ -32,7 +33,6 @@ the `model_snapshot_id` in the results from the get jobs API.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see {xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Examples
@@ -53,3 +53,4 @@ When the snapshot is deleted, you receive the following results:
"acknowledged": true
}
----
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/eventresource.asciidoc b/docs/reference/ml/apis/eventresource.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/eventresource.asciidoc
rename to docs/reference/ml/apis/eventresource.asciidoc
index c9ab78964213e..a1e96f5c25a0a 100644
--- a/x-pack/docs/en/rest-api/ml/eventresource.asciidoc
+++ b/docs/reference/ml/apis/eventresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-event-resource]]
=== Scheduled Event Resources
diff --git a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc b/docs/reference/ml/apis/filterresource.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/filterresource.asciidoc
rename to docs/reference/ml/apis/filterresource.asciidoc
index e942447c1ee60..e67c92dc8d096 100644
--- a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc
+++ b/docs/reference/ml/apis/filterresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-filter-resource]]
=== Filter Resources
diff --git a/x-pack/docs/en/rest-api/ml/flush-job.asciidoc b/docs/reference/ml/apis/flush-job.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/flush-job.asciidoc
rename to docs/reference/ml/apis/flush-job.asciidoc
index 934a2d81b1778..f19d2aa648f68 100644
--- a/x-pack/docs/en/rest-api/ml/flush-job.asciidoc
+++ b/docs/reference/ml/apis/flush-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-flush-job]]
=== Flush Jobs API
++++
@@ -74,7 +75,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the operation succeeds, you receive the following results:
[source,js]
@@ -84,7 +85,7 @@ When the operation succeeds, you receive the following results:
"last_finalized_bucket_end": 1455234900000
}
----
-// TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
+// TESTRESPONSE[s/"last_finalized_bucket_end": 1455234900000/"last_finalized_bucket_end": $body.last_finalized_bucket_end/]
The `last_finalized_bucket_end` provides the timestamp (in
milliseconds-since-the-epoch) of the end of the last bucket that was processed.
@@ -101,7 +102,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_flush
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the operation succeeds, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/forecast.asciidoc b/docs/reference/ml/apis/forecast.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/forecast.asciidoc
rename to docs/reference/ml/apis/forecast.asciidoc
index 99647ecae1b25..197876f3f04a7 100644
--- a/x-pack/docs/en/rest-api/ml/forecast.asciidoc
+++ b/docs/reference/ml/apis/forecast.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-forecast]]
=== Forecast Jobs API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-bucket.asciidoc b/docs/reference/ml/apis/get-bucket.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/get-bucket.asciidoc
rename to docs/reference/ml/apis/get-bucket.asciidoc
index 95b05ff7f5dd2..3a276c13e895b 100644
--- a/x-pack/docs/en/rest-api/ml/get-bucket.asciidoc
+++ b/docs/reference/ml/apis/get-bucket.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-bucket]]
=== Get Buckets API
++++
@@ -81,7 +82,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc b/docs/reference/ml/apis/get-calendar-event.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc
rename to docs/reference/ml/apis/get-calendar-event.asciidoc
index e89173c3382d9..43dd74e47c977 100644
--- a/x-pack/docs/en/rest-api/ml/get-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/get-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-calendar-event]]
=== Get Scheduled Events API
++++
@@ -66,7 +67,7 @@ The following example gets information about the scheduled events in the
GET _xpack/ml/calendars/planned-outages/events
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addevent]
+// TEST[skip:setup:calendar_outages_addevent]
The API returns the following results:
diff --git a/x-pack/docs/en/rest-api/ml/get-calendar.asciidoc b/docs/reference/ml/apis/get-calendar.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/get-calendar.asciidoc
rename to docs/reference/ml/apis/get-calendar.asciidoc
index ae95fd9968893..f86875f326cd9 100644
--- a/x-pack/docs/en/rest-api/ml/get-calendar.asciidoc
+++ b/docs/reference/ml/apis/get-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-calendar]]
=== Get Calendars API
++++
@@ -62,7 +63,7 @@ calendar:
GET _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
The API returns the following results:
[source,js]
@@ -79,4 +80,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/get-category.asciidoc b/docs/reference/ml/apis/get-category.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-category.asciidoc
rename to docs/reference/ml/apis/get-category.asciidoc
index 13f274133c0d1..e5d6fe16802a1 100644
--- a/x-pack/docs/en/rest-api/ml/get-category.asciidoc
+++ b/docs/reference/ml/apis/get-category.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-category]]
=== Get Categories API
++++
@@ -18,7 +19,6 @@ Retrieves job results for one or more categories.
For more information about categories, see
{xpack-ref}/ml-configuring-categories.html[Categorizing Log Messages].
-//<>.
==== Path Parameters
@@ -56,7 +56,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc b/docs/reference/ml/apis/get-datafeed-stats.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc
rename to docs/reference/ml/apis/get-datafeed-stats.asciidoc
index 2869e8222f86f..9ca67cc17fb44 100644
--- a/x-pack/docs/en/rest-api/ml/get-datafeed-stats.asciidoc
+++ b/docs/reference/ml/apis/get-datafeed-stats.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-datafeed-stats]]
=== Get {dfeed-cap} Statistics API
++++
@@ -66,7 +67,7 @@ The following example gets usage information for the
GET _xpack/ml/datafeeds/datafeed-total-requests/_stats
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
The API returns the following results:
[source,js]
@@ -97,4 +98,4 @@ The API returns the following results:
// TESTRESPONSE[s/"node-0"/$body.$_path/]
// TESTRESPONSE[s/"hoXMLZB0RWKfR9UPPUCxXX"/$body.$_path/]
// TESTRESPONSE[s/"127.0.0.1:9300"/$body.$_path/]
-// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
+// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc b/docs/reference/ml/apis/get-datafeed.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc
rename to docs/reference/ml/apis/get-datafeed.asciidoc
index 0fa51773fd162..db5f4249669bb 100644
--- a/x-pack/docs/en/rest-api/ml/get-datafeed.asciidoc
+++ b/docs/reference/ml/apis/get-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-datafeed]]
=== Get {dfeeds-cap} API
++++
@@ -60,7 +61,7 @@ The following example gets configuration information for the
GET _xpack/ml/datafeeds/datafeed-total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
The API returns the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc b/docs/reference/ml/apis/get-filter.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/get-filter.asciidoc
rename to docs/reference/ml/apis/get-filter.asciidoc
index b4699e9d622cf..2dbb5d16cc5a4 100644
--- a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc
+++ b/docs/reference/ml/apis/get-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-filter]]
=== Get Filters API
++++
@@ -62,7 +63,7 @@ filter:
GET _xpack/ml/filters/safe_domains
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
The API returns the following results:
[source,js]
@@ -81,4 +82,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/get-influencer.asciidoc b/docs/reference/ml/apis/get-influencer.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-influencer.asciidoc
rename to docs/reference/ml/apis/get-influencer.asciidoc
index bffd2b8e09633..182cca7aa9917 100644
--- a/x-pack/docs/en/rest-api/ml/get-influencer.asciidoc
+++ b/docs/reference/ml/apis/get-influencer.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-influencer]]
=== Get Influencers API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc b/docs/reference/ml/apis/get-job-stats.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc
rename to docs/reference/ml/apis/get-job-stats.asciidoc
index bd59ee8b258fa..509d9448a693e 100644
--- a/x-pack/docs/en/rest-api/ml/get-job-stats.asciidoc
+++ b/docs/reference/ml/apis/get-job-stats.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-job-stats]]
=== Get Job Statistics API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-job.asciidoc b/docs/reference/ml/apis/get-job.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/get-job.asciidoc
rename to docs/reference/ml/apis/get-job.asciidoc
index b1329bd9b19c3..3bb74f066efd1 100644
--- a/x-pack/docs/en/rest-api/ml/get-job.asciidoc
+++ b/docs/reference/ml/apis/get-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-job]]
=== Get Jobs API
++++
@@ -59,7 +60,7 @@ The following example gets configuration information for the `total-requests` jo
GET _xpack/ml/anomaly_detectors/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
The API returns the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc b/docs/reference/ml/apis/get-overall-buckets.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc
rename to docs/reference/ml/apis/get-overall-buckets.asciidoc
index f2581f4904e37..f4818f3bbbe44 100644
--- a/x-pack/docs/en/rest-api/ml/get-overall-buckets.asciidoc
+++ b/docs/reference/ml/apis/get-overall-buckets.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-overall-buckets]]
=== Get Overall Buckets API
++++
@@ -93,7 +94,6 @@ that stores the results. The `machine_learning_admin` and `machine_learning_user
roles provide these privileges. For more information, see
{xpack-ref}/security-privileges.html[Security Privileges] and
{xpack-ref}/built-in-roles.html[Built-in Roles].
-//<> and <>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/get-record.asciidoc b/docs/reference/ml/apis/get-record.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-record.asciidoc
rename to docs/reference/ml/apis/get-record.asciidoc
index 1870b44159760..199cce1548427 100644
--- a/x-pack/docs/en/rest-api/ml/get-record.asciidoc
+++ b/docs/reference/ml/apis/get-record.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-record]]
=== Get Records API
++++
diff --git a/x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc b/docs/reference/ml/apis/get-snapshot.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc
rename to docs/reference/ml/apis/get-snapshot.asciidoc
index 6f76096cf29d5..74806f7a73a9d 100644
--- a/x-pack/docs/en/rest-api/ml/get-snapshot.asciidoc
+++ b/docs/reference/ml/apis/get-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-get-snapshot]]
=== Get Model Snapshots API
++++
diff --git a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc b/docs/reference/ml/apis/jobcounts.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/jobcounts.asciidoc
rename to docs/reference/ml/apis/jobcounts.asciidoc
index d343cc23ae0ad..d0169e228d549 100644
--- a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc
+++ b/docs/reference/ml/apis/jobcounts.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-jobstats]]
=== Job Statistics
diff --git a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc b/docs/reference/ml/apis/jobresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/jobresource.asciidoc
rename to docs/reference/ml/apis/jobresource.asciidoc
index 5b109b1c21d78..e0c314724e762 100644
--- a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc
+++ b/docs/reference/ml/apis/jobresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-job-resource]]
=== Job Resources
diff --git a/x-pack/docs/en/rest-api/ml-api.asciidoc b/docs/reference/ml/apis/ml-api.asciidoc
similarity index 61%
rename from x-pack/docs/en/rest-api/ml-api.asciidoc
rename to docs/reference/ml/apis/ml-api.asciidoc
index b48e9f934042d..b8509f221524c 100644
--- a/x-pack/docs/en/rest-api/ml-api.asciidoc
+++ b/docs/reference/ml/apis/ml-api.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-apis]]
== Machine Learning APIs
@@ -70,57 +71,57 @@ machine learning APIs and in advanced job configuration options in Kibana.
* <>
//ADD
-include::ml/post-calendar-event.asciidoc[]
-include::ml/put-calendar-job.asciidoc[]
+include::post-calendar-event.asciidoc[]
+include::put-calendar-job.asciidoc[]
//CLOSE
-include::ml/close-job.asciidoc[]
+include::close-job.asciidoc[]
//CREATE
-include::ml/put-calendar.asciidoc[]
-include::ml/put-datafeed.asciidoc[]
-include::ml/put-filter.asciidoc[]
-include::ml/put-job.asciidoc[]
+include::put-calendar.asciidoc[]
+include::put-datafeed.asciidoc[]
+include::put-filter.asciidoc[]
+include::put-job.asciidoc[]
//DELETE
-include::ml/delete-calendar.asciidoc[]
-include::ml/delete-datafeed.asciidoc[]
-include::ml/delete-calendar-event.asciidoc[]
-include::ml/delete-filter.asciidoc[]
-include::ml/delete-job.asciidoc[]
-include::ml/delete-calendar-job.asciidoc[]
-include::ml/delete-snapshot.asciidoc[]
+include::delete-calendar.asciidoc[]
+include::delete-datafeed.asciidoc[]
+include::delete-calendar-event.asciidoc[]
+include::delete-filter.asciidoc[]
+include::delete-job.asciidoc[]
+include::delete-calendar-job.asciidoc[]
+include::delete-snapshot.asciidoc[]
//FLUSH
-include::ml/flush-job.asciidoc[]
+include::flush-job.asciidoc[]
//FORECAST
-include::ml/forecast.asciidoc[]
+include::forecast.asciidoc[]
//GET
-include::ml/get-calendar.asciidoc[]
-include::ml/get-bucket.asciidoc[]
-include::ml/get-overall-buckets.asciidoc[]
-include::ml/get-category.asciidoc[]
-include::ml/get-datafeed.asciidoc[]
-include::ml/get-datafeed-stats.asciidoc[]
-include::ml/get-influencer.asciidoc[]
-include::ml/get-job.asciidoc[]
-include::ml/get-job-stats.asciidoc[]
-include::ml/get-snapshot.asciidoc[]
-include::ml/get-calendar-event.asciidoc[]
-include::ml/get-filter.asciidoc[]
-include::ml/get-record.asciidoc[]
+include::get-calendar.asciidoc[]
+include::get-bucket.asciidoc[]
+include::get-overall-buckets.asciidoc[]
+include::get-category.asciidoc[]
+include::get-datafeed.asciidoc[]
+include::get-datafeed-stats.asciidoc[]
+include::get-influencer.asciidoc[]
+include::get-job.asciidoc[]
+include::get-job-stats.asciidoc[]
+include::get-snapshot.asciidoc[]
+include::get-calendar-event.asciidoc[]
+include::get-filter.asciidoc[]
+include::get-record.asciidoc[]
//OPEN
-include::ml/open-job.asciidoc[]
+include::open-job.asciidoc[]
//POST
-include::ml/post-data.asciidoc[]
+include::post-data.asciidoc[]
//PREVIEW
-include::ml/preview-datafeed.asciidoc[]
+include::preview-datafeed.asciidoc[]
//REVERT
-include::ml/revert-snapshot.asciidoc[]
+include::revert-snapshot.asciidoc[]
//START/STOP
-include::ml/start-datafeed.asciidoc[]
-include::ml/stop-datafeed.asciidoc[]
+include::start-datafeed.asciidoc[]
+include::stop-datafeed.asciidoc[]
//UPDATE
-include::ml/update-datafeed.asciidoc[]
-include::ml/update-filter.asciidoc[]
-include::ml/update-job.asciidoc[]
-include::ml/update-snapshot.asciidoc[]
+include::update-datafeed.asciidoc[]
+include::update-filter.asciidoc[]
+include::update-job.asciidoc[]
+include::update-snapshot.asciidoc[]
//VALIDATE
-//include::ml/validate-detector.asciidoc[]
-//include::ml/validate-job.asciidoc[]
+//include::validate-detector.asciidoc[]
+//include::validate-job.asciidoc[]
diff --git a/x-pack/docs/en/rest-api/ml/open-job.asciidoc b/docs/reference/ml/apis/open-job.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/open-job.asciidoc
rename to docs/reference/ml/apis/open-job.asciidoc
index 59d5568ac775a..c1e5977b734fd 100644
--- a/x-pack/docs/en/rest-api/ml/open-job.asciidoc
+++ b/docs/reference/ml/apis/open-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-open-job]]
=== Open Jobs API
++++
@@ -56,7 +57,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_open
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job opens, you receive the following results:
[source,js]
@@ -65,5 +66,4 @@ When the job opens, you receive the following results:
"opened": true
}
----
-//CONSOLE
// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc b/docs/reference/ml/apis/post-calendar-event.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc
rename to docs/reference/ml/apis/post-calendar-event.asciidoc
index 41af0841d2e83..998db409fc7d6 100644
--- a/x-pack/docs/en/rest-api/ml/post-calendar-event.asciidoc
+++ b/docs/reference/ml/apis/post-calendar-event.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-post-calendar-event]]
=== Add Events to Calendar API
++++
@@ -52,7 +53,7 @@ POST _xpack/ml/calendars/planned-outages/events
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_addjob]
+// TEST[skip:setup:calendar_outages_addjob]
The API returns the following results:
@@ -81,7 +82,7 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
For more information about these properties, see
<>.
diff --git a/x-pack/docs/en/rest-api/ml/post-data.asciidoc b/docs/reference/ml/apis/post-data.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/post-data.asciidoc
rename to docs/reference/ml/apis/post-data.asciidoc
index 40354d7f6f760..6a5a3d3d6cb5e 100644
--- a/x-pack/docs/en/rest-api/ml/post-data.asciidoc
+++ b/docs/reference/ml/apis/post-data.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-post-data]]
=== Post Data to Jobs API
++++
diff --git a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc b/docs/reference/ml/apis/preview-datafeed.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc
rename to docs/reference/ml/apis/preview-datafeed.asciidoc
index 637b506cb9af7..7b9eccd9a5920 100644
--- a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc
+++ b/docs/reference/ml/apis/preview-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-preview-datafeed]]
=== Preview {dfeeds-cap} API
++++
@@ -53,7 +54,7 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}:
GET _xpack/ml/datafeeds/datafeed-farequote/_preview
--------------------------------------------------
// CONSOLE
-// TEST[setup:farequote_datafeed]
+// TEST[skip:setup:farequote_datafeed]
The data that is returned for this example is as follows:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc b/docs/reference/ml/apis/put-calendar-job.asciidoc
similarity index 93%
rename from x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc
rename to docs/reference/ml/apis/put-calendar-job.asciidoc
index 6940957b15926..0563047043ae4 100644
--- a/x-pack/docs/en/rest-api/ml/put-calendar-job.asciidoc
+++ b/docs/reference/ml/apis/put-calendar-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-calendar-job]]
=== Add Jobs to Calendar API
++++
@@ -38,7 +39,7 @@ The following example associates the `planned-outages` calendar with the
PUT _xpack/ml/calendars/planned-outages/jobs/total-requests
--------------------------------------------------
// CONSOLE
-// TEST[setup:calendar_outages_openjob]
+// TEST[skip:setup:calendar_outages_openjob]
The API returns the following results:
@@ -51,4 +52,4 @@ The API returns the following results:
]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-calendar.asciidoc b/docs/reference/ml/apis/put-calendar.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/put-calendar.asciidoc
rename to docs/reference/ml/apis/put-calendar.asciidoc
index a82da5a2c0c0a..06b8e55d7747c 100644
--- a/x-pack/docs/en/rest-api/ml/put-calendar.asciidoc
+++ b/docs/reference/ml/apis/put-calendar.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-calendar]]
=== Create Calendar API
++++
@@ -44,6 +45,7 @@ The following example creates the `planned-outages` calendar:
PUT _xpack/ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-license]
When the calendar is created, you receive the following results:
[source,js]
@@ -53,4 +55,4 @@ When the calendar is created, you receive the following results:
"job_ids": []
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc b/docs/reference/ml/apis/put-datafeed.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc
rename to docs/reference/ml/apis/put-datafeed.asciidoc
index 6b8ad932a1d42..b5c99fc8e36af 100644
--- a/x-pack/docs/en/rest-api/ml/put-datafeed.asciidoc
+++ b/docs/reference/ml/apis/put-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-datafeed]]
=== Create {dfeeds-cap} API
++++
@@ -107,7 +108,7 @@ PUT _xpack/ml/datafeeds/datafeed-total-requests
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the {dfeed} is created, you receive the following results:
[source,js]
@@ -132,4 +133,4 @@ When the {dfeed} is created, you receive the following results:
}
----
// TESTRESPONSE[s/"query_delay": "83474ms"/"query_delay": $body.query_delay/]
-// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
+// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/put-filter.asciidoc b/docs/reference/ml/apis/put-filter.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/put-filter.asciidoc
rename to docs/reference/ml/apis/put-filter.asciidoc
index d2982a56f612e..165fe9697584c 100644
--- a/x-pack/docs/en/rest-api/ml/put-filter.asciidoc
+++ b/docs/reference/ml/apis/put-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-filter]]
=== Create Filter API
++++
@@ -55,6 +56,7 @@ PUT _xpack/ml/filters/safe_domains
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-licence]
When the filter is created, you receive the following response:
[source,js]
@@ -65,4 +67,4 @@ When the filter is created, you receive the following response:
"items": ["*.google.com", "wikipedia.org"]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/put-job.asciidoc b/docs/reference/ml/apis/put-job.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/put-job.asciidoc
rename to docs/reference/ml/apis/put-job.asciidoc
index ea72396f9f56b..2158e590ec673 100644
--- a/x-pack/docs/en/rest-api/ml/put-job.asciidoc
+++ b/docs/reference/ml/apis/put-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-put-job]]
=== Create Jobs API
++++
@@ -104,6 +105,7 @@ PUT _xpack/ml/anomaly_detectors/total-requests
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:need-licence]
When the job is created, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc b/docs/reference/ml/apis/resultsresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/resultsresource.asciidoc
rename to docs/reference/ml/apis/resultsresource.asciidoc
index c28ed72aedb36..d3abd094be79c 100644
--- a/x-pack/docs/en/rest-api/ml/resultsresource.asciidoc
+++ b/docs/reference/ml/apis/resultsresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-results-resource]]
=== Results Resources
diff --git a/x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc b/docs/reference/ml/apis/revert-snapshot.asciidoc
similarity index 67%
rename from x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc
rename to docs/reference/ml/apis/revert-snapshot.asciidoc
index 3ba228cc274b1..d23ecc0c7dd96 100644
--- a/x-pack/docs/en/rest-api/ml/revert-snapshot.asciidoc
+++ b/docs/reference/ml/apis/revert-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-revert-snapshot]]
=== Revert Model Snapshots API
++++
@@ -22,33 +23,6 @@ then it might be appropriate to reset the model state to a time before this
event. For example, you might consider reverting to a saved snapshot after Black
Friday or a critical system failure.
-////
-To revert to a saved snapshot, you must follow this sequence:
-. Close the job
-. Revert to a snapshot
-. Open the job
-. Send new data to the job
-
-When reverting to a snapshot, there is a choice to make about whether or not
-you want to keep the results that were created between the time of the snapshot
-and the current time. In the case of Black Friday for instance, you might want
-to keep the results and carry on processing data from the current time,
-though without the models learning the one-off behavior and compensating for it.
-However, say in the event of a critical system failure and you decide to reset
-and models to a previous known good state and process data from that time,
-it makes sense to delete the intervening results for the known bad period and
-resend data from that earlier time.
-
-Any gaps in data since the snapshot time will be treated as nulls and not modeled.
-If there is a partial bucket at the end of the snapshot and/or at the beginning
-of the new input data, then this will be ignored and treated as a gap.
-
-For jobs with many entities, the model state may be very large.
-If a model state is several GB, this could take 10-20 mins to revert depending
-upon machine spec and resources. If this is the case, please ensure this time
-is planned for.
-Model size (in bytes) is available as part of the Job Resource Model Size Stats.
-////
IMPORTANT: Before you revert to a saved snapshot, you must close the job.
@@ -77,7 +51,6 @@ If you want to resend data, then delete the intervening results.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Examples
diff --git a/x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc b/docs/reference/ml/apis/snapshotresource.asciidoc
similarity index 99%
rename from x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc
rename to docs/reference/ml/apis/snapshotresource.asciidoc
index fbcf038f3e055..20ae15d269dbb 100644
--- a/x-pack/docs/en/rest-api/ml/snapshotresource.asciidoc
+++ b/docs/reference/ml/apis/snapshotresource.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-snapshot-resource]]
=== Model Snapshot Resources
diff --git a/x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc b/docs/reference/ml/apis/start-datafeed.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc
rename to docs/reference/ml/apis/start-datafeed.asciidoc
index fa3ea35a751f7..566e700dd043b 100644
--- a/x-pack/docs/en/rest-api/ml/start-datafeed.asciidoc
+++ b/docs/reference/ml/apis/start-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-start-datafeed]]
=== Start {dfeeds-cap} API
++++
@@ -79,7 +80,6 @@ of the latest processed record.
You must have `manage_ml`, or `manage` cluster privileges to use this API.
For more information, see
{xpack-ref}/security-privileges.html[Security Privileges].
-//<>.
==== Security Integration
@@ -101,7 +101,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_start
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_openjob]
+// TEST[skip:setup:server_metrics_openjob]
When the {dfeed} starts, you receive the following results:
[source,js]
@@ -110,5 +110,4 @@ When the {dfeed} starts, you receive the following results:
"started": true
}
----
-// CONSOLE
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc b/docs/reference/ml/apis/stop-datafeed.asciidoc
similarity index 92%
rename from x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc
rename to docs/reference/ml/apis/stop-datafeed.asciidoc
index 27872ff5a2080..7ea48974f2df1 100644
--- a/x-pack/docs/en/rest-api/ml/stop-datafeed.asciidoc
+++ b/docs/reference/ml/apis/stop-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-stop-datafeed]]
=== Stop {dfeeds-cap} API
++++
@@ -18,7 +19,6 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.
`POST _xpack/ml/datafeeds/_all/_stop`
-//TBD: Can there be spaces between the items in the list?
===== Description
@@ -63,14 +63,14 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_stop
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_startdf]
+// TEST[skip:setup:server_metrics_startdf]
When the {dfeed} stops, you receive the following results:
+
[source,js]
----
{
"stopped": true
}
----
-// CONSOLE
-// TESTRESPONSE
+// TESTRESPONSE
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc b/docs/reference/ml/apis/update-datafeed.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc
rename to docs/reference/ml/apis/update-datafeed.asciidoc
index bc9462347c1c0..be55d864c871e 100644
--- a/x-pack/docs/en/rest-api/ml/update-datafeed.asciidoc
+++ b/docs/reference/ml/apis/update-datafeed.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-datafeed]]
=== Update {dfeeds-cap} API
++++
@@ -106,7 +107,7 @@ POST _xpack/ml/datafeeds/datafeed-total-requests/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_datafeed]
+// TEST[skip:setup:server_metrics_datafeed]
When the {dfeed} is updated, you receive the full {dfeed} configuration with
with the updated values:
diff --git a/x-pack/docs/en/rest-api/ml/update-filter.asciidoc b/docs/reference/ml/apis/update-filter.asciidoc
similarity index 94%
rename from x-pack/docs/en/rest-api/ml/update-filter.asciidoc
rename to docs/reference/ml/apis/update-filter.asciidoc
index 1b6760dfed654..f551c8e923b89 100644
--- a/x-pack/docs/en/rest-api/ml/update-filter.asciidoc
+++ b/docs/reference/ml/apis/update-filter.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-filter]]
=== Update Filter API
++++
@@ -52,7 +53,7 @@ POST _xpack/ml/filters/safe_domains/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:ml_filter_safe_domains]
+// TEST[skip:setup:ml_filter_safe_domains]
The API returns the following results:
@@ -64,4 +65,4 @@ The API returns the following results:
"items": ["*.google.com", "*.myorg.com"]
}
----
-//TESTRESPONSE
+// TESTRESPONSE
diff --git a/x-pack/docs/en/rest-api/ml/update-job.asciidoc b/docs/reference/ml/apis/update-job.asciidoc
similarity index 97%
rename from x-pack/docs/en/rest-api/ml/update-job.asciidoc
rename to docs/reference/ml/apis/update-job.asciidoc
index f916aef8f85a0..4ef17f3b6cf07 100644
--- a/x-pack/docs/en/rest-api/ml/update-job.asciidoc
+++ b/docs/reference/ml/apis/update-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-job]]
=== Update Jobs API
++++
@@ -121,7 +122,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_update
}
--------------------------------------------------
// CONSOLE
-// TEST[setup:server_metrics_job]
+// TEST[skip:setup:server_metrics_job]
When the job is updated, you receive a summary of the job configuration
information, including the updated property values. For example:
@@ -131,7 +132,7 @@ information, including the updated property values. For example:
{
"job_id": "total-requests",
"job_type": "anomaly_detector",
- "job_version": "6.3.0",
+ "job_version": "6.5.0",
"groups": [
"group1",
"group2"
@@ -176,5 +177,5 @@ information, including the updated property values. For example:
"results_index_name": "shared"
}
----
-// TESTRESPONSE[s/"job_version": "6.3.0"/"job_version": $body.job_version/]
+// TESTRESPONSE[s/"job_version": "6.5.0"/"job_version": $body.job_version/]
// TESTRESPONSE[s/"create_time": 1518808660505/"create_time": $body.create_time/]
diff --git a/x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc b/docs/reference/ml/apis/update-snapshot.asciidoc
similarity index 98%
rename from x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc
rename to docs/reference/ml/apis/update-snapshot.asciidoc
index 8c98a7b732186..b58eebe810fa1 100644
--- a/x-pack/docs/en/rest-api/ml/update-snapshot.asciidoc
+++ b/docs/reference/ml/apis/update-snapshot.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-update-snapshot]]
=== Update Model Snapshots API
++++
diff --git a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc b/docs/reference/ml/apis/validate-detector.asciidoc
similarity index 95%
rename from x-pack/docs/en/rest-api/ml/validate-detector.asciidoc
rename to docs/reference/ml/apis/validate-detector.asciidoc
index ab8a0de442cf8..e525b1a1b2008 100644
--- a/x-pack/docs/en/rest-api/ml/validate-detector.asciidoc
+++ b/docs/reference/ml/apis/validate-detector.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-valid-detector]]
=== Validate Detectors API
++++
@@ -44,6 +45,7 @@ POST _xpack/ml/anomaly_detectors/_validate/detector
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:needs-licence]
When the validation completes, you receive the following results:
[source,js]
diff --git a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc b/docs/reference/ml/apis/validate-job.asciidoc
similarity index 96%
rename from x-pack/docs/en/rest-api/ml/validate-job.asciidoc
rename to docs/reference/ml/apis/validate-job.asciidoc
index 0ccc5bc04e1d1..b83260582602e 100644
--- a/x-pack/docs/en/rest-api/ml/validate-job.asciidoc
+++ b/docs/reference/ml/apis/validate-job.asciidoc
@@ -1,4 +1,5 @@
[role="xpack"]
+[testenv="platinum"]
[[ml-valid-job]]
=== Validate Jobs API
++++
@@ -55,6 +56,7 @@ POST _xpack/ml/anomaly_detectors/_validate
}
--------------------------------------------------
// CONSOLE
+// TEST[skip:needs-licence]
When the validation is complete, you receive the following results:
[source,js]
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc
index db26bc84a4cc4..b692e643844c1 100644
--- a/docs/reference/redirects.asciidoc
+++ b/docs/reference/redirects.asciidoc
@@ -544,3 +544,9 @@ See <>.
=== X-Pack commands
See <>.
+
+[role="exclude",id="ml-api-definitions"]
+=== Machine learning API definitions
+
+See <>.
+
diff --git a/docs/reference/rest-api/defs.asciidoc b/docs/reference/rest-api/defs.asciidoc
new file mode 100644
index 0000000000000..4eeedc5539992
--- /dev/null
+++ b/docs/reference/rest-api/defs.asciidoc
@@ -0,0 +1,27 @@
+[role="xpack"]
+[[api-definitions]]
+== Definitions
+
+These resource definitions are used in {ml} and {security} APIs and in {kib}
+advanced {ml} job configuration options.
+
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+* <>
+
+include::{es-repo-dir}/ml/apis/calendarresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/datafeedresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/filterresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/jobcounts.asciidoc[]
+include::{es-repo-dir}/ml/apis/snapshotresource.asciidoc[]
+include::{xes-repo-dir}/rest-api/security/role-mapping-resources.asciidoc[]
+include::{es-repo-dir}/ml/apis/resultsresource.asciidoc[]
+include::{es-repo-dir}/ml/apis/eventresource.asciidoc[]
diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc
index e1d607948e1e3..b80e8badf5bb3 100644
--- a/docs/reference/rest-api/index.asciidoc
+++ b/docs/reference/rest-api/index.asciidoc
@@ -22,8 +22,8 @@ include::info.asciidoc[]
include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[]
include::{es-repo-dir}/licensing/index.asciidoc[]
include::{es-repo-dir}/migration/migration.asciidoc[]
-include::{xes-repo-dir}/rest-api/ml-api.asciidoc[]
+include::{es-repo-dir}/ml/apis/ml-api.asciidoc[]
include::{es-repo-dir}/rollup/rollup-api.asciidoc[]
include::{xes-repo-dir}/rest-api/security.asciidoc[]
include::{xes-repo-dir}/rest-api/watcher.asciidoc[]
-include::{xes-repo-dir}/rest-api/defs.asciidoc[]
+include::defs.asciidoc[]
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateWithoutContentIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateWithoutContentIT.java
index cbc6adf6be227..023d3b246761d 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateWithoutContentIT.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateWithoutContentIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.script.mustache;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.test.rest.ESRestTestCase;
@@ -30,14 +31,14 @@ public class SearchTemplateWithoutContentIT extends ESRestTestCase {
public void testSearchTemplateMissingBody() throws IOException {
ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(
- randomBoolean() ? "POST" : "GET", "/_search/template"));
+ new Request(randomBoolean() ? "POST" : "GET", "/_search/template")));
assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode());
assertThat(responseException.getMessage(), containsString("request body or source parameter is required"));
}
public void testMultiSearchTemplateMissingBody() throws IOException {
ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(
- randomBoolean() ? "POST" : "GET", "/_msearch/template"));
+ new Request(randomBoolean() ? "POST" : "GET", "/_msearch/template")));
assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode());
assertThat(responseException.getMessage(), containsString("request body or source parameter is required"));
}
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
index a5520c90b0ff5..50d01535d7ff0 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.reindex;
import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@@ -118,7 +117,7 @@ protected ReindexRequest buildRequest(RestRequest request) throws IOException {
throw new IllegalArgumentException("_reindex doesn't support [pipeline] as a query parameter. "
+ "Specify it in the [dest] object instead.");
}
- ReindexRequest internal = new ReindexRequest(new SearchRequest(), new IndexRequest());
+ ReindexRequest internal = new ReindexRequest();
try (XContentParser parser = request.contentParser()) {
PARSER.parse(parser, internal, null);
}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java
index 4611f9dcbcddb..ec34da777b533 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java
@@ -21,7 +21,6 @@
import org.elasticsearch.index.reindex.ScrollableHitSource.Hit;
import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.settings.Settings;
/**
@@ -73,7 +72,7 @@ protected TestAction action() {
@Override
protected ReindexRequest request() {
- return new ReindexRequest(new SearchRequest(), new IndexRequest());
+ return new ReindexRequest();
}
private class TestAction extends TransportReindexAction.AsyncIndexBySearchAction {
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java
index 4e2834a771a94..ad6ea0ed26af8 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.reindex;
import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ScriptService;
@@ -106,7 +105,7 @@ public void testSetRouting() throws Exception {
@Override
protected ReindexRequest request() {
- return new ReindexRequest(new SearchRequest(), new IndexRequest());
+ return new ReindexRequest();
}
@Override
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java
index f580b1400c3bd..73745ca690d74 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexWithoutContentIT.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.reindex;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.test.rest.ESRestTestCase;
@@ -30,7 +31,7 @@ public class ReindexWithoutContentIT extends ESRestTestCase {
public void testReindexMissingBody() throws IOException {
ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(
- "POST", "/_reindex"));
+ new Request("POST", "/_reindex")));
assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode());
assertThat(responseException.getMessage(), containsString("request body is required"));
}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
index b06948b90581a..70e29ed12c5b4 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java
@@ -19,8 +19,6 @@
package org.elasticsearch.index.reindex;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
@@ -144,7 +142,7 @@ public void testReindexFromRemoteRequestParsing() throws IOException {
request = BytesReference.bytes(b);
}
try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
- ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
+ ReindexRequest r = new ReindexRequest();
RestReindexAction.PARSER.parse(p, r, null);
assertEquals("localhost", r.getRemoteInfo().getHost());
assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices());
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
index 97809c9bc8dc3..46aa6df120f3e 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java
@@ -20,7 +20,6 @@
package org.elasticsearch.index.reindex;
import org.elasticsearch.Version;
-import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@@ -47,7 +46,7 @@
*/
public class RoundTripTests extends ESTestCase {
public void testReindexRequest() throws IOException {
- ReindexRequest reindex = new ReindexRequest(new SearchRequest(), new IndexRequest());
+ ReindexRequest reindex = new ReindexRequest();
randomRequest(reindex);
reindex.getDestination().version(randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED, 12L, 1L, 123124L, 12L));
reindex.getDestination().index("test");
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java
index 2800b3f30c296..aa64881b63cc2 100644
--- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java
@@ -32,7 +32,6 @@
import java.io.IOException;
import java.nio.charset.Charset;
-import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
@@ -71,7 +70,7 @@ public void testBadRequest() throws IOException {
final ResponseException e =
expectThrows(
ResponseException.class,
- () -> client().performRequest(randomFrom("GET", "POST", "PUT"), path, Collections.emptyMap()));
+ () -> client().performRequest(new Request(randomFrom("GET", "POST", "PUT"), path)));
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(BAD_REQUEST.getStatus()));
assertThat(e, hasToString(containsString("too_long_frame_exception")));
assertThat(e, hasToString(matches("An HTTP line is larger than \\d+ bytes")));
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
index fb535d312cf65..c0615064de1be 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
@@ -28,11 +28,13 @@
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -42,6 +44,8 @@
import java.io.IOException;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
@@ -161,11 +165,11 @@ public static BulkItemResponse fromXContent(XContentParser parser, int id) throw
* Represents a failure.
*/
public static class Failure implements Writeable, ToXContentFragment {
- static final String INDEX_FIELD = "index";
- static final String TYPE_FIELD = "type";
- static final String ID_FIELD = "id";
- static final String CAUSE_FIELD = "cause";
- static final String STATUS_FIELD = "status";
+ public static final String INDEX_FIELD = "index";
+ public static final String TYPE_FIELD = "type";
+ public static final String ID_FIELD = "id";
+ public static final String CAUSE_FIELD = "cause";
+ public static final String STATUS_FIELD = "status";
private final String index;
private final String type;
@@ -175,6 +179,23 @@ public static class Failure implements Writeable, ToXContentFragment {
private final long seqNo;
private final boolean aborted;
+ public static ConstructingObjectParser<Failure, Void> PARSER =
+ new ConstructingObjectParser<>(
+ "bulk_failures",
+ true,
+ a ->
+ new Failure(
+ (String)a[0], (String)a[1], (String)a[2], (Exception)a[3], RestStatus.fromCode((int)a[4])
+ )
+ );
+ static {
+ PARSER.declareString(constructorArg(), new ParseField(INDEX_FIELD));
+ PARSER.declareString(constructorArg(), new ParseField(TYPE_FIELD));
+ PARSER.declareString(optionalConstructorArg(), new ParseField(ID_FIELD));
+ PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(CAUSE_FIELD));
+ PARSER.declareInt(constructorArg(), new ParseField(STATUS_FIELD));
+ }
+
/**
* For write failures before operation was assigned a sequence number.
*
@@ -322,6 +343,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+ public static Failure fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null);
+ }
+
@Override
public String toString() {
return Strings.toString(this);
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java
index 30c02194b5694..e952d56d23e0a 100644
--- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java
+++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java
@@ -252,6 +252,14 @@ public Self setTimeout(TimeValue timeout) {
return self();
}
+ /**
+ * Timeout to wait for the shards to be available for each bulk request.
+ */
+ public Self setTimeout(String timeout) {
+ this.timeout = TimeValue.parseTimeValue(timeout, this.timeout, getClass().getSimpleName() + ".timeout");
+ return self();
+ }
+
/**
* The number of shard copies that must be active before proceeding with the write.
*/
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java
index ac206c2c44f06..7fe60db2ddda7 100644
--- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java
+++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponse.java
@@ -19,14 +19,23 @@
package org.elasticsearch.index.reindex;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.index.reindex.BulkByScrollTask.Status;
import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
+import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
@@ -36,6 +45,7 @@
import static java.lang.Math.min;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.common.unit.TimeValue.timeValueNanos;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
* Response used for actions that index many documents using a scroll request.
@@ -47,6 +57,27 @@ public class BulkByScrollResponse extends ActionResponse implements ToXContentFr
private List<SearchFailure> searchFailures;
private boolean timedOut;
+ private static final String TOOK_FIELD = "took";
+ private static final String TIMED_OUT_FIELD = "timed_out";
+ private static final String FAILURES_FIELD = "failures";
+
+ @SuppressWarnings("unchecked")
+ private static final ObjectParser<BulkByScrollResponseBuilder, Void> PARSER =
+ new ObjectParser<>(
+ "bulk_by_scroll_response",
+ true,
+ BulkByScrollResponseBuilder::new
+ );
+ static {
+ PARSER.declareLong(BulkByScrollResponseBuilder::setTook, new ParseField(TOOK_FIELD));
+ PARSER.declareBoolean(BulkByScrollResponseBuilder::setTimedOut, new ParseField(TIMED_OUT_FIELD));
+ PARSER.declareObjectArray(
+ BulkByScrollResponseBuilder::setFailures, (p, c) -> parseFailure(p), new ParseField(FAILURES_FIELD)
+ );
+ // since the fields of BulkByScrollTask.Status are mixed into this response's top level, declare them on this parser as well
+ Status.declareFields(PARSER);
+ }
+
public BulkByScrollResponse() {
}
@@ -87,6 +118,10 @@ public long getCreated() {
return status.getCreated();
}
+ public long getTotal() {
+ return status.getTotal();
+ }
+
public long getDeleted() {
return status.getDeleted();
}
@@ -171,8 +206,8 @@ public void readFrom(StreamInput in) throws IOException {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- builder.field("took", took.millis());
- builder.field("timed_out", timedOut);
+ builder.field(TOOK_FIELD, took.millis());
+ builder.field(TIMED_OUT_FIELD, timedOut);
status.innerXContent(builder, params);
builder.startArray("failures");
for (Failure failure: bulkFailures) {
@@ -187,6 +222,80 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
return builder;
}
+ public static BulkByScrollResponse fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null).buildResponse();
+ }
+
+ private static Object parseFailure(XContentParser parser) throws IOException {
+ ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
+ Token token;
+ String index = null;
+ String type = null;
+ String id = null;
+ Integer status = null;
+ Integer shardId = null;
+ String nodeId = null;
+ ElasticsearchException bulkExc = null;
+ ElasticsearchException searchExc = null;
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ ensureExpectedToken(Token.FIELD_NAME, token, parser::getTokenLocation);
+ String name = parser.currentName();
+ token = parser.nextToken();
+ if (token == Token.START_ARRAY) {
+ parser.skipChildren();
+ } else if (token == Token.START_OBJECT) {
+ switch (name) {
+ case SearchFailure.REASON_FIELD:
+ bulkExc = ElasticsearchException.fromXContent(parser);
+ break;
+ case Failure.CAUSE_FIELD:
+ searchExc = ElasticsearchException.fromXContent(parser);
+ break;
+ default:
+ parser.skipChildren();
+ }
+ } else if (token == Token.VALUE_STRING) {
+ switch (name) {
+ // This field is the same as SearchFailure.index
+ case Failure.INDEX_FIELD:
+ index = parser.text();
+ break;
+ case Failure.TYPE_FIELD:
+ type = parser.text();
+ break;
+ case Failure.ID_FIELD:
+ id = parser.text();
+ break;
+ case SearchFailure.NODE_FIELD:
+ nodeId = parser.text();
+ break;
+ default:
+ // Do nothing
+ break;
+ }
+ } else if (token == Token.VALUE_NUMBER) {
+ switch (name) {
+ case Failure.STATUS_FIELD:
+ status = parser.intValue();
+ break;
+ case SearchFailure.SHARD_FIELD:
+ shardId = parser.intValue();
+ break;
+ default:
+ // Do nothing
+ break;
+ }
+ }
+ }
+ if (bulkExc != null) {
+ return new Failure(index, type, id, bulkExc, RestStatus.fromCode(status));
+ } else if (searchExc != null) {
+ return new SearchFailure(searchExc, index, shardId, nodeId);
+ } else {
+ throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
+ }
+ }
+
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponseBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponseBuilder.java
new file mode 100644
index 0000000000000..ad5bfd6e03cdf
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollResponseBuilder.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.reindex;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
+import org.elasticsearch.index.reindex.BulkByScrollTask.StatusBuilder;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Helps build a {@link BulkByScrollResponse}. Used by an instance of {@link ObjectParser} when parsing from XContent.
+ */
+class BulkByScrollResponseBuilder extends StatusBuilder {
+ private TimeValue took;
+ private BulkByScrollTask.Status status;
+ private List<Failure> bulkFailures = new ArrayList<>();
+ private List<SearchFailure> searchFailures = new ArrayList<>();
+ private boolean timedOut;
+
+ BulkByScrollResponseBuilder() {}
+
+ public void setTook(long took) {
+ setTook(new TimeValue(took, TimeUnit.MILLISECONDS));
+ }
+
+ public void setTook(TimeValue took) {
+ this.took = took;
+ }
+
+ public void setStatus(BulkByScrollTask.Status status) {
+ this.status = status;
+ }
+
+ public void setFailures(List