[ML] fix x-pack usage regression caused by index migration (#36936)
Changes the feature usage retrieval to use the job manager rather than
directly talking to the cluster state, because jobs can now be either in
the cluster state or stored in an index.

This is a follow-up to #36702 / #36698.
Hendrik Muhs committed Dec 31, 2018
1 parent 5d000ed commit 50950ce
Showing 5 changed files with 136 additions and 34 deletions.
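
The shape of the change, condensed from the diff below: the plugin now always hands out a JobManagerHolder (empty when ML is disabled or the node runs in transport client mode, wrapping the JobManager otherwise), and the feature-set usage code asks that holder for jobs instead of reading MlMetadata from the cluster state. The following is a minimal, self-contained sketch of that pattern only; Job, JobManager, UsageRetriever and the printed output are simplified stand-ins, not the Elasticsearch types from this commit.

```java
import java.util.Arrays;
import java.util.List;

// Simplified stand-ins for the real Elasticsearch types, for illustration only.
class Job {
    final String id;
    Job(String id) { this.id = id; }
}

class JobManager {
    // In the real code this expands jobs from cluster state *and* the ML config index.
    List<Job> expandJobs(String expression) {
        return Arrays.asList(new Job("job-1"), new Job("job-2"));
    }
}

class JobManagerHolder {
    private final JobManager instance;
    JobManagerHolder() { this.instance = null; }                      // ML disabled / transport client
    JobManagerHolder(JobManager jobManager) { this.instance = jobManager; }
    boolean isEmpty() { return instance == null; }
    JobManager getJobManager() {
        if (instance == null) {
            throw new IllegalStateException("Machine Learning is disabled");
        }
        return instance;
    }
}

class UsageRetriever {
    private final JobManagerHolder holder;
    UsageRetriever(JobManagerHolder holder) { this.holder = holder; }

    void execute() {
        if (holder.isEmpty()) {                                       // ML disabled: report empty usage
            System.out.println("ml usage: {}");
            return;
        }
        // was: MlMetadata.getMlMetadata(state).getJobs()
        List<Job> jobs = holder.getJobManager().expandJobs("_all");
        System.out.println("ml usage: " + jobs.size() + " jobs");
    }
}

public class UsageFlowSketch {
    public static void main(String[] args) {
        new UsageRetriever(new JobManagerHolder()).execute();                 // disabled path
        new UsageRetriever(new JobManagerHolder(new JobManager())).execute(); // enabled path
    }
}
```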
@@ -13,6 +13,7 @@
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner;
@@ -22,7 +23,9 @@
import org.junit.After;

import java.io.IOException;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
@@ -111,6 +114,21 @@ public void testGetJobs_GivenMultipleJobs() throws Exception {
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
}

// tests the _xpack/usage endpoint
public void testUsage() throws IOException {
createFarequoteJob("job-1");
createFarequoteJob("job-2");
Map<String, Object> usage = entityAsMap(client().performRequest(new Request("GET", "_xpack/usage")));
assertEquals(2, XContentMapValues.extractValue("ml.jobs._all.count", usage));
assertEquals(2, XContentMapValues.extractValue("ml.jobs.closed.count", usage));
Response openResponse = client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/job-1/_open"));
assertEquals(Collections.singletonMap("opened", true), entityAsMap(openResponse));
usage = entityAsMap(client().performRequest(new Request("GET", "_xpack/usage")));
assertEquals(2, XContentMapValues.extractValue("ml.jobs._all.count", usage));
assertEquals(1, XContentMapValues.extractValue("ml.jobs.closed.count", usage));
assertEquals(1, XContentMapValues.extractValue("ml.jobs.opened.count", usage));
}

private Response createFarequoteJob(String jobId) throws IOException {
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
request.setJsonEntity(
@@ -165,6 +165,7 @@
import org.elasticsearch.xpack.ml.datafeed.DatafeedManager;
import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider;
import org.elasticsearch.xpack.ml.job.JobManager;
import org.elasticsearch.xpack.ml.job.JobManagerHolder;
import org.elasticsearch.xpack.ml.job.UpdateJobProcessNotifier;
import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizer;
import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizerFactory;
@@ -372,7 +373,8 @@ public Collection<Object> createComponents(Client client, ClusterService cluster
NamedXContentRegistry xContentRegistry, Environment environment,
NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
if (enabled == false || transportClientMode || tribeNode || tribeNodeClient) {
return emptyList();
// special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager, empty if ML is disabled
return Collections.singletonList(new JobManagerHolder());
}

Auditor auditor = new Auditor(client, clusterService.getNodeName());
@@ -382,6 +384,9 @@ public Collection<Object> createComponents(Client client, ClusterService cluster
UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool);
JobManager jobManager = new JobManager(env, settings, jobResultsProvider, clusterService, auditor, threadPool, client, notifier);

// special holder for @link(MachineLearningFeatureSetUsage) which needs access to job manager if ML is enabled
JobManagerHolder jobManagerHolder = new JobManagerHolder(jobManager);

JobDataCountsPersister jobDataCountsPersister = new JobDataCountsPersister(client);
JobResultsPersister jobResultsPersister = new JobResultsPersister(client);

@@ -440,6 +445,7 @@ public Collection<Object> createComponents(Client client, ClusterService cluster
jobConfigProvider,
datafeedConfigProvider,
jobManager,
jobManagerHolder,
autodetectProcessManager,
new MlInitializationService(settings, threadPool, clusterService, client),
jobDataCountsPersister,
@@ -26,12 +26,12 @@
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.GetDatafeedsStatsAction;
import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.ml.job.JobManagerHolder;
import org.elasticsearch.xpack.ml.process.NativeController;
import org.elasticsearch.xpack.ml.process.NativeControllerHolder;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
@@ -48,6 +48,7 @@
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;

public class MachineLearningFeatureSet implements XPackFeatureSet {

@@ -61,15 +62,17 @@ public class MachineLearningFeatureSet implements XPackFeatureSet {
private final XPackLicenseState licenseState;
private final ClusterService clusterService;
private final Client client;
private final JobManagerHolder jobManagerHolder;
private final Map<String, Object> nativeCodeInfo;

@Inject
public MachineLearningFeatureSet(Environment environment, ClusterService clusterService, Client client,
@Nullable XPackLicenseState licenseState) {
@Nullable XPackLicenseState licenseState, JobManagerHolder jobManagerHolder) {
this.enabled = XPackSettings.MACHINE_LEARNING_ENABLED.get(environment.settings());
this.clusterService = Objects.requireNonNull(clusterService);
this.client = Objects.requireNonNull(client);
this.licenseState = licenseState;
this.jobManagerHolder = jobManagerHolder;
Map<String, Object> nativeCodeInfo = NativeController.UNKNOWN_NATIVE_CODE_INFO;
// Don't try to get the native code version if ML is disabled - it causes too much controversy
// if ML has been disabled because of some OS incompatibility. Also don't try to get the native
@@ -135,7 +138,7 @@ public Map<String, Object> nativeCodeInfo() {
@Override
public void usage(ActionListener<XPackFeatureSet.Usage> listener) {
ClusterState state = clusterService.state();
new Retriever(client, MlMetadata.getMlMetadata(state), available(), enabled(), mlNodeCount(state)).execute(listener);
new Retriever(client, jobManagerHolder, available(), enabled(), mlNodeCount(state)).execute(listener);
}

private int mlNodeCount(final ClusterState clusterState) {
@@ -156,16 +159,16 @@ private int mlNodeCount(final ClusterState clusterState) {
public static class Retriever {

private final Client client;
private final MlMetadata mlMetadata;
private final JobManagerHolder jobManagerHolder;
private final boolean available;
private final boolean enabled;
private Map<String, Object> jobsUsage;
private Map<String, Object> datafeedsUsage;
private int nodeCount;

public Retriever(Client client, MlMetadata mlMetadata, boolean available, boolean enabled, int nodeCount) {
public Retriever(Client client, JobManagerHolder jobManagerHolder, boolean available, boolean enabled, int nodeCount) {
this.client = Objects.requireNonNull(client);
this.mlMetadata = mlMetadata;
this.jobManagerHolder = jobManagerHolder;
this.available = available;
this.enabled = enabled;
this.jobsUsage = new LinkedHashMap<>();
@@ -174,7 +177,8 @@ public Retriever(Client client, MlMetadata mlMetadata, boolean available, boolea
}

public void execute(ActionListener<Usage> listener) {
if (enabled == false) {
// empty holder means either ML disabled or transport client mode
if (jobManagerHolder.isEmpty()) {
listener.onResponse(
new MachineLearningFeatureSetUsage(available, enabled, Collections.emptyMap(), Collections.emptyMap(), 0));
return;
@@ -194,20 +198,19 @@ public void execute(ActionListener<Usage> listener) {
GetJobsStatsAction.Request jobStatsRequest = new GetJobsStatsAction.Request(MetaData.ALL);
ActionListener<GetJobsStatsAction.Response> jobStatsListener = ActionListener.wrap(
response -> {
addJobsUsage(response);
GetDatafeedsStatsAction.Request datafeedStatsRequest =
new GetDatafeedsStatsAction.Request(GetDatafeedsStatsAction.ALL);
client.execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest,
datafeedStatsListener);
},
listener::onFailure
);
jobManagerHolder.getJobManager().expandJobs(MetaData.ALL, true, ActionListener.wrap(jobs -> {
addJobsUsage(response, jobs.results());
GetDatafeedsStatsAction.Request datafeedStatsRequest = new GetDatafeedsStatsAction.Request(
GetDatafeedsStatsAction.ALL);
client.execute(GetDatafeedsStatsAction.INSTANCE, datafeedStatsRequest, datafeedStatsListener);
}, listener::onFailure));
}, listener::onFailure);

// Step 0. Kick off the chain of callbacks by requesting jobs stats
client.execute(GetJobsStatsAction.INSTANCE, jobStatsRequest, jobStatsListener);
}

private void addJobsUsage(GetJobsStatsAction.Response response) {
private void addJobsUsage(GetJobsStatsAction.Response response, List<Job> jobs) {
StatsAccumulator allJobsDetectorsStats = new StatsAccumulator();
StatsAccumulator allJobsModelSizeStats = new StatsAccumulator();
ForecastStats allJobsForecastStats = new ForecastStats();
@@ -217,11 +220,11 @@
Map<JobState, StatsAccumulator> modelSizeStatsByState = new HashMap<>();
Map<JobState, ForecastStats> forecastStatsByState = new HashMap<>();

Map<String, Job> jobs = mlMetadata.getJobs();
List<GetJobsStatsAction.Response.JobStats> jobsStats = response.getResponse().results();
Map<String, Job> jobMap = jobs.stream().collect(Collectors.toMap(Job::getId, item -> item));
for (GetJobsStatsAction.Response.JobStats jobStats : jobsStats) {
ModelSizeStats modelSizeStats = jobStats.getModelSizeStats();
int detectorsCount = jobs.get(jobStats.getJobId()).getAnalysisConfig()
int detectorsCount = jobMap.get(jobStats.getJobId()).getAnalysisConfig()
.getDetectors().size();
double modelSize = modelSizeStats == null ? 0.0
: jobStats.getModelSizeStats().getModelBytes();
@@ -0,0 +1,46 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.xpack.ml.job;

import org.elasticsearch.ElasticsearchException;

public class JobManagerHolder {

private final JobManager instance;

/**
* Create an empty holder which also means that no job manager gets created.
*/
public JobManagerHolder() {
this.instance = null;
}

/**
* Create a holder that allows lazy creation of a job manager.
*
*/
public JobManagerHolder(JobManager jobManager) {
this.instance = jobManager;
}

public boolean isEmpty() {
return instance == null;
}

/**
* Get the instance of the held JobManager.
*
* @return job manager instance
* @throws ElasticsearchException if holder has been created with the empty constructor
*/
public JobManager getJobManager() {
if (instance == null) {
throw new ElasticsearchException("Tried to get job manager although Machine Learning is disabled");
}
return instance;
}
}
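
A possible way to exercise the new class, sketched as a plain test for the empty-holder path only (constructing a populated holder would require a full JobManager, which needs the wiring shown in createComponents above). The class name JobManagerHolderTests and its main method are hypothetical; they are not part of this commit.

```java
package org.elasticsearch.xpack.ml.job;

import org.elasticsearch.ElasticsearchException;

// Hypothetical test, not part of this commit: verifies the empty-holder
// behaviour that MachineLearningFeatureSet relies on when ML is disabled.
public class JobManagerHolderTests {

    public void testEmptyHolder() {
        JobManagerHolder holder = new JobManagerHolder();   // as returned by createComponents() when ML is off
        if (holder.isEmpty() == false) {
            throw new AssertionError("expected an empty holder");
        }
        try {
            holder.getJobManager();                          // must fail rather than hand out null
            throw new AssertionError("expected ElasticsearchException");
        } catch (ElasticsearchException expected) {
            // "Tried to get job manager although Machine Learning is disabled"
        }
    }

    public static void main(String[] args) {
        new JobManagerHolderTests().testEmptyHolder();
    }
}
```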