Skip to content

Commit

Permalink
[ML] Prevent stack overflow while copying ML jobs and datafeeds (#36370)
Browse files Browse the repository at this point in the history
ML jobs and datafeeds wrap collections into their unmodifiable
equivalents in their constructor. However, the copying builder
does not make a copy of some of those collections resulting
in wrapping those again and again. This can eventually result
in a stack overflow.

This commit addresses this issue by copying the collections in
question in the copying builder constructor.

Closes #36360
  • Loading branch information
dimitris-athanasiou committed Dec 7, 2018
1 parent 12d15dd commit ea7c8d4
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -286,11 +286,11 @@ public Builder(DatafeedConfig config) {
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = config.indices;
this.types = config.types;
this.indices = config.indices == null ? null : new ArrayList<>(config.indices);
this.types = config.types == null ? null : new ArrayList<>(config.types);
this.query = config.query;
this.aggregations = config.aggregations;
this.scriptFields = config.scriptFields;
this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,10 @@
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
Expand Down Expand Up @@ -449,7 +451,7 @@ public Builder(String id) {
public Builder(Job job) {
this.id = job.getId();
this.jobType = job.getJobType();
this.groups = job.getGroups();
this.groups = new ArrayList<>(job.getGroups());
this.description = job.getDescription();
this.analysisConfig = job.getAnalysisConfig();
this.analysisLimits = job.getAnalysisLimits();
Expand All @@ -463,7 +465,7 @@ public Builder(Job job) {
this.backgroundPersistInterval = job.getBackgroundPersistInterval();
this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
this.resultsRetentionDays = job.getResultsRetentionDays();
this.customSettings = job.getCustomSettings();
this.customSettings = job.getCustomSettings() == null ? null : new HashMap<>(job.getCustomSettings());
this.modelSnapshotId = job.getModelSnapshotId();
this.resultsIndexName = job.getResultsIndexNameNoPrefix();
this.deleting = job.getDeleting();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
Expand Down Expand Up @@ -461,14 +462,14 @@ public Builder(DatafeedConfig config) {
this.jobId = config.jobId;
this.queryDelay = config.queryDelay;
this.frequency = config.frequency;
this.indices = config.indices;
this.types = config.types;
this.indices = new ArrayList<>(config.indices);
this.types = new ArrayList<>(config.types);
this.query = config.query;
this.aggregations = config.aggregations;
this.scriptFields = config.scriptFields;
this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields);
this.scrollSize = config.scrollSize;
this.chunkingConfig = config.chunkingConfig;
this.headers = config.headers;
this.headers = new HashMap<>(config.headers);
}

public void setId(String datafeedId) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -664,7 +665,7 @@ public Builder(Job job) {
this.id = job.getId();
this.jobType = job.getJobType();
this.jobVersion = job.getJobVersion();
this.groups = job.getGroups();
this.groups = new ArrayList<>(job.getGroups());
this.description = job.getDescription();
this.analysisConfig = job.getAnalysisConfig();
this.analysisLimits = job.getAnalysisLimits();
Expand All @@ -678,7 +679,7 @@ public Builder(Job job) {
this.backgroundPersistInterval = job.getBackgroundPersistInterval();
this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays();
this.resultsRetentionDays = job.getResultsRetentionDays();
this.customSettings = job.getCustomSettings();
this.customSettings = job.getCustomSettings() == null ? null : new HashMap<>(job.getCustomSettings());
this.modelSnapshotId = job.getModelSnapshotId();
this.resultsIndexName = job.getResultsIndexNameNoPrefix();
this.deleting = job.isDeleting();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
package org.elasticsearch.xpack.core.ml.datafeed;

import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
Expand Down Expand Up @@ -490,6 +489,13 @@ public void testDefaultFrequency_GivenAggregationsWithHistogramInterval_1_Hour()
assertEquals(TimeValue.timeValueHours(1), datafeed.defaultFrequency(TimeValue.timeValueHours(12)));
}

public void testCopyingDatafeedDoesNotCauseStackOverflow() {
    // Round-trip a config through the copying builder many times. Before the
    // fix, each copy re-wrapped the collections in another unmodifiable view,
    // so a deep enough chain of copies overflowed the stack on access/build.
    DatafeedConfig datafeed = createTestInstance();
    int remainingCopies = 100000;
    while (remainingCopies-- > 0) {
        datafeed = new DatafeedConfig.Builder(datafeed).build();
    }
}

public static String randomValidDatafeedId() {
CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
return generator.ofCodePointsLength(random(), 10, 10);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -580,6 +580,13 @@ public void testEarliestValidTimestamp_GivenDataCountsAndLatency() {
assertThat(builder.build().earliestValidTimestamp(dataCounts), equalTo(123455789L));
}

public void testCopyingJobDoesNotCauseStackOverflow() {
    // Repeatedly copy a job via its copying builder. Before the fix, the
    // builder reused the job's already-unmodifiable collections, so each
    // copy added another wrapper layer and eventually overflowed the stack.
    Job job = createRandomizedJob();
    int remainingCopies = 100000;
    while (remainingCopies-- > 0) {
        job = new Job.Builder(job).build();
    }
}

public static Job.Builder buildJobBuilder(String id, Date date) {
Job.Builder builder = new Job.Builder(id);
builder.setCreateTime(date);
Expand Down

0 comments on commit ea7c8d4

Please sign in to comment.