This repository has been archived by the owner on Aug 2, 2022. It is now read-only.

Support ES 7.10.0 #313

Merged
8 changes: 4 additions & 4 deletions .github/workflows/CI.yml
@@ -51,9 +51,9 @@ jobs:
echo "RUN /usr/share/elasticsearch/bin/elasticsearch-plugin install --batch file:/tmp/opendistro-anomaly-detection-$plugin_version.zip" >> Dockerfile

docker build -t odfe-ad:test .
echo "::set-env name=imagePresent::true"
echo "imagePresent=true" >> $GITHUB_ENV
else
echo "::set-env name=imagePresent::false"
echo "imagePresent=false" >> $GITHUB_ENV
fi

- name: Run Docker Image
@@ -70,10 +70,10 @@ jobs:
if [ $security -gt 0 ]
then
echo "Security plugin is available"
./gradlew :integTestRunner --tests "com.amazon.opendistroforelasticsearch.ad.rest.*IT" --tests "com.amazon.opendistroforelasticsearch.ad.e2e.*IT" -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster" -Dhttps=true -Duser=admin -Dpassword=admin -PnumNodes=2
./gradlew integTest -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster" -Dhttps=true -Duser=admin -Dpassword=admin
else
echo "Security plugin is NOT available"
./gradlew :integTestRunner --tests "com.amazon.opendistroforelasticsearch.ad.rest.*IT" --tests "com.amazon.opendistroforelasticsearch.ad.e2e.*IT" -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster" -PnumNodes=2
./gradlew integTest -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster"
Contributor: ES 7.10 doesn't support -PnumNodes? How do we test a cluster with multiple nodes?

Contributor (Author): -PnumNodes is used for local testing, not for a remote cluster.

Contributor (Author): We need to add a new task for local testing with multiple nodes.

Contributor (Author): Actually, it never worked. I commented on issue #194; let's use that to track the multi-node testing changes.

Contributor: Got it, let's fix it in a separate PR.
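Following up on the multi-node discussion: a minimal sketch of how -PnumNodes could be wired into the testclusters-based integTest cluster for local runs. This is illustrative only; it reuses the _numNodes property that build.gradle already reads from -PnumNodes, and the actual multi-node task is tracked in #194.

def _numNodes = findProperty('numNodes') as Integer ?: 1

testClusters.integTest {
    // testclusters starts a single node by default; only override when more nodes
    // are requested, e.g. ./gradlew integTest -PnumNodes=3 for a local 3-node cluster
    if (_numNodes > 1) numberOfNodes = _numNodes
}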

fi
- name: Upload Coverage Report
uses: codecov/codecov-action@v1
2 changes: 1 addition & 1 deletion .github/workflows/esad-cli-test-and-build-workflow.yml
@@ -57,7 +57,7 @@ jobs:
exit 1
- uses: actions/checkout@v2
- name: golangci-lint
uses: golangci/golangci-lint-action@v1
uses: golangci/golangci-lint-action@v2.3.0
with:
version: v1.28
working-directory: cli
94 changes: 64 additions & 30 deletions build.gradle
@@ -13,10 +13,13 @@
* permissions and limitations under the License.
*/

import java.util.concurrent.Callable
import org.elasticsearch.gradle.test.RestIntegTestTask

buildscript {
ext {
es_group = "org.elasticsearch"
es_version = '7.9.1'
es_version = '7.10.0'
}

repositories {
@@ -44,7 +47,7 @@ repositories {
}

ext {
opendistroVersion = '1.11.0'
opendistroVersion = '1.12.0'
isSnapshot = "true" == System.getProperty("build.snapshot", "true")
}

@@ -53,6 +56,7 @@ version = "${opendistroVersion}.0"
apply plugin: 'java'
apply plugin: 'idea'
apply plugin: 'elasticsearch.esplugin'
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'base'
apply plugin: 'jacoco'
apply plugin: 'eclipse'
@@ -115,6 +119,7 @@ testingConventions.enabled = false
licenseHeaders.enabled = true
dependencyLicenses.enabled = false
thirdPartyAudit.enabled = false
loggerUsageCheck.enabled = false

// See package README.md for details on using these tasks.
def _numNodes = findProperty('numNodes') as Integer ?: 1
@@ -127,35 +132,50 @@ test {
systemProperty 'tests.security.manager', 'false'
}

task integTest(type: RestIntegTestTask) {
description = "Run tests against a cluster"
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
}
tasks.named("check").configure { dependsOn(integTest) }

integTest {
runner {
systemProperty 'tests.security.manager', 'false'
systemProperty 'java.io.tmpdir', es_tmp_dir.absolutePath

systemProperty "https", System.getProperty("https")
systemProperty "user", System.getProperty("user")
systemProperty "password", System.getProperty("password")

// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
// use longer timeouts for requests.
def isDebuggingCluster = getDebug() || System.getProperty("test.debug") != null
systemProperty 'cluster.debug', isDebuggingCluster
// Set number of nodes system property to be used in tests
systemProperty 'cluster.number_of_nodes', "${_numNodes}"
// There seems to be an issue when running multi node run or integ tasks with unicast_hosts
// not being written, the waitForAllConditions ensures it's written
getClusters().forEach { cluster ->
cluster.waitForAllConditions()
}
dependsOn "bundlePlugin"
systemProperty 'tests.security.manager', 'false'
systemProperty 'java.io.tmpdir', es_tmp_dir.absolutePath

systemProperty "https", System.getProperty("https")
systemProperty "user", System.getProperty("user")
systemProperty "password", System.getProperty("password")

// Only rest case can run with remote cluster
if (System.getProperty("tests.rest.cluster") != null) {
filter {
includeTestsMatching "com.amazon.opendistroforelasticsearch.ad.rest.*IT"
includeTestsMatching "com.amazon.opendistroforelasticsearch.ad.e2e.*IT"
}
}

// The --debug-jvm command-line option makes the cluster debuggable; this makes the tests debuggable
if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
// use longer timeouts for requests.
def isDebuggingCluster = getDebug() || System.getProperty("test.debug") != null
systemProperty 'cluster.debug', isDebuggingCluster
// Set number of nodes system property to be used in tests
systemProperty 'cluster.number_of_nodes', "${_numNodes}"
// There seems to be an issue when running multi node run or integ tasks with unicast_hosts
// not being written, the waitForAllConditions ensures it's written
getClusters().forEach { cluster ->
cluster.waitForAllConditions()
}
}

// The --debug-jvm command-line option makes the cluster debuggable; this makes the tests debuggable
if (System.getProperty("test.debug") != null) {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

}

testClusters.integTest {
@@ -172,7 +192,19 @@ testClusters.integTest {
debugPort += 1
}
}
plugin(fileTree("src/test/resources/job-scheduler").getSingleFile())
plugin(project.tasks.bundlePlugin.archiveFile)

plugin(provider(new Callable<RegularFile>(){
@Override
RegularFile call() throws Exception {
return new RegularFile() {
@Override
File getAsFile() {
return fileTree("src/test/resources/job-scheduler").getSingleFile()
}
}
}
}))

// As of ES 7.7.0 the opendistro-anomaly-detection plugin is being added to the list of plugins for the testCluster during build before
// the opendistro-job-scheduler plugin, which is causing build failures. From the stack trace, this looks like a bug.
@@ -188,7 +220,7 @@ testClusters.integTest {
// ./bin/elasticsearch-plugin install --batch file:opendistro-anomaly-detection.zip file:opendistro-job-scheduler.zip
//
// After 7.7:
// ./bin/elasticsearch-plugin install --batch file:opendistro-job-scheduler.zip file:opendistro-anomaly-detection.zip
// ./bin/elasticsearch-plugin install --batch file:opendistro-job-scheduler.zip file:opendistro-anomaly-detection.zip
//
// A temporary hack is to reorder the plugins list after evaluation but prior to task execution when the plugins are installed.
nodes.each { node ->
@@ -207,6 +239,8 @@ run {
cluster.waitForAllConditions()
}
}

useCluster testClusters.integTest
}

evaluationDependsOnChildren()
@@ -311,8 +345,8 @@ checkstyle {
dependencies {
compile "org.elasticsearch:elasticsearch:${es_version}"
compileOnly "org.elasticsearch.plugin:elasticsearch-scripting-painless-spi:${versions.elasticsearch}"
compileOnly "com.amazon.opendistroforelasticsearch:opendistro-job-scheduler-spi:1.11.0.0"
compile "com.amazon.opendistroforelasticsearch:common-utils:1.11.0.1"
compileOnly "com.amazon.opendistroforelasticsearch:opendistro-job-scheduler-spi:1.12.0.0"
compile "com.amazon.opendistroforelasticsearch:common-utils:1.12.0.0"
compile group: 'com.google.guava', name: 'guava', version:'29.0-jre'
compile group: 'org.apache.commons', name: 'commons-math3', version: '3.6.1'
compile group: 'com.google.code.gson', name: 'gson', version: '2.8.6'
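A note on the job-scheduler plugin wiring in the testClusters block above: the anonymous Callable<RegularFile> is there because the ES 7.10 testclusters plugin() API expects a Provider<RegularFile> rather than a plain File. Assuming Gradle's ProjectLayout.file() (available since Gradle 6.0), a more compact equivalent sketch would be:

testClusters.integTest {
    // layout.file() converts a Provider<File> into the Provider<RegularFile> that
    // plugin() expects; behaviorally the same as the Callable<RegularFile> wrapper above
    plugin(project.layout.file(project.provider {
        fileTree("src/test/resources/job-scheduler").singleFile
    }))
}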
2 changes: 1 addition & 1 deletion gradle/wrapper/gradle-wrapper.properties
@@ -17,4 +17,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip
@@ -0,0 +1,21 @@
## Version 1.12.0.0 Release Notes

Compatible with Elasticsearch 7.10.0

### Enhancements

* Improve profile API ([#298](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/298))
* Add checkpoint index retention for multi entity detector ([#283](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/283))
* Stashing context for Stats API to allow users to query from RestAPI ([#300](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/300))
* add HC detector request/failure stats ([#307](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/307))

### Bug Fixes

* Fix edge case where entities found for preview is empty ([#296](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/296))
* fix null user in detector ([#301](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/301))
* fix fatal error of missing method parseString ([#302](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/302))
* remove clock Guice binding ([#305](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/305))
* filter out empty value for entity features ([#306](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/306))
* Fix for upgrading mapping ([#309](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/309))
* fix double nan error when parse to json ([#310](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/310))
* Fix issue where data hole exists for Preview API ([#312](https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/312))
@@ -404,7 +404,7 @@ private void stopAdJob(String detectorId) {
.xContent()
.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser);
if (job.isEnabled()) {
AnomalyDetectorJob newJob = new AnomalyDetectorJob(
@@ -629,7 +629,7 @@ public ScheduledJobRunner getJobRunner() {
@Override
public ScheduledJobParser getJobParser() {
return (parser, id, jobDocVersion) -> {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
return AnomalyDetectorJob.parse(parser);
};
}
@@ -116,7 +116,7 @@ private void calculateTotalResponsesToWait(
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getDetectorResponse.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, xContentParser.nextToken(), xContentParser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, xContentParser.nextToken(), xContentParser);
AnomalyDetector detector = AnomalyDetector.parse(xContentParser, detectorId);
boolean isMultiEntityDetector = detector.isMultientityDetector();

@@ -186,7 +186,7 @@ private void prepareProfile(
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser);
long enabledTimeMs = job.getEnabledTime().toEpochMilli();

@@ -325,7 +325,7 @@ private ActionListener<GetResponse> onGetDetectorState(
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
DetectorInternalState detectorState = DetectorInternalState.parse(parser);
long lastUpdateTimeMs = detectorState.getLastUpdateTime().toEpochMilli();

@@ -102,7 +102,7 @@ public void profile(
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
AnomalyDetector detector = AnomalyDetector.parse(parser, detectorId);
List<String> categoryField = detector.getCategoryField();
if (categoryField == null || categoryField.size() == 0) {
@@ -181,7 +181,7 @@ private void getJob(
.xContent()
.createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString())
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser);

if (profilesToCollect.contains(EntityProfileName.MODELS)) {
@@ -155,7 +155,7 @@ private ActionListener<GetResponse> onGetDetectorResponse(String adID, ActionLis
try (
XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, xc)
) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
AnomalyDetector detector = AnomalyDetector.parse(parser, response.getId());
// end execution if all features are disabled
if (detector.getEnabledFeatureIds().isEmpty()) {
@@ -114,8 +114,7 @@ protected Cancellable getHourlyCron() {
return hourlyCron;
}

@Override
public String executorName() {
private String executorName() {
return ThreadPool.Names.GENERIC;
}
}
@@ -439,8 +439,7 @@ public void offMaster() {
}
}

@Override
public String executorName() {
private String executorName() {
return ThreadPool.Names.MANAGEMENT;
}

@@ -266,7 +266,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
xContentBuilder.field(UI_METADATA_FIELD, uiMetadata);
}
if (lastUpdateTime != null) {
xContentBuilder.timeField(LAST_UPDATE_TIME_FIELD, LAST_UPDATE_TIME_FIELD, lastUpdateTime.toEpochMilli());
xContentBuilder.field(LAST_UPDATE_TIME_FIELD, lastUpdateTime.toEpochMilli());
}
if (categoryFields != null) {
xContentBuilder.field(CATEGORY_FIELD, categoryFields.toArray());
@@ -343,7 +343,7 @@ public static AnomalyDetector parse(

List<String> categoryField = null;

ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
String fieldName = parser.currentName();
parser.nextToken();
@@ -359,7 +359,7 @@ public static AnomalyDetector parse(
timeField = parser.text();
break;
case INDICES_FIELD:
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
indices.add(parser.text());
}
@@ -371,7 +371,7 @@ public static AnomalyDetector parse(
schemaVersion = parser.intValue();
break;
case FILTER_QUERY_FIELD:
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
try {
filterQuery = parseInnerQueryBuilder(parser);
} catch (IllegalArgumentException e) {
@@ -384,7 +384,7 @@ public static AnomalyDetector parse(
detectionInterval = TimeConfiguration.parse(parser);
break;
case FEATURE_ATTRIBUTES_FIELD:
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser);
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
features.add(Feature.parse(parser));
}
@@ -65,7 +65,7 @@ public static AnomalyDetectorExecutionInput parse(XContentParser parser, String
Instant periodEnd = null;
AnomalyDetector detector = null;

ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
String fieldName = parser.currentName();
parser.nextToken();