, AutoCloseable {
@@ -33,15 +31,24 @@ public VersionPropertiesBuildService(ProviderFactory providerFactory) {
try {
File propertiesInputFile = new File(infoPath, "version.properties");
properties = VersionPropertiesLoader.loadBuildSrcVersion(propertiesInputFile, providerFactory);
- properties.computeIfAbsent("minimumJava", s -> resolveMinimumJavaVersion(infoPath));
+ properties.computeIfAbsent("minimumRuntimeJava", s -> resolveMinimumRuntimeJavaVersion(infoPath));
+ properties.computeIfAbsent("minimumCompilerJava", s -> resolveMinimumCompilerJavaVersion(infoPath));
} catch (IOException e) {
throw new GradleException("Cannot load VersionPropertiesBuildService", e);
}
}
- private JavaVersion resolveMinimumJavaVersion(File infoPath) {
+ private JavaVersion resolveMinimumRuntimeJavaVersion(File infoPath) {
+ return resolveJavaVersion(infoPath, "src/main/resources/minimumRuntimeVersion");
+ }
+
+ private JavaVersion resolveMinimumCompilerJavaVersion(File infoPath) {
+ return resolveJavaVersion(infoPath, "src/main/resources/minimumCompilerVersion");
+ }
+
+ private JavaVersion resolveJavaVersion(File infoPath, String path) {
final JavaVersion minimumJavaVersion;
- File minimumJavaInfoSource = new File(infoPath, "src/main/resources/minimumRuntimeVersion");
+ File minimumJavaInfoSource = new File(infoPath, path);
try {
String versionString = FileUtils.readFileToString(minimumJavaInfoSource);
minimumJavaVersion = JavaVersion.toVersion(versionString);
diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
new file mode 100644
index 0000000000000..09d32d79a508c
--- /dev/null
+++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java
@@ -0,0 +1,335 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.gradle.internal.conventions.precommit;
+
+import com.diffplug.gradle.spotless.SpotlessExtension;
+import com.diffplug.gradle.spotless.SpotlessPlugin;
+
+import org.gradle.api.Plugin;
+import org.gradle.api.Project;
+
+import java.util.List;
+
+/**
+ * This plugin configures formatting for Java source using Spotless
+ * for Gradle. Since the act of formatting existing source can interfere
+ * with developers' workflows, we don't automatically format all code
+ * (yet). Instead, we maintain a list of projects that are excluded from
+ * formatting, until we reach a point where we can comfortably format them
+ * in one go without too much disruption.
+ *
+ * Any new sub-projects must not be added to the exclusions list!
+ *
+ * <p>To perform a reformat, run:
+ *
+ * <pre>./gradlew spotlessApply</pre>
+ *
+ * To check the current format, run:
+ *
+ * <pre>./gradlew spotlessJavaCheck</pre>
+ *
+ * This is also carried out by the `precommit` task.
+ *
+ * <p>See also the Spotless project page.
+ */
+public class FormattingPrecommitPlugin implements Plugin<Project> {
+
+ @Override
+ public void apply(Project project) {
+ final boolean shouldFormatProject = PROJECT_PATHS_TO_EXCLUDE.contains(project.getPath()) == false
+ || project.getProviders().systemProperty("es.format.everything").forUseAtConfigurationTime().isPresent();
+
+ if (shouldFormatProject) {
+ project.getPlugins().apply(PrecommitTaskPlugin.class);
+ project.getPlugins().apply(SpotlessPlugin.class);
+
+ project.getExtensions().getByType(SpotlessExtension.class).java(java -> {
+ String importOrderPath = "build-conventions/elastic.importorder";
+ String formatterConfigPath = "build-conventions/formatterConfig.xml";
+
+ // When applied to e.g. `:build-tools`, we need to modify the path to our config files
+ if (project.getRootProject().file(importOrderPath).exists() == false) {
+ importOrderPath = "../" + importOrderPath;
+ formatterConfigPath = "../" + formatterConfigPath;
+ }
+
+ java.target(getTargets(project.getPath()));
+
+ // Use `@formatter:off` and `@formatter:on` to toggle formatting - ONLY IF STRICTLY NECESSARY
+ java.toggleOffOn("@formatter:off", "@formatter:on");
+
+ java.removeUnusedImports();
+
+ // We enforce a standard order for imports
+ java.importOrderFile(project.getRootProject().file(importOrderPath));
+
+ // Most formatting is done through the Eclipse formatter
+ java.eclipse().configFile(project.getRootProject().file(formatterConfigPath));
+
+ // Ensure blank lines are actually empty. Since formatters are applied in
+ // order, apply this one last, otherwise non-empty blank lines can creep
+ // in.
+ java.trimTrailingWhitespace();
+ });
+
+ project.getTasks().named("precommit").configure(precommitTask -> precommitTask.dependsOn("spotlessJavaCheck"));
+ }
+ }
+
+ @SuppressWarnings("CheckStyle")
+ private Object[] getTargets(String projectPath) {
+ if (projectPath.equals(":server")) {
+ return new String[] {
+ "src/*/java/org/elasticsearch/action/admin/cluster/repositories/**/*.java",
+ "src/*/java/org/elasticsearch/action/admin/cluster/snapshots/**/*.java",
+ "src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java",
+ "src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java",
+ "src/*/java/org/elasticsearch/index/IndexMode.java",
+ "src/*/java/org/elasticsearch/index/IndexRouting.java",
+ "src/*/java/org/elasticsearch/index/TimeSeriesModeTests.java",
+ "src/*/java/org/elasticsearch/index/snapshots/**/*.java",
+ "src/*/java/org/elasticsearch/repositories/**/*.java",
+ "src/*/java/org/elasticsearch/search/aggregations/**/*.java",
+ "src/*/java/org/elasticsearch/snapshots/**/*.java" };
+ } else {
+            // Normally this isn't necessary, but we have Java sources in
+ // non-standard places
+ return new String[] { "src/**/*.java" };
+ }
+ }
+
+ // Do not add new sub-projects here!
+    private static final List<String> PROJECT_PATHS_TO_EXCLUDE = List.of(
+ ":client:benchmark",
+ ":client:client-benchmark-noop-api-plugin",
+ ":client:rest",
+ ":client:rest-high-level",
+ ":client:rest-high-level:qa:ssl-enabled",
+ ":client:sniffer",
+ ":client:test",
+ ":distribution:archives:integ-test-zip",
+ ":distribution:docker",
+ ":docs",
+ ":example-plugins:custom-settings",
+ ":example-plugins:custom-significance-heuristic",
+ ":example-plugins:custom-suggester",
+ ":example-plugins:painless-whitelist",
+ ":example-plugins:rescore",
+ ":example-plugins:rest-handler",
+ ":example-plugins:script-expert-scoring",
+ ":example-plugins:security-authorization-engine",
+ ":libs:elasticsearch-cli",
+ ":libs:elasticsearch-core",
+ ":libs:elasticsearch-dissect",
+ ":libs:elasticsearch-geo",
+ ":libs:elasticsearch-grok",
+ ":libs:elasticsearch-lz4",
+ ":libs:elasticsearch-nio",
+ ":libs:elasticsearch-plugin-classloader",
+ ":libs:elasticsearch-secure-sm",
+ ":libs:elasticsearch-ssl-config",
+ ":libs:elasticsearch-x-content",
+ ":modules:analysis-common",
+ ":modules:ingest-common",
+ ":modules:ingest-geoip",
+ ":modules:ingest-geoip:qa:file-based-update",
+ ":modules:ingest-user-agent",
+ ":modules:lang-expression",
+ ":modules:lang-mustache",
+ ":modules:lang-painless",
+ ":modules:lang-painless:spi",
+ ":modules:mapper-extras",
+ ":modules:parent-join",
+ ":modules:percolator",
+ ":modules:rank-eval",
+ ":modules:reindex",
+ ":modules:repository-url",
+ ":modules:systemd",
+ ":modules:tasks",
+ ":modules:transport-netty4",
+ ":plugins:analysis-icu",
+ ":plugins:analysis-kuromoji",
+ ":plugins:analysis-nori",
+ ":plugins:analysis-phonetic",
+ ":plugins:analysis-smartcn",
+ ":plugins:analysis-stempel",
+ ":plugins:analysis-ukrainian",
+ ":plugins:discovery-azure-classic",
+ ":plugins:discovery-ec2",
+ ":plugins:discovery-ec2:qa:amazon-ec2",
+ ":plugins:discovery-gce",
+ ":plugins:discovery-gce:qa:gce",
+ ":plugins:ingest-attachment",
+ ":plugins:mapper-annotated-text",
+ ":plugins:mapper-murmur3",
+ ":plugins:mapper-size",
+ ":plugins:repository-azure",
+ ":plugins:repository-gcs",
+ ":plugins:repository-hdfs",
+ ":plugins:repository-hdfs:hadoop-common",
+ ":plugins:repository-s3",
+ ":plugins:store-smb",
+ ":plugins:transport-nio",
+ ":qa:ccs-rolling-upgrade-remote-cluster",
+ ":qa:ccs-unavailable-clusters",
+ ":qa:die-with-dignity",
+ ":qa:evil-tests",
+ ":qa:full-cluster-restart",
+ ":qa:logging-config",
+ ":qa:mixed-cluster",
+ ":qa:multi-cluster-search",
+ ":qa:no-bootstrap-tests",
+ ":qa:remote-clusters",
+ ":qa:repository-multi-version",
+ ":qa:rolling-upgrade",
+ ":qa:smoke-test-http",
+ ":qa:smoke-test-ingest-with-all-dependencies",
+ ":qa:smoke-test-multinode",
+ ":qa:smoke-test-plugins",
+ ":qa:snapshot-based-recoveries",
+ ":qa:snapshot-based-recoveries:azure",
+ ":qa:snapshot-based-recoveries:fs",
+ ":qa:snapshot-based-recoveries:gcs",
+ ":qa:snapshot-based-recoveries:s3",
+ ":qa:verify-version-constants",
+ ":rest-api-spec",
+ ":test:fixtures:geoip-fixture",
+ ":test:fixtures:krb5kdc-fixture",
+ ":test:fixtures:old-elasticsearch",
+ ":test:framework",
+ ":test:logger-usage",
+ ":x-pack:docs",
+ ":x-pack:license-tools",
+ ":x-pack:plugin",
+ ":x-pack:plugin:async-search",
+ ":x-pack:plugin:async-search:qa",
+ ":x-pack:plugin:async-search:qa:security",
+ ":x-pack:plugin:autoscaling:qa:rest",
+ ":x-pack:plugin:ccr",
+ ":x-pack:plugin:ccr:qa",
+ ":x-pack:plugin:ccr:qa:downgrade-to-basic-license",
+ ":x-pack:plugin:ccr:qa:multi-cluster",
+ ":x-pack:plugin:ccr:qa:non-compliant-license",
+ ":x-pack:plugin:ccr:qa:rest",
+ ":x-pack:plugin:ccr:qa:restart",
+ ":x-pack:plugin:ccr:qa:security",
+ ":x-pack:plugin:core",
+ ":x-pack:plugin:data-streams:qa:multi-node",
+ ":x-pack:plugin:data-streams:qa:rest",
+ ":x-pack:plugin:deprecation",
+ ":x-pack:plugin:enrich:qa:common",
+ ":x-pack:plugin:enrich:qa:rest",
+ ":x-pack:plugin:enrich:qa:rest-with-advanced-security",
+ ":x-pack:plugin:enrich:qa:rest-with-security",
+ ":x-pack:plugin:eql",
+ ":x-pack:plugin:eql:qa",
+ ":x-pack:plugin:eql:qa:common",
+ ":x-pack:plugin:eql:qa:mixed-node",
+ ":x-pack:plugin:eql:qa:multi-cluster-with-security",
+ ":x-pack:plugin:eql:qa:rest",
+ ":x-pack:plugin:eql:qa:security",
+ ":x-pack:plugin:fleet:qa:rest",
+ ":x-pack:plugin:graph",
+ ":x-pack:plugin:graph:qa:with-security",
+ ":x-pack:plugin:identity-provider",
+ ":x-pack:plugin:identity-provider:qa:idp-rest-tests",
+ ":x-pack:plugin:ilm",
+ ":x-pack:plugin:ilm:qa:multi-cluster",
+ ":x-pack:plugin:ilm:qa:multi-node",
+ ":x-pack:plugin:ilm:qa:rest",
+ ":x-pack:plugin:ilm:qa:with-security",
+ ":x-pack:plugin:mapper-constant-keyword",
+ ":x-pack:plugin:mapper-flattened",
+ ":x-pack:plugin:ml",
+ ":x-pack:plugin:ml:qa:basic-multi-node",
+ ":x-pack:plugin:ml:qa:disabled",
+ ":x-pack:plugin:ml:qa:ml-with-security",
+ ":x-pack:plugin:ml:qa:native-multi-node-tests",
+ ":x-pack:plugin:ml:qa:no-bootstrap-tests",
+ ":x-pack:plugin:ml:qa:single-node-tests",
+ ":x-pack:plugin:monitoring",
+ ":x-pack:plugin:ql",
+ ":x-pack:plugin:repository-encrypted:qa:azure",
+ ":x-pack:plugin:repository-encrypted:qa:gcs",
+ ":x-pack:plugin:repository-encrypted:qa:s3",
+ ":x-pack:plugin:rollup:qa:rest",
+ ":x-pack:plugin:search-business-rules",
+ ":x-pack:plugin:searchable-snapshots:qa:rest",
+ ":x-pack:plugin:security",
+ ":x-pack:plugin:security:cli",
+ ":x-pack:plugin:security:qa:basic-enable-security",
+ ":x-pack:plugin:security:qa:security-basic",
+ ":x-pack:plugin:security:qa:security-disabled",
+ ":x-pack:plugin:security:qa:security-not-enabled",
+ ":x-pack:plugin:security:qa:security-trial",
+ ":x-pack:plugin:security:qa:service-account",
+ ":x-pack:plugin:security:qa:smoke-test-all-realms",
+ ":x-pack:plugin:security:qa:tls-basic",
+ ":x-pack:plugin:shutdown:qa:multi-node",
+ ":x-pack:plugin:snapshot-repo-test-kit:qa:rest",
+ ":x-pack:plugin:spatial",
+ ":x-pack:plugin:sql",
+ ":x-pack:plugin:sql:jdbc",
+ ":x-pack:plugin:sql:qa",
+ ":x-pack:plugin:sql:qa:jdbc",
+ ":x-pack:plugin:sql:qa:jdbc:security",
+ ":x-pack:plugin:sql:qa:mixed-node",
+ ":x-pack:plugin:sql:qa:security",
+ ":x-pack:plugin:sql:qa:server:multi-node",
+ ":x-pack:plugin:sql:qa:server:single-node",
+ ":x-pack:plugin:sql:sql-action",
+ ":x-pack:plugin:sql:sql-cli",
+ ":x-pack:plugin:sql:sql-client",
+ ":x-pack:plugin:sql:sql-proto",
+ ":x-pack:plugin:stack:qa:rest",
+ ":x-pack:plugin:text-structure:qa:text-structure-with-security",
+ ":x-pack:plugin:transform",
+ ":x-pack:plugin:transform:qa:multi-cluster-tests-with-security",
+ ":x-pack:plugin:transform:qa:multi-node-tests",
+ ":x-pack:plugin:transform:qa:single-node-tests",
+ ":x-pack:plugin:vector-tile:qa:multi-cluster",
+ ":x-pack:plugin:vectors",
+ ":x-pack:plugin:watcher",
+ ":x-pack:plugin:watcher:qa:rest",
+ ":x-pack:plugin:watcher:qa:with-monitoring",
+ ":x-pack:plugin:watcher:qa:with-security",
+ ":x-pack:plugin:wildcard",
+ ":x-pack:qa",
+ ":x-pack:qa:core-rest-tests-with-security",
+ ":x-pack:qa:evil-tests",
+ ":x-pack:qa:full-cluster-restart",
+ ":x-pack:qa:kerberos-tests",
+ ":x-pack:qa:mixed-tier-cluster",
+ ":x-pack:qa:multi-cluster-search-security",
+ ":x-pack:qa:multi-node",
+ ":x-pack:qa:oidc-op-tests",
+ ":x-pack:qa:openldap-tests",
+ ":x-pack:qa:password-protected-keystore",
+ ":x-pack:qa:reindex-tests-with-security",
+ ":x-pack:qa:rolling-upgrade",
+ ":x-pack:qa:rolling-upgrade-multi-cluster",
+ ":x-pack:qa:runtime-fields:core-with-mapped",
+ ":x-pack:qa:runtime-fields:core-with-search",
+ ":x-pack:qa:runtime-fields:with-security",
+ ":x-pack:qa:saml-idp-tests",
+ ":x-pack:qa:security-example-spi-extension",
+ ":x-pack:qa:security-setup-password-tests",
+ ":x-pack:qa:security-tools-tests",
+ ":x-pack:qa:smoke-test-plugins",
+ ":x-pack:qa:smoke-test-plugins-ssl",
+ ":x-pack:qa:smoke-test-security-with-mustache",
+ ":x-pack:qa:third-party:active-directory",
+ ":x-pack:qa:third-party:jira",
+ ":x-pack:qa:third-party:pagerduty",
+ ":x-pack:qa:third-party:slack",
+ ":x-pack:test:idp-fixture",
+ ":x-pack:test:smb-fixture"
+ );
+}
diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle
index 33046d6097c80..f4f4677a8a5e5 100644
--- a/build-tools-internal/build.gradle
+++ b/build-tools-internal/build.gradle
@@ -6,13 +6,6 @@
* Side Public License, v 1.
*/
-
-import org.elasticsearch.gradle.internal.conventions.VersionPropertiesLoader
-import org.apache.tools.ant.taskdefs.condition.Os
-import org.gradle.plugins.ide.eclipse.model.AccessRule
-import org.gradle.plugins.ide.eclipse.model.SourceFolder
-import org.gradle.plugins.ide.eclipse.model.ProjectDependency
-
plugins {
id 'java-gradle-plugin'
id 'groovy-gradle-plugin'
@@ -20,6 +13,7 @@ plugins {
id 'elasticsearch.build-tools'
id 'elasticsearch.eclipse'
id 'elasticsearch.versions'
+ id 'elasticsearch.formatting'
}
group = 'org.elasticsearch.gradle'
@@ -169,7 +163,7 @@ gradlePlugin {
* Java version *
*****************************************************************************/
-def minCompilerJava = versions.get("minimumJava")
+def minCompilerJava = versions.get("minimumCompilerJava")
targetCompatibility = minCompilerJava
sourceCompatibility = minCompilerJava
@@ -234,13 +228,17 @@ dependencies {
compileOnly "com.puppycrawl.tools:checkstyle:${versions.getProperty('checkstyle')}"
runtimeOnly "org.elasticsearch.gradle:reaper:$version"
testImplementation "com.puppycrawl.tools:checkstyle:${versions.getProperty('checkstyle')}"
- testImplementation "junit:junit:${versions.getProperty('junit')}"
+// testImplementation "junit:junit:${versions.getProperty('junit')}"
testImplementation 'com.github.tomakehurst:wiremock-jre8-standalone:2.23.2'
testImplementation 'org.mockito:mockito-core:1.9.5'
testImplementation "org.hamcrest:hamcrest:${versions.getProperty('hamcrest')}"
testImplementation testFixtures("org.elasticsearch.gradle:build-tools:$version")
+ testImplementation(platform("org.junit:junit-bom:${versions.getProperty('junit5')}"))
+ testImplementation("org.junit.jupiter:junit-jupiter") {
+ because 'allows to write and run Jupiter tests'
+ }
integTestImplementation(platform("org.junit:junit-bom:${versions.getProperty('junit5')}"))
integTestImplementation("org.junit.jupiter:junit-jupiter") {
because 'allows to write and run Jupiter tests'
@@ -251,7 +249,7 @@ dependencies {
testRuntimeOnly("org.junit.vintage:junit-vintage-engine") {
because 'allows JUnit 3 and JUnit 4 tests to run'
}
- integTestRuntimeOnly("org.junit.platform:junit-platform-launcher") {
+ testRuntimeOnly("org.junit.platform:junit-platform-launcher") {
because 'allows tests to run from IDEs that bundle older version of launcher'
}
@@ -265,9 +263,13 @@ dependencies {
}
// required as we rely on junit4 rules
integTestImplementation "org.spockframework:spock-junit4"
+ testImplementation "org.spockframework:spock-junit4"
integTestImplementation "org.xmlunit:xmlunit-core:2.8.2"
}
+tasks.named('test').configure {
+ useJUnitPlatform()
+}
tasks.register("integTest", Test) {
inputs.dir(file("src/testKit")).withPropertyName("testkit dir").withPathSensitivity(PathSensitivity.RELATIVE)
systemProperty 'test.version_under_test', version
diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy
index a03f6ef8c9e79..7aa1adbc4865f 100644
--- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy
+++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy
@@ -66,18 +66,16 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF
"""
when:
def result = gradleRunner(":distribution:bwc:${bwcProject}:buildBwcDarwinTar",
- ":distribution:bwc:${bwcProject}:buildBwcOssDarwinTar",
+ ":distribution:bwc:${bwcProject}:buildBwcDarwinTar",
"-DtestRemoteRepo=" + remoteGitRepo,
"-Dbwc.remote=origin",
"-Dbwc.dist.version=${bwcDistVersion}-SNAPSHOT")
.build()
then:
result.task(":distribution:bwc:${bwcProject}:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS
- result.task(":distribution:bwc:${bwcProject}:buildBwcOssDarwinTar").outcome == TaskOutcome.SUCCESS
and: "assemble task triggered"
assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:darwin-tar:${expectedAssembleTaskName}")
- assertOutputContains(result.output, "[$bwcDistVersion] > Task :distribution:archives:oss-darwin-tar:${expectedAssembleTaskName}")
where:
bwcDistVersion | bwcProject | expectedAssembleTaskName
diff --git a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/BuildPluginIT.java b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/BuildPluginIT.java
index 06288f6af10a4..94a08f27d0be0 100644
--- a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/BuildPluginIT.java
+++ b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/BuildPluginIT.java
@@ -8,7 +8,6 @@
package org.elasticsearch.gradle.internal;
import org.apache.commons.io.IOUtils;
-import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.internal.test.GradleIntegrationTestCase;
import org.gradle.testkit.runner.BuildResult;
import org.junit.Rule;
diff --git a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/test/TestClasspathUtils.java b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/test/TestClasspathUtils.java
index 28dbb48e1a00f..ef768173bf28f 100644
--- a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/test/TestClasspathUtils.java
+++ b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/internal/test/TestClasspathUtils.java
@@ -8,8 +8,6 @@
package org.elasticsearch.gradle.internal.test;
-import java.io.File;
-import java.io.IOException;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.description.modifier.Ownership;
import net.bytebuddy.description.modifier.Visibility;
@@ -21,6 +19,9 @@
import net.bytebuddy.implementation.Implementation;
import net.bytebuddy.implementation.bytecode.ByteCodeAppender;
+import java.io.File;
+import java.io.IOException;
+
import static org.junit.Assert.fail;
public class TestClasspathUtils {
@@ -30,18 +31,16 @@ public static void setupJarJdkClasspath(File projectRoot) {
}
public static void setupJarJdkClasspath(File projectRoot, String errorMessage) {
- generateJdkJarHellCheck(projectRoot,
- ExceptionMethod.throwing(IllegalStateException.class, errorMessage));
+ generateJdkJarHellCheck(projectRoot, ExceptionMethod.throwing(IllegalStateException.class, errorMessage));
}
private static void generateJdkJarHellCheck(File projectRoot, Implementation mainImplementation) {
-        DynamicType.Unloaded<?> dynamicType = new ByteBuddy()
- .subclass(Object.class)
- .name("org.elasticsearch.jdk.JdkJarHellCheck")
- .defineMethod("main", void.class, Visibility.PUBLIC, Ownership.STATIC)
- .withParameters(String[].class)
- .intercept(mainImplementation)
- .make();
+        DynamicType.Unloaded<?> dynamicType = new ByteBuddy().subclass(Object.class)
+ .name("org.elasticsearch.jdk.JdkJarHellCheck")
+ .defineMethod("main", void.class, Visibility.PUBLIC, Ownership.STATIC)
+ .withParameters(String[].class)
+ .intercept(mainImplementation)
+ .make();
try {
dynamicType.toJar(targetFile(projectRoot));
} catch (IOException e) {
@@ -52,15 +51,14 @@ private static void generateJdkJarHellCheck(File projectRoot, Implementation mai
private static File targetFile(File projectRoot) {
File targetFile = new File(
- projectRoot,
- "sample_jars/build/testrepo/org/elasticsearch/elasticsearch-core/current/elasticsearch-core-current.jar"
+ projectRoot,
+ "sample_jars/build/testrepo/org/elasticsearch/elasticsearch-core/current/elasticsearch-core-current.jar"
);
targetFile.getParentFile().mkdirs();
return targetFile;
}
-
private static class InconsistentParameterReferenceMethod implements net.bytebuddy.implementation.Implementation {
@Override
public ByteCodeAppender appender(Target implementationTarget) {
diff --git a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/jarhell/ThirdPartyAuditTaskIT.java b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/jarhell/ThirdPartyAuditTaskIT.java
index 5a3a968c3f6a8..71cf615d7db0a 100644
--- a/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/jarhell/ThirdPartyAuditTaskIT.java
+++ b/build-tools-internal/src/integTest/java/org/elasticsearch/gradle/jarhell/ThirdPartyAuditTaskIT.java
@@ -30,13 +30,13 @@ public void setUp() throws Exception {
public void testElasticsearchIgnored() {
BuildResult result = getGradleRunner().withArguments(
- ":clean",
- ":empty",
- "-s",
- "-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
- "-PcompileOnlyVersion=0.0.1",
- "-PcompileGroup=elasticsearch.gradle:dummy-io",
- "-PcompileVersion=0.0.1"
+ ":clean",
+ ":empty",
+ "-s",
+ "-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
+ "-PcompileOnlyVersion=0.0.1",
+ "-PcompileGroup=elasticsearch.gradle:dummy-io",
+ "-PcompileVersion=0.0.1"
).build();
assertTaskNoSource(result, ":empty");
assertNoDeprecationWarning(result);
@@ -46,13 +46,13 @@ public void testViolationFoundAndCompileOnlyIgnored() {
setupJarJdkClasspath(getProjectDir());
BuildResult result = getGradleRunner().withArguments(
- ":clean",
- ":absurd",
- "-s",
- "-PcompileOnlyGroup=other.gradle:broken-log4j",
- "-PcompileOnlyVersion=0.0.1",
- "-PcompileGroup=other.gradle:dummy-io",
- "-PcompileVersion=0.0.1"
+ ":clean",
+ ":absurd",
+ "-s",
+ "-PcompileOnlyGroup=other.gradle:broken-log4j",
+ "-PcompileOnlyVersion=0.0.1",
+ "-PcompileGroup=other.gradle:dummy-io",
+ "-PcompileVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
@@ -64,47 +64,47 @@ public void testViolationFoundAndCompileOnlyIgnored() {
public void testClassNotFoundAndCompileOnlyIgnored() {
setupJarJdkClasspath(getProjectDir());
BuildResult result = getGradleRunner().withArguments(
- ":clean",
- ":absurd",
- "-s",
- "-PcompileGroup=other.gradle:broken-log4j",
- "-PcompileVersion=0.0.1",
- "-PcompileOnlyGroup=other.gradle:dummy-io",
- "-PcompileOnlyVersion=0.0.1"
+ ":clean",
+ ":absurd",
+ "-s",
+ "-PcompileGroup=other.gradle:broken-log4j",
+ "-PcompileVersion=0.0.1",
+ "-PcompileOnlyGroup=other.gradle:dummy-io",
+ "-PcompileOnlyVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
assertOutputContains(
- result.getOutput(),
- "Missing classes:",
- " * org.apache.logging.log4j.LogManager",
- "> Audit of third party dependencies failed"
+ result.getOutput(),
+ "Missing classes:",
+ " * org.apache.logging.log4j.LogManager",
+ "> Audit of third party dependencies failed"
);
assertOutputMissing(result.getOutput(), "Classes with violations:");
assertNoDeprecationWarning(result);
}
public void testJarHellWithJDK() {
- setupJarJdkClasspath(getProjectDir(), "> Audit of third party dependencies failed:" +
- " Jar Hell with the JDK:" +
- " * java.lang.String"
+ setupJarJdkClasspath(
+ getProjectDir(),
+ "> Audit of third party dependencies failed:" + " Jar Hell with the JDK:" + " * java.lang.String"
);
BuildResult result = getGradleRunner().withArguments(
- ":clean",
- ":absurd",
- "-s",
- "-PcompileGroup=other.gradle:jarhellJdk",
- "-PcompileVersion=0.0.1",
- "-PcompileOnlyGroup=other.gradle:dummy-io",
- "-PcompileOnlyVersion=0.0.1"
+ ":clean",
+ ":absurd",
+ "-s",
+ "-PcompileGroup=other.gradle:jarhellJdk",
+ "-PcompileVersion=0.0.1",
+ "-PcompileOnlyGroup=other.gradle:dummy-io",
+ "-PcompileOnlyVersion=0.0.1"
).buildAndFail();
assertTaskFailed(result, ":absurd");
assertOutputContains(
- result.getOutput(),
- "> Audit of third party dependencies failed:",
- " Jar Hell with the JDK:",
- " * java.lang.String"
+ result.getOutput(),
+ "> Audit of third party dependencies failed:",
+ " Jar Hell with the JDK:",
+ " * java.lang.String"
);
assertOutputMissing(result.getOutput(), "Classes with violations:");
assertNoDeprecationWarning(result);
@@ -112,13 +112,13 @@ public void testJarHellWithJDK() {
public void testElasticsearchIgnoredWithViolations() {
BuildResult result = getGradleRunner().withArguments(
- ":clean",
- ":absurd",
- "-s",
- "-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
- "-PcompileOnlyVersion=0.0.1",
- "-PcompileGroup=elasticsearch.gradle:dummy-io",
- "-PcompileVersion=0.0.1"
+ ":clean",
+ ":absurd",
+ "-s",
+ "-PcompileOnlyGroup=elasticsearch.gradle:broken-log4j",
+ "-PcompileOnlyVersion=0.0.1",
+ "-PcompileGroup=elasticsearch.gradle:dummy-io",
+ "-PcompileVersion=0.0.1"
).build();
assertTaskNoSource(result, ":absurd");
assertNoDeprecationWarning(result);
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle b/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle
deleted file mode 100644
index 7147f5e09610b..0000000000000
--- a/build-tools-internal/src/main/groovy/elasticsearch.formatting.gradle
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import org.elasticsearch.gradle.internal.ElasticsearchJavaPlugin
-
-/*
- * This script plugin configures formatting for Java source using Spotless
- * for Gradle. Since the act of formatting existing source can interfere
- * with developers' workflows, we don't automatically format all code
- * (yet). Instead, we maintain a list of projects that are excluded from
- * formatting, until we reach a point where we can comfortably format them
- * in one go without too much disruption.
- *
- * Any new sub-projects must not be added to the exclusions list!
- *
- * To perform a reformat, run:
- *
- * ./gradlew spotlessApply
- *
- * To check the current format, run:
- *
- * ./gradlew spotlessJavaCheck
- *
- * This is also carried out by the `precommit` task.
- *
- * For more about Spotless, see:
- *
- * https://github.com/diffplug/spotless/tree/master/plugin-gradle
- */
-
-// Do not add new sub-projects here!
-def projectPathsToExclude = [
- ':client:benchmark',
- ':client:client-benchmark-noop-api-plugin',
- ':client:rest',
- ':client:rest-high-level',
- ':client:sniffer',
- ':client:test',
- ':example-plugins:custom-settings',
- ':example-plugins:custom-significance-heuristic',
- ':example-plugins:custom-suggester',
- ':example-plugins:painless-whitelist',
- ':example-plugins:rescore',
- ':example-plugins:rest-handler',
- ':example-plugins:script-expert-scoring',
- ':example-plugins:security-authorization-engine',
- ':libs:elasticsearch-cli',
- ':libs:elasticsearch-core',
- ':libs:elasticsearch-dissect',
- ':libs:elasticsearch-geo',
- ':libs:elasticsearch-grok',
- ':libs:elasticsearch-lz4',
- ':libs:elasticsearch-nio',
- ':libs:elasticsearch-plugin-classloader',
- ':libs:elasticsearch-secure-sm',
- ':libs:elasticsearch-ssl-config',
- ':libs:elasticsearch-x-content',
- ':modules:analysis-common',
- ':modules:ingest-common',
- ':modules:ingest-geoip',
- ':modules:ingest-user-agent',
- ':modules:lang-expression',
- ':modules:lang-mustache',
- ':modules:lang-painless',
- ':modules:lang-painless:spi',
- ':modules:mapper-extras',
- ':modules:parent-join',
- ':modules:percolator',
- ':modules:rank-eval',
- ':modules:reindex',
- ':modules:repository-url',
- ':modules:systemd',
- ':modules:tasks',
- ':modules:transport-netty4',
- ':plugins:analysis-icu',
- ':plugins:analysis-kuromoji',
- ':plugins:analysis-nori',
- ':plugins:analysis-phonetic',
- ':plugins:analysis-smartcn',
- ':plugins:analysis-stempel',
- ':plugins:analysis-ukrainian',
- ':plugins:discovery-azure-classic',
- ':plugins:discovery-ec2',
- ':plugins:discovery-ec2:qa:amazon-ec2',
- ':plugins:discovery-gce',
- ':plugins:discovery-gce:qa:gce',
- ':plugins:ingest-attachment',
- ':plugins:mapper-annotated-text',
- ':plugins:mapper-murmur3',
- ':plugins:mapper-size',
- ':plugins:repository-azure',
- ':plugins:repository-gcs',
- ':plugins:repository-hdfs',
- ':plugins:repository-hdfs:hadoop-common',
- ':plugins:repository-s3',
- ':plugins:store-smb',
- ':plugins:transport-nio',
- ':qa:die-with-dignity',
- ':rest-api-spec',
- ':test:fixtures:geoip-fixture',
- ':test:fixtures:krb5kdc-fixture',
- ':test:fixtures:old-elasticsearch',
- ':test:framework',
- ':test:logger-usage',
- ':x-pack:license-tools',
- ':x-pack:plugin',
- ':x-pack:plugin:async-search',
- ':x-pack:plugin:async-search:qa',
- ':x-pack:plugin:async-search:qa:security',
- ':x-pack:plugin:autoscaling:qa:rest',
- ':x-pack:plugin:ccr',
- ':x-pack:plugin:ccr:qa',
- ':x-pack:plugin:ccr:qa:rest',
- ':x-pack:plugin:core',
- ':x-pack:plugin:data-streams:qa:multi-node',
- ':x-pack:plugin:data-streams:qa:rest',
- ':x-pack:plugin:deprecation',
- ':x-pack:plugin:enrich:qa:common',
- ':x-pack:plugin:enrich:qa:rest',
- ':x-pack:plugin:enrich:qa:rest-with-advanced-security',
- ':x-pack:plugin:enrich:qa:rest-with-security',
- ':x-pack:plugin:eql',
- ':x-pack:plugin:eql:qa',
- ':x-pack:plugin:eql:qa:common',
- ':x-pack:plugin:eql:qa:rest',
- ':x-pack:plugin:eql:qa:security',
- ':x-pack:plugin:fleet:qa:rest',
- ':x-pack:plugin:graph',
- ':x-pack:plugin:graph:qa:with-security',
- ':x-pack:plugin:identity-provider',
- ':x-pack:plugin:identity-provider:qa:idp-rest-tests',
- ':x-pack:plugin:ilm',
- ':x-pack:plugin:ilm:qa:multi-node',
- ':x-pack:plugin:ilm:qa:rest',
- ':x-pack:plugin:ilm:qa:with-security',
- ':x-pack:plugin:mapper-constant-keyword',
- ':x-pack:plugin:mapper-flattened',
- ':x-pack:plugin:ml',
- ':x-pack:plugin:ml:qa:basic-multi-node',
- ':x-pack:plugin:ml:qa:disabled',
- ':x-pack:plugin:ml:qa:ml-with-security',
- ':x-pack:plugin:ml:qa:native-multi-node-tests',
- ':x-pack:plugin:ml:qa:single-node-tests',
- ':x-pack:plugin:monitoring',
- ':x-pack:plugin:ql',
- ':x-pack:plugin:repository-encrypted:qa:azure',
- ':x-pack:plugin:repository-encrypted:qa:gcs',
- ':x-pack:plugin:repository-encrypted:qa:s3',
- ':x-pack:plugin:search-business-rules',
- ':x-pack:plugin:security',
- ':x-pack:plugin:security:cli',
- ':x-pack:plugin:security:qa:basic-enable-security',
- ':x-pack:plugin:security:qa:security-basic',
- ':x-pack:plugin:security:qa:security-disabled',
- ':x-pack:plugin:security:qa:security-not-enabled',
- ':x-pack:plugin:security:qa:security-trial',
- ':x-pack:plugin:security:qa:service-account',
- ':x-pack:plugin:security:qa:smoke-test-all-realms',
- ':x-pack:plugin:security:qa:tls-basic',
- ':x-pack:plugin:shutdown:qa:multi-node',
- ':x-pack:plugin:spatial',
- ':x-pack:plugin:sql',
- ':x-pack:plugin:sql:jdbc',
- ':x-pack:plugin:sql:qa',
- ':x-pack:plugin:sql:qa:jdbc',
- ':x-pack:plugin:sql:qa:jdbc:security',
- ':x-pack:plugin:sql:qa:security',
- ':x-pack:plugin:sql:sql-action',
- ':x-pack:plugin:sql:sql-cli',
- ':x-pack:plugin:sql:sql-client',
- ':x-pack:plugin:sql:sql-proto',
- ':x-pack:plugin:stack:qa:rest',
- ':x-pack:plugin:text-structure:qa:text-structure-with-security',
- ':x-pack:plugin:transform',
- ':x-pack:plugin:transform:qa:multi-node-tests',
- ':x-pack:plugin:transform:qa:single-node-tests',
- ':x-pack:plugin:vectors',
- ':x-pack:plugin:watcher',
- ':x-pack:plugin:watcher:qa:rest',
- ':x-pack:plugin:watcher:qa:with-monitoring',
- ':x-pack:plugin:watcher:qa:with-security',
- ':x-pack:plugin:wildcard',
- ':x-pack:qa',
- ':x-pack:qa:runtime-fields:core-with-mapped',
- ':x-pack:qa:runtime-fields:core-with-search',
- ':x-pack:qa:runtime-fields:with-security',
- ':x-pack:qa:security-example-spi-extension',
- ':x-pack:test:idp-fixture',
- ':x-pack:test:smb-fixture'
-]
-
-subprojects {
- plugins.withType(ElasticsearchJavaPlugin).whenPluginAdded {
- if (projectPathsToExclude.contains(project.path) == false ||
- providers.systemProperty("es.format.everything").forUseAtConfigurationTime().isPresent()) {
- project.apply plugin: "com.diffplug.spotless"
-
-
- spotless {
- java {
- if (project.path == ':server') {
- target 'src/*/java/org/elasticsearch/action/admin/cluster/repositories/**/*.java',
- 'src/*/java/org/elasticsearch/action/admin/cluster/snapshots/**/*.java',
- 'src/test/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java',
- 'src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java',
- 'src/*/java/org/elasticsearch/index/snapshots/**/*.java',
- 'src/*/java/org/elasticsearch/repositories/**/*.java',
- 'src/*/java/org/elasticsearch/search/aggregations/**/*.java',
- 'src/*/java/org/elasticsearch/snapshots/**/*.java'
- } else {
- // Normally this isn't necessary, but we have Java sources in
- // non-standard places
- target 'src/**/*.java'
- }
-
- toggleOffOn('@formatter:off', '@formatter:on') // use `@formatter:off` and `@formatter:on` to toggle formatting - ONLY IF STRICTLY NECESSARY
- removeUnusedImports()
- importOrderFile rootProject.file('build-tools-internal/elastic.importorder')
- eclipse().configFile rootProject.file('build-tools-internal/formatterConfig.xml')
- trimTrailingWhitespace()
-
- // Sometimes Spotless will report a "misbehaving rule which can't make up its
- // mind" and will recommend enabling the `paddedCell()` setting. If you
- // enabled this setting and run the format check again,
- // Spotless will write files to
- // `$PROJECT/build/spotless-diagnose-java/` to aid diagnosis. It writes
- // different copies of the formatted files, so that you can see how they
- // differ and infer what is the problem.
-
- // The `paddedCell()` option is disabled for normal operation so that any
- // misbehaviour is detected, and not just suppressed. You can enabled the
- // option from the command line by running Gradle with `-Dspotless.paddedcell`.
- if (providers.systemProperty('spotless.paddedcell').forUseAtConfigurationTime().isPresent()) {
- paddedCell()
- }
- }
- }
-
- tasks.named("precommit").configure { dependsOn 'spotlessJavaCheck' }
- }
- }
-}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
index b8654741500dd..a27ed14e4bca3 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BuildPlugin.java
@@ -21,9 +21,10 @@
import org.gradle.api.tasks.bundling.Jar;
import org.gradle.initialization.layout.BuildLayout;
-import javax.inject.Inject;
import java.io.File;
+import javax.inject.Inject;
+
/**
* Encapsulates build configuration for elasticsearch projects.
*/
@@ -37,7 +38,7 @@ public class BuildPlugin implements Plugin {
private final ProjectLayout projectLayout;
@Inject
- BuildPlugin(BuildLayout buildLayout, ObjectFactory objectFactory, ProviderFactory providerFactory, ProjectLayout projectLayout){
+ BuildPlugin(BuildLayout buildLayout, ObjectFactory objectFactory, ProviderFactory providerFactory, ProjectLayout projectLayout) {
this.buildLayout = buildLayout;
this.objectFactory = objectFactory;
this.providerFactory = providerFactory;
@@ -62,7 +63,6 @@ public void apply(final Project project) {
configureLicenseAndNotice(project);
}
-
public void configureLicenseAndNotice(final Project project) {
final ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class);
RegularFileProperty licenseFileProperty = objectFactory.fileProperty();
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcGitExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcGitExtension.java
index d076b4aa001f2..4f4b8f0dbfdeb 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcGitExtension.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcGitExtension.java
@@ -13,9 +13,10 @@
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
-import javax.inject.Inject;
import java.io.File;
+import javax.inject.Inject;
+
public class BwcGitExtension {
private Provider bwcVersion;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java
index f79ada677e8c8..7a57aabfe5f1b 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java
@@ -11,8 +11,8 @@
import org.apache.commons.io.FileUtils;
import org.apache.tools.ant.taskdefs.condition.Os;
import org.elasticsearch.gradle.LoggedExec;
-import org.gradle.api.Action;
import org.elasticsearch.gradle.Version;
+import org.gradle.api.Action;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
@@ -45,10 +45,10 @@ public class BwcSetupExtension {
private Provider checkoutDir;
public BwcSetupExtension(
- Project project,
- Provider unreleasedVersionInfo,
- Provider bwcTaskThrottleProvider,
- Provider checkoutDir
+ Project project,
+ Provider unreleasedVersionInfo,
+ Provider bwcTaskThrottleProvider,
+ Provider checkoutDir
) {
this.project = project;
this.unreleasedVersionInfo = unreleasedVersionInfo;
@@ -115,9 +115,9 @@ public void execute(Task t) {
}
private String minimumCompilerVersionPath(Version bwcVersion) {
- return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION)) ?
- "build-tools-internal/" + MINIMUM_COMPILER_VERSION_PATH :
- "buildSrc/" + MINIMUM_COMPILER_VERSION_PATH;
+ return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION))
+ ? "build-tools-internal/" + MINIMUM_COMPILER_VERSION_PATH
+ : "buildSrc/" + MINIMUM_COMPILER_VERSION_PATH;
}
private static class IndentingOutputStream extends OutputStream {
@@ -132,7 +132,7 @@ private static class IndentingOutputStream extends OutputStream {
@Override
public void write(int b) throws IOException {
- int[] arr = {b};
+ int[] arr = { b };
write(arr, 0, 1);
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java
index da0a09e5fca9e..55be56626bb7c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java
@@ -343,11 +343,11 @@ public List getIndexCompatible() {
}
public void withIndexCompatiple(BiConsumer versionAction) {
- getIndexCompatible().forEach(v -> versionAction.accept(v, "v"+v.toString()));
+ getIndexCompatible().forEach(v -> versionAction.accept(v, "v" + v.toString()));
}
public void withIndexCompatiple(Predicate filter, BiConsumer versionAction) {
- getIndexCompatible().stream().filter(filter).forEach(v -> versionAction.accept(v, "v"+v.toString()));
+ getIndexCompatible().stream().filter(filter).forEach(v -> versionAction.accept(v, "v" + v.toString()));
}
public List getWireCompatible() {
@@ -364,11 +364,11 @@ public List getWireCompatible() {
}
public void withWireCompatiple(BiConsumer versionAction) {
- getWireCompatible().forEach(v -> versionAction.accept(v, "v"+v.toString()));
+ getWireCompatible().forEach(v -> versionAction.accept(v, "v" + v.toString()));
}
public void withWireCompatiple(Predicate filter, BiConsumer versionAction) {
- getWireCompatible().stream().filter(filter).forEach(v -> versionAction.accept(v, "v"+v.toString()));
+ getWireCompatible().stream().filter(filter).forEach(v -> versionAction.accept(v, "v" + v.toString()));
}
private List filterSupportedVersions(List wireCompat) {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ConcatFilesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ConcatFilesTask.java
index f1f33feb781ba..450a81bbebba9 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ConcatFilesTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ConcatFilesTask.java
@@ -7,6 +7,14 @@
*/
package org.elasticsearch.gradle.internal;
+import org.gradle.api.DefaultTask;
+import org.gradle.api.file.FileTree;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.InputFiles;
+import org.gradle.api.tasks.Optional;
+import org.gradle.api.tasks.OutputFile;
+import org.gradle.api.tasks.TaskAction;
+
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -16,14 +24,6 @@
import java.util.LinkedHashSet;
import java.util.List;
-import org.gradle.api.DefaultTask;
-import org.gradle.api.file.FileTree;
-import org.gradle.api.tasks.Input;
-import org.gradle.api.tasks.InputFiles;
-import org.gradle.api.tasks.Optional;
-import org.gradle.api.tasks.OutputFile;
-import org.gradle.api.tasks.TaskAction;
-
/**
* Concatenates a list of files into one and removes duplicate lines.
*/
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DependenciesGraphTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DependenciesGraphTask.java
index 98002f0615ce4..c1c4dff63556c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DependenciesGraphTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DependenciesGraphTask.java
@@ -14,12 +14,12 @@
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
+import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.DependencySet;
import org.gradle.api.artifacts.ProjectDependency;
-import org.gradle.api.DefaultTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.TaskAction;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
index 38978390ad37f..3a023b9d99b95 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java
@@ -10,17 +10,14 @@
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitTaskPlugin;
-import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.info.BuildParams;
+import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.util.GradleUtils;
-import org.gradle.api.Action;
import org.gradle.api.JavaVersion;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
-import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ResolutionStrategy;
import org.gradle.api.plugins.JavaBasePlugin;
-import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.SourceSet;
@@ -31,9 +28,6 @@
import org.gradle.api.tasks.compile.JavaCompile;
import java.util.List;
-import java.util.Objects;
-import java.util.function.Predicate;
-import java.util.stream.Stream;
/**
* A wrapper around Gradle's Java Base plugin that applies our
@@ -95,16 +89,17 @@ public static void configureConfigurations(Project project) {
private static void disableTransitiveDependenciesForSourceSet(Project project, SourceSet sourceSet) {
List sourceSetConfigurationNames = List.of(
- sourceSet.getApiConfigurationName(),
- sourceSet.getImplementationConfigurationName(),
- sourceSet.getImplementationConfigurationName(),
- sourceSet.getCompileOnlyConfigurationName(),
- sourceSet.getRuntimeOnlyConfigurationName()
+ sourceSet.getApiConfigurationName(),
+ sourceSet.getImplementationConfigurationName(),
+ sourceSet.getImplementationConfigurationName(),
+ sourceSet.getCompileOnlyConfigurationName(),
+ sourceSet.getRuntimeOnlyConfigurationName()
);
- project.getConfigurations().matching(c -> sourceSetConfigurationNames.contains(c.getName()))
- .configureEach(GradleUtils::disableTransitiveDependencies);
- }
+ project.getConfigurations()
+ .matching(c -> sourceSetConfigurationNames.contains(c.getName()))
+ .configureEach(GradleUtils::disableTransitiveDependencies);
+ }
/**
* Adds compiler settings to the project
@@ -139,14 +134,10 @@ public static void configureCompile(Project project) {
compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask));
});
// also apply release flag to groovy, which is used in build-tools
- project.getTasks()
- .withType(GroovyCompile.class)
- .configureEach(
- compileTask -> {
- // TODO: this probably shouldn't apply to groovy at all?
- compileTask.getOptions().getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask));
- }
- );
+ project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> {
+ // TODO: this probably shouldn't apply to groovy at all?
+ compileTask.getOptions().getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask));
+ });
}
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java
index 02a1c235108c1..9ea74fb7754ac 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaPlugin.java
@@ -8,44 +8,29 @@
package org.elasticsearch.gradle.internal;
-import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar;
import nebula.plugin.info.InfoBrokerPlugin;
+
+import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar;
+
import org.elasticsearch.gradle.VersionProperties;
-import org.elasticsearch.gradle.internal.info.BuildParams;
-import org.elasticsearch.gradle.util.GradleUtils;
import org.elasticsearch.gradle.internal.conventions.util.Util;
+import org.elasticsearch.gradle.internal.info.BuildParams;
import org.gradle.api.Action;
-import org.gradle.api.JavaVersion;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
-import org.gradle.api.artifacts.ModuleDependency;
-import org.gradle.api.artifacts.ProjectDependency;
-import org.gradle.api.artifacts.ResolutionStrategy;
import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.plugins.JavaLibraryPlugin;
import org.gradle.api.plugins.JavaPlugin;
-import org.gradle.api.plugins.JavaPluginExtension;
-import org.gradle.api.provider.Provider;
-import org.gradle.api.tasks.SourceSet;
-import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.bundling.Jar;
-import org.gradle.api.tasks.compile.AbstractCompile;
-import org.gradle.api.tasks.compile.CompileOptions;
-import org.gradle.api.tasks.compile.GroovyCompile;
-import org.gradle.api.tasks.compile.JavaCompile;
import org.gradle.api.tasks.javadoc.Javadoc;
import org.gradle.external.javadoc.CoreJavadocOptions;
import org.gradle.language.base.plugins.LifecycleBasePlugin;
import java.io.File;
-import java.util.List;
import java.util.Map;
-import java.util.Objects;
-import java.util.function.Consumer;
-import java.util.stream.Stream;
import static org.elasticsearch.gradle.internal.conventions.util.Util.toStringable;
@@ -59,7 +44,7 @@ public void apply(Project project) {
project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class);
project.getPluginManager().apply(JavaLibraryPlugin.class);
-// configureConfigurations(project);
+ // configureConfigurations(project);
configureJars(project);
configureJarManifest(project);
configureJavadoc(project);
@@ -77,41 +62,37 @@ private static void testCompileOnlyDeps(Project project) {
* Adds additional manifest info to jars
*/
static void configureJars(Project project) {
- project.getTasks().withType(Jar.class).configureEach(
- jarTask -> {
- // we put all our distributable files under distributions
- jarTask.getDestinationDirectory().set(new File(project.getBuildDir(), "distributions"));
- // fixup the jar manifest
- // Explicitly using an Action interface as java lambdas
- // are not supported by Gradle up-to-date checks
- jarTask.doFirst(new Action() {
- @Override
- public void execute(Task task) {
- // this doFirst is added before the info plugin, therefore it will run
- // after the doFirst added by the info plugin, and we can override attributes
- jarTask.getManifest()
- .attributes(
- Map.of("Build-Date", BuildParams.getBuildDate(), "Build-Java-Version", BuildParams.getGradleJavaVersion()
- )
- );
- }
- });
- }
- );
+ project.getTasks().withType(Jar.class).configureEach(jarTask -> {
+ // we put all our distributable files under distributions
+ jarTask.getDestinationDirectory().set(new File(project.getBuildDir(), "distributions"));
+ // fixup the jar manifest
+ // Explicitly using an Action interface as java lambdas
+ // are not supported by Gradle up-to-date checks
+ jarTask.doFirst(new Action() {
+ @Override
+ public void execute(Task task) {
+ // this doFirst is added before the info plugin, therefore it will run
+ // after the doFirst added by the info plugin, and we can override attributes
+ jarTask.getManifest()
+ .attributes(
+ Map.of("Build-Date", BuildParams.getBuildDate(), "Build-Java-Version", BuildParams.getGradleJavaVersion())
+ );
+ }
+ });
+ });
project.getPluginManager().withPlugin("com.github.johnrengelman.shadow", p -> {
project.getTasks().withType(ShadowJar.class).configureEach(shadowJar -> {
- /*
- * Replace the default "-all" classifier with null
- * which will leave the classifier off of the file name.
- */
- shadowJar.getArchiveClassifier().set((String) null);
- /*
- * Not all cases need service files merged but it is
- * better to be safe
- */
- shadowJar.mergeServiceFiles();
- }
- );
+ /*
+ * Replace the default "-all" classifier with null
+ * which will leave the classifier off of the file name.
+ */
+ shadowJar.getArchiveClassifier().set((String) null);
+ /*
+ * Not all cases need service files merged but it is
+ * better to be safe
+ */
+ shadowJar.mergeServiceFiles();
+ });
// Add "original" classifier to the non-shadowed JAR to distinguish it from the shadow JAR
project.getTasks().named(JavaPlugin.JAR_TASK_NAME, Jar.class).configure(jar -> jar.getArchiveClassifier().set("original"));
// Make sure we assemble the shadow jar
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
index f5aa5fd61d5f8..840a35b4dea6c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java
@@ -9,14 +9,15 @@
package org.elasticsearch.gradle.internal;
import com.github.jengelman.gradle.plugins.shadow.ShadowBasePlugin;
+
import org.elasticsearch.gradle.OS;
-import org.elasticsearch.gradle.internal.test.SimpleCommandLineArgumentProvider;
-import org.elasticsearch.gradle.test.GradleTestPolicySetupPlugin;
-import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider;
+import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener;
-import org.elasticsearch.gradle.internal.conventions.util.Util;
+import org.elasticsearch.gradle.internal.test.SimpleCommandLineArgumentProvider;
+import org.elasticsearch.gradle.test.GradleTestPolicySetupPlugin;
+import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider;
import org.gradle.api.Action;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmptyDirTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmptyDirTask.java
index 471d9d80ce5cf..bbcaabe18905e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmptyDirTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmptyDirTask.java
@@ -7,16 +7,16 @@
*/
package org.elasticsearch.gradle.internal;
-import java.io.File;
-
-import javax.inject.Inject;
-
import org.gradle.api.DefaultTask;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.TaskAction;
import org.gradle.internal.file.Chmod;
+import java.io.File;
+
+import javax.inject.Inject;
+
/**
* Creates an empty directory.
*/
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java
index ff5692bd3d959..1dce3a7092d85 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java
@@ -25,13 +25,14 @@
import org.gradle.process.ExecResult;
import org.gradle.process.ExecSpec;
-import javax.inject.Inject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
+import javax.inject.Inject;
+
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static java.util.Arrays.asList;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java
index 3c6bc089274b8..e3718a95e7ef5 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal;
import org.elasticsearch.gradle.VersionProperties;
+import org.elasticsearch.gradle.internal.conventions.GUtils;
import org.elasticsearch.gradle.internal.conventions.LicensingPlugin;
import org.gradle.api.Action;
import org.gradle.api.GradleException;
@@ -19,7 +20,6 @@
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.TaskProvider;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
@@ -29,7 +29,8 @@
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
-import org.elasticsearch.gradle.internal.conventions.GUtils;
+
+import javax.inject.Inject;
public class InternalDistributionArchiveCheckPlugin implements InternalPlugin {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
index a633d16667646..b23fb215bcffc 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java
@@ -21,8 +21,8 @@
import org.gradle.api.tasks.bundling.Zip;
import java.io.File;
-import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize;
+import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize;
import static org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
index 2bf1e33e3bc1c..a41ce13daf24f 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java
@@ -22,13 +22,14 @@
import org.gradle.api.tasks.TaskProvider;
import org.gradle.language.base.plugins.LifecycleBasePlugin;
-import javax.inject.Inject;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
import static java.util.Arrays.asList;
import static java.util.Arrays.stream;
@@ -165,18 +166,22 @@ private void registerDistributionArchiveArtifact(Project bwcProject, Distributio
private static List resolveArchiveProjects(File checkoutDir, Version bwcVersion) {
List projects = new ArrayList<>();
- // All active BWC branches publish default and oss variants of rpm and deb packages
- projects.addAll(asList("deb", "rpm", "oss-deb", "oss-rpm"));
-
- if (bwcVersion.onOrAfter("7.0.0")) { // starting with 7.0 we bundle a jdk which means we have platform-specific archives
- projects.addAll(asList("oss-windows-zip", "windows-zip", "oss-darwin-tar", "darwin-tar", "oss-linux-tar", "linux-tar"));
-
- // We support aarch64 for linux and mac starting from 7.12
- if (bwcVersion.onOrAfter("7.12.0")) {
- projects.addAll(asList("oss-darwin-aarch64-tar", "oss-linux-aarch64-tar", "darwin-aarch64-tar", "linux-aarch64-tar"));
+ if (bwcVersion.onOrAfter("7.13.0")) {
+ projects.addAll(asList("deb", "rpm"));
+ projects.addAll(asList("windows-zip", "darwin-tar", "linux-tar"));
+ projects.addAll(asList("darwin-aarch64-tar", "linux-aarch64-tar"));
+ } else {
+ projects.addAll(asList("deb", "rpm", "oss-deb", "oss-rpm"));
+ if (bwcVersion.onOrAfter("7.0.0")) { // starting with 7.0 we bundle a jdk which means we have platform-specific archives
+ projects.addAll(asList("oss-windows-zip", "windows-zip", "oss-darwin-tar", "darwin-tar", "oss-linux-tar", "linux-tar"));
+
+ // We support aarch64 for linux and mac starting from 7.12
+ if (bwcVersion.onOrAfter("7.12.0")) {
+ projects.addAll(asList("oss-darwin-aarch64-tar", "oss-linux-aarch64-tar", "darwin-aarch64-tar", "linux-aarch64-tar"));
+ }
+ } else { // prior to 7.0 we published only a single zip and tar archives for oss and default distributions
+ projects.addAll(asList("oss-zip", "zip", "tar", "oss-tar"));
}
- } else { // prior to 7.0 we published only a single zip and tar archives for oss and default distributions
- projects.addAll(asList("oss-zip", "zip", "tar", "oss-tar"));
}
return projects.stream().map(name -> {
@@ -243,7 +248,7 @@ static void createBuildBwcTask(
public void execute(Task task) {
if (expectedOutputFile.exists() == false) {
throw new InvalidUserDataException(
- "Building " + bwcVersion.get() + " didn't generate expected artifact " + expectedOutputFile
+ "Building " + bwcVersion.get() + " didn't generate expected artifact " + expectedOutputFile
);
}
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
index 00c857faf65a0..3c6dccf9c9179 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionDownloadPlugin.java
@@ -75,7 +75,8 @@ private void registerInternalDistributionResolutions(NamedDomainObjectContainer<
}));
resolutions.register("bwc", distributionResolution -> distributionResolution.setResolver((project, distribution) -> {
- BwcVersions.UnreleasedVersionInfo unreleasedInfo = BuildParams.getBwcVersions().unreleasedInfo(Version.fromString(distribution.getVersion()));
+ BwcVersions.UnreleasedVersionInfo unreleasedInfo = BuildParams.getBwcVersions()
+ .unreleasedInfo(Version.fromString(distribution.getVersion()));
if (unreleasedInfo != null) {
if (distribution.getBundledJdk() == false) {
throw new GradleException(
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalPluginBuildPlugin.java
index 9b1c590e8108d..1df86edb894e2 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalPluginBuildPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalPluginBuildPlugin.java
@@ -9,12 +9,16 @@
package org.elasticsearch.gradle.internal;
import groovy.lang.Closure;
+
+import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.precommit.TestingConventionsTasks;
import org.elasticsearch.gradle.internal.test.RestTestBasePlugin;
-import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
+import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
import org.elasticsearch.gradle.util.GradleUtils;
+import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.api.tasks.bundling.Zip;
@@ -77,9 +81,30 @@ public void doCall() {
if (isModule == false || isXPackModule) {
addNoticeGeneration(p, extension);
}
+
+ NamedDomainObjectContainer testClusters = (NamedDomainObjectContainer) project
+ .getExtensions()
+ .getByName(TestClustersPlugin.EXTENSION_NAME);
+ p.getExtensions().getByType(PluginPropertiesExtension.class).getExtendedPlugins().forEach(pluginName -> {
+ // Auto add any dependent modules
+ findModulePath(project, pluginName).ifPresent(
+ path -> testClusters.configureEach(elasticsearchCluster -> elasticsearchCluster.module(path))
+ );
+ });
});
}
+ Optional findModulePath(Project project, String pluginName) {
+ return project.getRootProject()
+ .getAllprojects()
+ .stream()
+ .filter(p -> GradleUtils.isModuleProject(p.getPath()))
+ .filter(p -> p.getPlugins().hasPlugin(PluginBuildPlugin.class))
+ .filter(p -> p.getExtensions().getByType(PluginPropertiesExtension.class).getName().equals(pluginName))
+ .findFirst()
+ .map(Project::getPath);
+ }
+
/**
* Configure the pom for the main jar of this plugin
*/
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactExtension.java
index c87f29698d687..fae845b229651 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactExtension.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestArtifactExtension.java
@@ -9,7 +9,6 @@
package org.elasticsearch.gradle.internal;
import org.gradle.api.Project;
-import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.plugins.BasePluginExtension;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
index 45ede27be9ac4..a1008babb3987 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java
@@ -20,7 +20,6 @@
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.attributes.Attribute;
import org.gradle.api.internal.artifacts.ArtifactAttributes;
-import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/NoticeTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/NoticeTask.java
index b51f0603833a8..f660b00dfd333 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/NoticeTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/NoticeTask.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal;
import org.codehaus.groovy.runtime.StringGroovyMethods;
+import org.elasticsearch.gradle.util.FileUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileTree;
@@ -19,7 +20,6 @@
import org.gradle.api.tasks.OutputFile;
import org.gradle.api.tasks.TaskAction;
-import org.elasticsearch.gradle.util.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RepositoriesSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RepositoriesSetupPlugin.java
index e0da9de8f5256..098b7923bcbde 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RepositoriesSetupPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RepositoriesSetupPlugin.java
@@ -15,11 +15,6 @@
import org.gradle.api.artifacts.dsl.RepositoryHandler;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ResolveAllDependencies.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ResolveAllDependencies.java
index 818519dad3bda..214ce9ecaa0a9 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ResolveAllDependencies.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ResolveAllDependencies.java
@@ -16,10 +16,11 @@
import org.gradle.api.tasks.TaskAction;
import org.gradle.internal.deprecation.DeprecatableConfiguration;
-import javax.inject.Inject;
import java.util.Collection;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
import static org.elasticsearch.gradle.DistributionDownloadPlugin.DISTRO_EXTRACTED_CONFIG_PREFIX;
import static org.elasticsearch.gradle.internal.rest.compat.YamlRestCompatTestPlugin.BWC_MINOR_CONFIG_NAME;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
index 3449b4d9e40ee..bb3b4da6db815 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerBuildTask.java
@@ -34,6 +34,7 @@
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
+
import javax.inject.Inject;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java
index d5b0d20290dca..552b3bec8674c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportService.java
@@ -17,7 +17,6 @@
import org.gradle.process.ExecOperations;
import org.gradle.process.ExecResult;
-import javax.inject.Inject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
@@ -32,6 +31,8 @@
import java.util.Optional;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
/**
* Build service for detecting available Docker installation and checking for compatibility with Elasticsearch Docker image build
* requirements. This includes a minimum version requirement, as well as the ability to run privileged commands.
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
index feb0a9105a98a..3077bcd70e10a 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java
@@ -8,8 +8,8 @@
package org.elasticsearch.gradle.internal.info;
import org.apache.commons.io.IOUtils;
-import org.elasticsearch.gradle.internal.BwcVersions;
import org.elasticsearch.gradle.OS;
+import org.elasticsearch.gradle.internal.BwcVersions;
import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
import org.elasticsearch.gradle.internal.conventions.info.ParallelDetector;
import org.elasticsearch.gradle.internal.conventions.util.Util;
@@ -29,7 +29,6 @@
import org.gradle.jvm.toolchain.internal.JavaInstallationRegistry;
import org.gradle.util.GradleVersion;
-import javax.inject.Inject;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
@@ -50,6 +49,8 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import javax.inject.Inject;
+
public class GlobalBuildInfoPlugin implements Plugin {
private static final Logger LOGGER = Logging.getLogger(GlobalBuildInfoPlugin.class);
private static final String DEFAULT_VERSION_JAVA_FILE_PATH = "server/src/main/java/org/elasticsearch/Version.java";
@@ -344,10 +345,9 @@ private static int findDefaultParallel(Project project) {
return _defaultParallel;
}
-
public static String getResourceContents(String resourcePath) {
try (
- BufferedReader reader = new BufferedReader(new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath)))
+ BufferedReader reader = new BufferedReader(new InputStreamReader(GlobalBuildInfoPlugin.class.getResourceAsStream(resourcePath)))
) {
StringBuilder b = new StringBuilder();
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
@@ -373,7 +373,7 @@ private static class ErrorTraceMetadataDetector implements JvmMetadataDetector {
@Override
public JvmInstallationMetadata getMetadata(File file) {
JvmInstallationMetadata metadata = delegate.getMetadata(file);
- if(metadata instanceof JvmInstallationMetadata.FailureInstallationMetadata) {
+ if (metadata instanceof JvmInstallationMetadata.FailureInstallationMetadata) {
throw new GradleException("Jvm Metadata cannot be resolved for " + metadata.getJavaHome().toString());
}
return metadata;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsPrecommitPlugin.java
index df39b9088c973..7f240b6d4cb81 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsPrecommitPlugin.java
@@ -16,9 +16,10 @@
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.tasks.TaskProvider;
-import javax.inject.Inject;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
public class FilePermissionsPrecommitPlugin extends PrecommitPlugin implements InternalPlugin {
public static final String FILEPERMISSIONS_TASK_NAME = "filepermissions";
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java
index 55a5e095cb9b0..ee3e58fd0552e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java
@@ -7,15 +7,6 @@
*/
package org.elasticsearch.gradle.internal.precommit;
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.attribute.PosixFileAttributeView;
-import java.nio.file.attribute.PosixFilePermission;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
import org.apache.tools.ant.taskdefs.condition.Os;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
@@ -32,6 +23,15 @@
import org.gradle.api.tasks.util.PatternFilterable;
import org.gradle.api.tasks.util.PatternSet;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.attribute.PosixFileAttributeView;
+import java.nio.file.attribute.PosixFilePermission;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
import javax.inject.Inject;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java
index 24d5bb7177e04..163e9f1e13d07 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java
@@ -11,10 +11,11 @@
import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis;
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin;
import groovy.lang.Closure;
+
import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask;
-import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.InternalPlugin;
import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin;
+import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.JavaVersion;
import org.gradle.api.Project;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsPrecommitPlugin.java
index 4a3a4f0c5745d..0301918390955 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsPrecommitPlugin.java
@@ -16,9 +16,10 @@
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.tasks.TaskProvider;
-import javax.inject.Inject;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
public class ForbiddenPatternsPrecommitPlugin extends PrecommitPlugin implements InternalPlugin {
public static final String FORBIDDEN_PATTERNS_TASK_NAME = "forbiddenPatterns";
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java
index 6e17a31c4a9a2..e4d732149e0eb 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java
@@ -28,7 +28,6 @@
import org.gradle.api.tasks.util.PatternFilterable;
import org.gradle.api.tasks.util.PatternSet;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -46,6 +45,8 @@
import java.util.stream.IntStream;
import java.util.stream.Stream;
+import javax.inject.Inject;
+
/**
* Checks for patterns in source files for the project which are forbidden.
*/
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LicenseAnalyzer.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LicenseAnalyzer.java
index 7fe579026349b..df61f2f84a7fa 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LicenseAnalyzer.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LicenseAnalyzer.java
@@ -152,14 +152,27 @@ public class LicenseAnalyzer {
new LicenseMatcher("EDL-1.0", true, false, Pattern.compile("Eclipse Distribution License - v 1.0", Pattern.DOTALL)),
new LicenseMatcher("LGPL-2.1", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 2.1", Pattern.DOTALL)),
new LicenseMatcher("LGPL-3.0", true, true, Pattern.compile("GNU LESSER GENERAL PUBLIC LICENSE.*Version 3", Pattern.DOTALL)),
- new LicenseMatcher("GeoLite", false, false,
- Pattern.compile(("The Elastic GeoIP Database Service uses the GeoLite2 Data created " +
- "and licensed by MaxMind,\nwhich is governed by MaxMind’s GeoLite2 End User License Agreement, " +
- "available at https://www.maxmind.com/en/geolite2/eula.\n").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL)),
- new LicenseMatcher("GeoIp-Database-Service", false, false,
- Pattern.compile(("By using the GeoIP Database Service, you agree to the Elastic GeoIP Database Service Agreement,\n" +
- "available at www.elastic.co/elastic-geoip-database-service-terms.").replaceAll("\\s+", "\\\\s*"), Pattern.DOTALL))};
-
+ new LicenseMatcher(
+ "GeoLite",
+ false,
+ false,
+ Pattern.compile(
+ ("The Elastic GeoIP Database Service uses the GeoLite2 Data created "
+ + "and licensed by MaxMind,\nwhich is governed by MaxMind’s GeoLite2 End User License Agreement, "
+ + "available at https://www.maxmind.com/en/geolite2/eula.\n").replaceAll("\\s+", "\\\\s*"),
+ Pattern.DOTALL
+ )
+ ),
+ new LicenseMatcher(
+ "GeoIp-Database-Service",
+ false,
+ false,
+ Pattern.compile(
+ ("By using the GeoIP Database Service, you agree to the Elastic GeoIP Database Service Agreement,\n"
+ + "available at www.elastic.co/elastic-geoip-database-service-terms.").replaceAll("\\s+", "\\\\s*"),
+ Pattern.DOTALL
+ )
+ ) };
public static LicenseInfo licenseType(File licenseFile) {
for (LicenseMatcher matcher : matchers) {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java
index a1a025eeea6d3..0e653912693d8 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/LoggerUsageTask.java
@@ -11,7 +11,6 @@
import org.elasticsearch.gradle.LoggedExec;
import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitTask;
import org.gradle.api.file.FileCollection;
-import org.gradle.api.plugins.JavaPluginConvention;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.tasks.CacheableTask;
import org.gradle.api.tasks.Classpath;
@@ -23,9 +22,10 @@
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.ExecOperations;
-import javax.inject.Inject;
import java.io.File;
+import javax.inject.Inject;
+
/**
* Runs LoggerUsageCheck on a set of directories.
*/
@@ -63,7 +63,8 @@ public void setClasspath(FileCollection classpath) {
@PathSensitive(PathSensitivity.RELATIVE)
@SkipWhenEmpty
public FileCollection getClassDirectories() {
- return getProject().getExtensions().getByType(JavaPluginExtension.class)
+ return getProject().getExtensions()
+ .getByType(JavaPluginExtension.class)
.getSourceSets()
.stream()
// Don't pick up all source sets like the java9 ones as logger-check doesn't support the class format
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditTask.java
index d196fa3a3f054..a96ea02873643 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditTask.java
@@ -32,7 +32,6 @@
import org.gradle.workers.WorkParameters;
import org.gradle.workers.WorkerExecutor;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -47,11 +46,12 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.Consumer;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
/**
* Checks for split packages with dependencies. These are not allowed in a future modularized world.
*/
@@ -163,8 +163,10 @@ public void execute() {
LOGGER.error(String.join(System.lineSeparator(), msg));
}
if (splitPackages.isEmpty() == false) {
- throw new GradleException("Verification failed: Split packages found! See errors above for details.\n" +
- "DO NOT ADD THESE SPLIT PACKAGES TO THE IGNORE LIST! Choose a new package name for the classes added.");
+ throw new GradleException(
+ "Verification failed: Split packages found! See errors above for details.\n"
+ + "DO NOT ADD THESE SPLIT PACKAGES TO THE IGNORE LIST! Choose a new package name for the classes added."
+ );
}
try {
@@ -184,7 +186,10 @@ private Map> getDependencyPackages() {
if (LOGGER.isInfoEnabled()) {
List msg = new ArrayList<>();
msg.add("Packages from dependencies:");
- packages.entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(e -> msg.add(" -" + e.getKey() + " -> " + e.getValue()));
+ packages.entrySet()
+ .stream()
+ .sorted(Map.Entry.comparingByKey())
+ .forEach(e -> msg.add(" -" + e.getKey() + " -> " + e.getValue()));
LOGGER.info(String.join(System.lineSeparator(), msg));
}
return packages;
@@ -198,9 +203,16 @@ private Map> findSplitPackages(Set dependencyPackage
String packageName = getPackageName(path);
String className = path.subpath(path.getNameCount() - 1, path.getNameCount()).toString();
className = className.substring(0, className.length() - ".java".length());
- LOGGER.info("Inspecting " + path + System.lineSeparator()
- + " package: " + packageName + System.lineSeparator()
- + " class: " + className);
+ LOGGER.info(
+ "Inspecting "
+ + path
+ + System.lineSeparator()
+ + " package: "
+ + packageName
+ + System.lineSeparator()
+ + " class: "
+ + className
+ );
if (dependencyPackages.contains(packageName)) {
splitPackages.computeIfAbsent(packageName, k -> new TreeSet<>()).add(packageName + "." + className);
}
@@ -212,7 +224,10 @@ private Map> findSplitPackages(Set dependencyPackage
if (LOGGER.isInfoEnabled()) {
List msg = new ArrayList<>();
msg.add("Split packages:");
- splitPackages.entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(e -> msg.add(" -" + e.getKey() + " -> " + e.getValue()));
+ splitPackages.entrySet()
+ .stream()
+ .sorted(Map.Entry.comparingByKey())
+ .forEach(e -> msg.add(" -" + e.getKey() + " -> " + e.getValue()));
LOGGER.info(String.join(System.lineSeparator(), msg));
}
return splitPackages;
@@ -319,10 +334,15 @@ private String formatDependency(File dependencyFile) {
interface Parameters extends WorkParameters {
Property getProjectPath();
+
MapProperty getProjectBuildDirs();
+
ConfigurableFileCollection getClasspath();
+
SetProperty getSrcDirs();
+
SetProperty getIgnoreClasses();
+
RegularFileProperty getMarkerFile();
}
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/TestingConventionsTasks.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/TestingConventionsTasks.java
index 0177564e6ebb8..ea6160f422dec 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/TestingConventionsTasks.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/TestingConventionsTasks.java
@@ -8,8 +8,9 @@
package org.elasticsearch.gradle.internal.precommit;
import groovy.lang.Closure;
-import org.elasticsearch.gradle.util.GradleUtils;
+
import org.elasticsearch.gradle.internal.conventions.util.Util;
+import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Task;
@@ -24,7 +25,6 @@
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.testing.Test;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.lang.annotation.Annotation;
@@ -48,6 +48,8 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import javax.inject.Inject;
+
public class TestingConventionsTasks extends DefaultTask {
private static final String TEST_METHOD_PREFIX = "test";
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
index e8ce2ab6e45b0..380ab863c8584 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java
@@ -8,11 +8,11 @@
package org.elasticsearch.gradle.internal.precommit;
-import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask;
import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
-import org.elasticsearch.gradle.internal.info.BuildParams;
+import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask;
import org.elasticsearch.gradle.internal.InternalPlugin;
import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin;
+import org.elasticsearch.gradle.internal.info.BuildParams;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java
index 69cff34969a24..67f046806e5d6 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java
@@ -8,6 +8,7 @@
package org.elasticsearch.gradle.internal.precommit;
import de.thetaphi.forbiddenapis.cli.CliMain;
+
import org.apache.commons.io.output.NullOutputStream;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonAgainstSchemaTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonAgainstSchemaTask.java
index db6d2c1135b90..20e0a2e0e1a89 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonAgainstSchemaTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonAgainstSchemaTask.java
@@ -15,6 +15,7 @@
import com.networknt.schema.SchemaValidatorsConfig;
import com.networknt.schema.SpecVersion;
import com.networknt.schema.ValidationMessage;
+
import org.gradle.api.DefaultTask;
import org.gradle.api.UncheckedIOException;
import org.gradle.api.file.FileCollection;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonNoKeywordsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonNoKeywordsTask.java
index f0b90fc3bb03e..72f7420a720bc 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonNoKeywordsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ValidateJsonNoKeywordsTask.java
@@ -12,6 +12,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.file.FileCollection;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java
index 0769996c50a88..70fafc303bcd3 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/GenerateReleaseNotesTask.java
@@ -36,6 +36,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
+
import javax.inject.Inject;
import static java.util.Comparator.naturalOrder;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTask.java
index 9b76468060aaf..45d85ec61cec0 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTask.java
@@ -19,7 +19,6 @@
import org.gradle.api.logging.Logging;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.tasks.Internal;
-import org.gradle.api.tasks.SkipWhenEmpty;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.options.Option;
import org.gradle.process.ExecOperations;
@@ -30,6 +29,7 @@
import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+
import javax.inject.Inject;
/**
@@ -146,10 +146,9 @@ static void findAndDeleteFiles(
* @return filenames for changelog files in previous releases, without any path
*/
private static Set findAllFilesInEarlierVersions(GitWrapper gitWrapper, QualifiedVersion version) {
- return findPreviousVersion(gitWrapper, version)
- .flatMap(earlierVersion -> gitWrapper.listFiles("v" + earlierVersion, "docs/changelog"))
- .map(line -> Path.of(line).getFileName().toString())
- .collect(Collectors.toSet());
+ return findPreviousVersion(gitWrapper, version).flatMap(
+ earlierVersion -> gitWrapper.listFiles("v" + earlierVersion, "docs/changelog")
+ ).map(line -> Path.of(line).getFileName().toString()).collect(Collectors.toSet());
}
/**
@@ -166,8 +165,7 @@ static Stream findPreviousVersion(GitWrapper gitWrapper, Quali
final int majorSeries = version.getMinor() == 0 && version.getRevision() == 0 ? version.getMajor() - 1 : version.getMajor();
final String tagPattern = "v" + majorSeries + ".*";
- return gitWrapper.listVersions(tagPattern)
- .filter(v -> v.isBefore(version));
+ return gitWrapper.listVersions(tagPattern).filter(v -> v.isBefore(version));
}
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
index 052f68172d43a..8f08da371ec4b 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java
@@ -22,6 +22,7 @@
import org.gradle.api.tasks.util.PatternSet;
import java.io.File;
+
import javax.inject.Inject;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
index 149e8411dffaa..14114314ad4de 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ValidateChangelogEntryTask.java
@@ -20,6 +20,7 @@
import java.net.URI;
import java.util.Map;
import java.util.stream.Collectors;
+
import javax.inject.Inject;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java
index cb5e7197f0e55..8d03ff609d685 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java
@@ -68,6 +68,7 @@
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
+
import javax.inject.Inject;
/**
@@ -126,7 +127,7 @@ private static boolean doesNotHaveCatOperation(ObjectNode doNodeValue) {
}
public void skipTest(String fullTestName, String reason) {
- //The tests are defined by 3 parts a/b/c where
+ // The tests are defined by 3 parts a/b/c where
// a = the folder name
// b = the file name without the .yml extension
// c = the test name inside the .yml
@@ -135,15 +136,19 @@ public void skipTest(String fullTestName, String reason) {
// So we also need to support a1/a2/a3/b/c1/c2/c3
String[] testParts = fullTestName.split("/");
- if(testParts.length < 3 ){
- throw new IllegalArgumentException("To skip tests, all 3 parts [folder/file/test name] must be defined. found [" + fullTestName + "]");
+ if (testParts.length < 3) {
+ throw new IllegalArgumentException(
+ "To skip tests, all 3 parts [folder/file/test name] must be defined. found [" + fullTestName + "]"
+ );
}
PatternSet skippedPatternSet = patternSetFactory.create();
- //create file patterns for all a1/a2/a3/b.yml possibilities.
- for(int i = testParts.length - 1; i > 1; i-- ){
+ // create file patterns for all a1/a2/a3/b.yml possibilities.
+ for (int i = testParts.length - 1; i > 1; i--) {
final String lastPart = testParts[i];
- String filePattern = "**/" + Arrays.stream(testParts).takeWhile(x -> x.equals(lastPart) == false).collect(Collectors.joining("/")) + ".yml";
+ String filePattern = "**/"
+ + Arrays.stream(testParts).takeWhile(x -> x.equals(lastPart) == false).collect(Collectors.joining("/"))
+ + ".yml";
skippedPatternSet.include(filePattern);
}
@@ -305,7 +310,6 @@ public void replaceValueTextByKeyValue(String key, String oldValue, Object newVa
transformations.add(new ReplaceTextual(key, oldValue, MAPPER.convertValue(newValue, TextNode.class), testName));
}
-
/**
* Removes the key/value of a match assertion all project REST tests for the matching subkey.
* For example "match":{"_type": "foo"} to "match":{}
@@ -425,7 +429,7 @@ public void transform() throws IOException {
Map<File, String> skippedFilesWithReason = new HashMap<>();
skippedTestByFilePatternTransformations.forEach((filePattern, reason) -> {
- //resolve file pattern to concrete files
+ // resolve file pattern to concrete files
for (File file : getTestFiles().matching(filePattern).getFiles()) {
skippedFilesWithReason.put(file, reason);
}
@@ -433,7 +437,7 @@ public void transform() throws IOException {
Map<File, List<Pair<String, String>>> skippedFilesWithTestAndReason = new HashMap<>();
skippedTestByTestNameTransformations.forEach((filePattern, testWithReason) -> {
- //resolve file pattern to concrete files
+ // resolve file pattern to concrete files
for (File file : getTestFiles().matching(filePattern).getFiles()) {
skippedFilesWithTestAndReason.put(file, testWithReason);
}
@@ -446,12 +450,14 @@ public void transform() throws IOException {
List<ObjectNode> tests = READER.<ObjectNode>readValues(yamlParser).readAll();
List<ObjectNode> transformRestTests;
if (skippedFilesWithReason.containsKey(file)) {
- //skip all the tests in the file
- transformRestTests = transformer.transformRestTests(new LinkedList<>(tests),
- Collections.singletonList(new Skip(skippedFilesWithReason.get(file))));
+ // skip all the tests in the file
+ transformRestTests = transformer.transformRestTests(
+ new LinkedList<>(tests),
+ Collections.singletonList(new Skip(skippedFilesWithReason.get(file)))
+ );
} else {
if (skippedFilesWithTestAndReason.containsKey(file)) {
- //skip the named tests for this file
+ // skip the named tests for this file
skippedFilesWithTestAndReason.get(file).forEach(fullTestNameAndReasonPair -> {
String prefix = file.getName().replace(".yml", "/");
String singleTestName = fullTestNameAndReasonPair.getLeft().replaceAll(".*" + prefix, "");
@@ -488,7 +494,8 @@ public List<RestTestTransform<?>> getTransformations() {
@Input
public String getSkippedTestByFilePatternTransformations() {
- return skippedTestByFilePatternTransformations.keySet().stream()
+ return skippedTestByFilePatternTransformations.keySet()
+ .stream()
.map(key -> String.join(",", key.getIncludes()) + skippedTestByFilePatternTransformations.get(key))
.collect(Collectors.joining());
}
@@ -496,7 +503,8 @@ public String getSkippedTestByFilePatternTransformations() {
@Input
public String getSkippedTestByTestNameTransformations() {
- return skippedTestByTestNameTransformations.keySet().stream()
+ return skippedTestByTestNameTransformations.keySet()
+ .stream()
.map(key -> String.join(",", key.getIncludes()) + skippedTestByTestNameTransformations.get(key))
.collect(Collectors.joining());
}
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java
index 87bd5f78e70bc..4f75f5cd297f6 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java
@@ -214,7 +214,6 @@ public void apply(Project project) {
testTask.onlyIf(t -> isEnabled(project));
});
-
setupTestDependenciesDefaults(project, yamlCompatTestSourceSet);
// setup IDE
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
index 02490232e1fd8..fa82d40dbc8db 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java
@@ -118,7 +118,7 @@ public void apply(Project project) {
TaskProvider<?> destructiveTask = configureTestTask(project, taskname, distribution, t -> {
t.onlyIf(t2 -> distribution.isDocker() == false || dockerSupport.get().getDockerAvailability().isAvailable);
addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath);
- //addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
+ // addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
t.exclude("**/PackageUpgradeTests.class");
}, depsTask);
@@ -366,9 +366,12 @@ private List<ElasticsearchDistribution> configureDistributions(Project project)
List currentDistros = new ArrayList<>();
for (Architecture architecture : Architecture.values()) {
- ALL_INTERNAL.stream().forEach(type -> currentDistros.add(
- createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch())
- ));
+ ALL_INTERNAL.stream()
+ .forEach(
+ type -> currentDistros.add(
+ createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch())
+ )
+ );
}
for (Architecture architecture : Architecture.values()) {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java
index f14cc4fd658e0..7be77a69e8fc5 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java
@@ -70,7 +70,9 @@ public void apply(Project project) {
}
});
- project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure(check -> check.dependsOn(project.getTasks().withType(RestIntegTestTask.class)));
+ project.getTasks()
+ .named(JavaBasePlugin.CHECK_TASK_NAME)
+ .configure(check -> check.dependsOn(project.getTasks().withType(RestIntegTestTask.class)));
project.getTasks()
.withType(StandaloneRestIntegTestTask.class)
.configureEach(t -> t.finalizedBy(project.getTasks().withType(FixtureStop.class)));
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneRestTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneRestTestPlugin.java
index 5b508588e018c..b945ee0c6e925 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneRestTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneRestTestPlugin.java
@@ -20,7 +20,6 @@
import org.gradle.api.tasks.SourceSetContainer;
import org.gradle.api.tasks.testing.Test;
import org.gradle.plugins.ide.eclipse.model.EclipseModel;
-
import org.gradle.plugins.ide.idea.model.IdeaModel;
import java.util.Arrays;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneTestPlugin.java
index a7bfd81695d14..e9f10f0f0485a 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneTestPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/StandaloneTestPlugin.java
@@ -8,7 +8,6 @@
package org.elasticsearch.gradle.internal.test;
-import org.elasticsearch.gradle.internal.ElasticsearchJavaPlugin;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.plugins.JavaBasePlugin;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
index 80b7144e45e30..88a558c2e590d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/TestWithSslPlugin.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test;
import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask;
+import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.precommit.FilePermissionsPrecommitPlugin;
import org.elasticsearch.gradle.internal.precommit.ForbiddenPatternsPrecommitPlugin;
@@ -16,7 +17,6 @@
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
import org.elasticsearch.gradle.testclusters.TestClustersAware;
import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
-import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java
index dad4aeea9cf8c..e6a4a0a7a1397 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java
@@ -7,10 +7,7 @@
*/
package org.elasticsearch.gradle.internal.test.rest;
-import org.elasticsearch.gradle.VersionProperties;
-import org.elasticsearch.gradle.internal.info.BuildParams;
import org.gradle.api.DefaultTask;
-import org.gradle.api.file.ArchiveOperations;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.FileCollection;
import org.gradle.api.file.FileSystemOperations;
@@ -28,13 +25,14 @@
import org.gradle.api.tasks.util.PatternSet;
import org.gradle.internal.Factory;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.function.Function;
import java.util.stream.Collectors;
+import javax.inject.Inject;
+
import static org.elasticsearch.gradle.util.GradleUtils.getProjectPathFromTask;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java
index 17a494bb336d9..4513c64d91183 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java
@@ -30,6 +30,7 @@
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
+
import javax.inject.Inject;
import static org.elasticsearch.gradle.util.GradleUtils.getProjectPathFromTask;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesExtension.java
index c294883f578ff..8b08c23ac287d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesExtension.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesExtension.java
@@ -7,7 +7,6 @@
*/
package org.elasticsearch.gradle.internal.test.rest;
-import org.elasticsearch.gradle.internal.info.BuildParams;
import org.gradle.api.Action;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ListProperty;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesPlugin.java
index 90ed8cd0baef2..2c2ede493b5b5 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestResourcesPlugin.java
@@ -89,12 +89,11 @@ public void apply(Project project) {
Configuration testConfig = project.getConfigurations().create("restTestConfig");
Configuration xpackTestConfig = project.getConfigurations().create("restXpackTestConfig");
// core
- Dependency restTestdependency = project.getDependencies()
- .project(Map.of("path", ":rest-api-spec", "configuration", "restTests"));
+ Dependency restTestdependency = project.getDependencies().project(Map.of("path", ":rest-api-spec", "configuration", "restTests"));
project.getDependencies().add(testConfig.getName(), restTestdependency);
// x-pack
Dependency restXPackTestdependency = project.getDependencies()
- .project(Map.of("path", ":x-pack:plugin", "configuration", "restXpackTests"));
+ .project(Map.of("path", ":x-pack:plugin", "configuration", "restXpackTests"));
project.getDependencies().add(xpackTestConfig.getName(), restXPackTestdependency);
project.getConfigurations().create("restTests");
@@ -115,8 +114,7 @@ public void apply(Project project) {
// api
Configuration specConfig = project.getConfigurations().create("restSpec"); // name chosen for passivity
- Dependency restSpecDependency = project.getDependencies()
- .project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs"));
+ Dependency restSpecDependency = project.getDependencies().project(Map.of("path", ":rest-api-spec", "configuration", "restSpecs"));
project.getDependencies().add(specConfig.getName(), restSpecDependency);
project.getConfigurations().create("restSpecs");
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestUtil.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestUtil.java
index 42965436daaeb..7f859ad7a28cd 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestUtil.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestUtil.java
@@ -24,8 +24,7 @@
*/
public class RestTestUtil {
- private RestTestUtil() {
- }
+ private RestTestUtil() {}
/**
* Creates a {@link RestIntegTestTask} task with the source set of the same name
@@ -42,9 +41,7 @@ public static Provider<RestIntegTestTask> registerTestTask(Project project, SourceSet sourceSet) {
return project.getTasks().register(taskName, RestIntegTestTask.class, testTask -> {
testTask.setGroup(JavaBasePlugin.VERIFICATION_GROUP);
testTask.setDescription("Runs the REST tests against an external cluster");
- project.getPlugins().withType(JavaPlugin.class, t ->
- testTask.mustRunAfter(project.getTasks().named("test"))
- );
+ project.getPlugins().withType(JavaPlugin.class, t -> testTask.mustRunAfter(project.getTasks().named("test")));
testTask.setTestClassesDirs(sourceSet.getOutput().getClassesDirs());
testTask.setClasspath(sourceSet.getRuntimeClasspath());
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java
index 67219dc96973d..d4c367fe0b882 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/ReplaceByKey.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform;
import com.fasterxml.jackson.databind.JsonNode;
+
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Optional;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/RestTestTransform.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/RestTestTransform.java
index bd650e7afae47..5d99355ee9163 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/RestTestTransform.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/RestTestTransform.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform;
import com.fasterxml.jackson.databind.JsonNode;
+
import org.gradle.api.Named;
import org.gradle.api.tasks.Input;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDo.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDo.java
index 8c8139a7ad0c5..b2dd272a67bfe 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDo.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDo.java
@@ -10,6 +10,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey;
import org.gradle.api.tasks.Internal;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/FeatureInjector.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/FeatureInjector.java
index cebe04ab5817a..ecd87c607947d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/FeatureInjector.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/FeatureInjector.java
@@ -13,13 +13,15 @@
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformGlobalSetup;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformGlobalTeardown;
import org.gradle.api.tasks.Internal;
-import javax.annotation.Nullable;
import java.util.Iterator;
+import javax.annotation.Nullable;
+
/**
* A parent class for transformations that are backed by a feature. This will inject the necessary "feature" into the
* global setup and teardown section. See also org.elasticsearch.test.rest.yaml.Features for a list of possible features.
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/headers/InjectHeaders.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/headers/InjectHeaders.java
index c61a0a860d1ec..f0a32a37cdffb 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/headers/InjectHeaders.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/headers/InjectHeaders.java
@@ -11,6 +11,7 @@
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.FeatureInjector;
@@ -19,7 +20,6 @@
import java.util.Map;
import java.util.Set;
-import java.util.function.BiConsumer;
import java.util.function.Function;
/**
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLength.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLength.java
index 9b871cbad03a3..1e912d800dd78 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLength.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLength.java
@@ -10,6 +10,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey;
import org.gradle.api.tasks.Internal;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java
index cb2c20a96fb2e..cfaf795a5780e 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatch.java
@@ -12,6 +12,7 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentArray;
import org.gradle.api.tasks.Input;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatch.java
index 79292860f4153..853ae5c88979a 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatch.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatch.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.match;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject;
import org.gradle.api.tasks.Input;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatch.java
index a5eee30d54181..eb47ff469fe4f 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatch.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatch.java
@@ -10,6 +10,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey;
import org.gradle.api.tasks.Internal;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java
index d44d40e1c01fc..abadf9efdf85d 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatch.java
@@ -10,6 +10,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.ReplaceByKey;
import org.gradle.api.tasks.Internal;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/Skip.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/Skip.java
index 8b8b021f7ecc4..c8a7e0eddab83 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/Skip.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/Skip.java
@@ -17,15 +17,10 @@
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformGlobalSetup;
-import org.elasticsearch.gradle.internal.test.rest.transform.feature.FeatureInjector;
import org.gradle.api.tasks.Input;
-import org.gradle.api.tasks.Internal;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
/**
* A {@link RestTestTransform} that injects a skip into a REST test.
@@ -91,7 +86,6 @@ private void addSkip(ArrayNode skipParent) {
}
}
-
@Override
public void transformTest(ObjectNode parent) {
if (testName.isBlank() == false) {
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java
index 12c403c7ace21..c99c01c6d9223 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextual.java
@@ -11,6 +11,7 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject;
import org.gradle.api.tasks.Input;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarnings.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarnings.java
index a3ee65cd8a628..b95881643fbb9 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarnings.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarnings.java
@@ -11,6 +11,7 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestContext;
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransformByParentObject;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.FeatureInjector;
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarnings.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarnings.java
index bd816f6a680f8..a00901a2b1418 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarnings.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarnings.java
@@ -39,6 +39,7 @@ public class RemoveWarnings implements RestTestTransformByParentObject {
public RemoveWarnings(Set<String> warnings) {
this.warnings = warnings;
}
+
/**
* @param warnings The allowed warnings to inject
* @param testName The testName to inject
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
index d19b803d09e5a..0112e34315952 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
@@ -13,11 +13,12 @@
import com.avast.gradle.dockercompose.tasks.ComposeDown;
import com.avast.gradle.dockercompose.tasks.ComposePull;
import com.avast.gradle.dockercompose.tasks.ComposeUp;
-import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider;
+
import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.internal.docker.DockerSupportService;
import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.precommit.TestingConventionsTasks;
+import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider;
import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.Action;
import org.gradle.api.DefaultTask;
@@ -34,7 +35,6 @@
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.testing.Test;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -42,6 +42,8 @@
import java.util.Collections;
import java.util.function.BiConsumer;
+import javax.inject.Inject;
+
public class TestFixturesPlugin implements Plugin<Project> {
private static final Logger LOGGER = Logging.getLogger(TestFixturesPlugin.class);
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java
index df2bcc3e2a96a..f829818855009 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java
@@ -10,8 +10,8 @@
import org.apache.commons.io.output.TeeOutputStream;
import org.elasticsearch.gradle.LoggedExec;
-import org.elasticsearch.gradle.internal.LoggingOutputStream;
import org.elasticsearch.gradle.ReaperService;
+import org.elasticsearch.gradle.internal.LoggingOutputStream;
import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.gradle.api.Action;
import org.gradle.api.Project;
@@ -20,7 +20,6 @@
import org.gradle.internal.logging.progress.ProgressLoggerFactory;
import org.gradle.process.ExecOperations;
-import javax.inject.Inject;
import java.io.File;
import java.io.OutputStream;
import java.nio.file.Paths;
@@ -28,6 +27,8 @@
import java.util.Objects;
import java.util.function.UnaryOperator;
+import javax.inject.Inject;
+
/**
* An helper to manage a vagrant box.
*
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java
index ef3a8d9b701f4..64798ec5ccab5 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/AbstractDistributionDownloadPluginTests.java
@@ -9,7 +9,6 @@
package org.elasticsearch.gradle;
import org.elasticsearch.gradle.internal.BwcVersions;
-import org.elasticsearch.gradle.internal.info.BuildParams;
import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/ConcatFilesTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/ConcatFilesTaskTests.java
index 4fce1cebb45de..835b96ddaf02e 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/ConcatFilesTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/ConcatFilesTaskTests.java
@@ -7,16 +7,16 @@
*/
package org.elasticsearch.gradle.internal;
+import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
+import org.gradle.api.Project;
+import org.gradle.testfixtures.ProjectBuilder;
+
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
-import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
-import org.gradle.api.Project;
-import org.gradle.testfixtures.ProjectBuilder;
-
public class ConcatFilesTaskTests extends GradleUnitTestCase {
public void testHeaderAdded() throws IOException {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/EmptyDirTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/EmptyDirTaskTests.java
index 4afdf131a1740..b8f3f6d3dd53b 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/EmptyDirTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/EmptyDirTaskTests.java
@@ -7,15 +7,16 @@
*/
package org.elasticsearch.gradle.internal;
-import java.io.File;
-import java.io.IOException;
-
import com.carrotsearch.randomizedtesting.RandomizedTest;
+
import org.apache.tools.ant.taskdefs.condition.Os;
import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;
+import java.io.File;
+import java.io.IOException;
+
public class EmptyDirTaskTests extends GradleUnitTestCase {
public void testCreateEmptyDir() throws Exception {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTaskTests.java
index c18562eb0ffb1..1ad3685a8df52 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTaskTests.java
@@ -7,13 +7,8 @@
*/
package org.elasticsearch.gradle.internal.precommit;
-import java.io.File;
-import java.nio.charset.Charset;
-import java.nio.file.Files;
-import java.util.List;
-import java.util.stream.Collectors;
-
import com.carrotsearch.randomizedtesting.RandomizedTest;
+
import org.apache.tools.ant.taskdefs.condition.Os;
import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
import org.elasticsearch.gradle.util.GradleUtils;
@@ -23,6 +18,12 @@
import org.gradle.testfixtures.ProjectBuilder;
import org.junit.Assert;
+import java.io.File;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.util.List;
+import java.util.stream.Collectors;
+
public class FilePermissionsTaskTests extends GradleUnitTestCase {
public void testCheckPermissionsWhenAnExecutableFileExists() throws Exception {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java
index 1fdc5aecc9903..601d3b8ed4870 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/BreakingChangesGeneratorTest.java
@@ -16,9 +16,7 @@
import java.util.List;
import java.util.Objects;
-import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
public class BreakingChangesGeneratorTest {
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTaskTests.java
index be3a4738c035a..6fe6b105f1cab 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTaskTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/PruneChangelogsTaskTests.java
@@ -12,7 +12,6 @@
import org.elasticsearch.gradle.internal.release.PruneChangelogsTask.DeleteHelper;
import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
import org.gradle.api.GradleException;
-import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
@@ -23,7 +22,6 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import static org.elasticsearch.gradle.OS.LINUX;
import static org.elasticsearch.gradle.OS.WINDOWS;
import static org.elasticsearch.gradle.internal.release.PruneChangelogsTask.findAndDeleteFiles;
import static org.elasticsearch.gradle.internal.release.PruneChangelogsTask.findPreviousVersion;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/AssertObjectNodes.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/AssertObjectNodes.java
index 139e99bc85806..ae3a61f4a57f9 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/AssertObjectNodes.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/AssertObjectNodes.java
@@ -13,6 +13,7 @@
import com.fasterxml.jackson.databind.SequenceWriter;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
import org.junit.ComparisonFailure;
import java.io.ByteArrayOutputStream;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/TransformTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/TransformTests.java
index b0a5e02e1b93b..4de55b9ac1a7c 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/TransformTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/TransformTests.java
@@ -186,7 +186,12 @@ protected void validateBodyHasWarnings(String featureName, List test
validateBodyHasWarnings(featureName, null, tests, expectedWarnings);
}
- protected void validateBodyHasWarnings(String featureName, String testName, List tests, Collection expectedWarnings) {
+ protected void validateBodyHasWarnings(
+ String featureName,
+ String testName,
+ List tests,
+ Collection expectedWarnings
+ ) {
AtomicBoolean actuallyDidSomething = new AtomicBoolean(false);
tests.forEach(test -> {
Iterator> testsIterator = test.fields();
@@ -202,9 +207,9 @@ protected void validateBodyHasWarnings(String featureName, String testName, List
ObjectNode doSection = (ObjectNode) testSection.get("do");
assertThat(doSection.get(featureName), CoreMatchers.notNullValue());
ArrayNode warningsNode = (ArrayNode) doSection.get(featureName);
- List actual = new ArrayList<>();
+ List actual = new ArrayList<>();
warningsNode.forEach(node -> actual.add(node.asText()));
- String[] expected = expectedWarnings.toArray(new String[]{});
+ String[] expected = expectedWarnings.toArray(new String[] {});
assertThat(actual, Matchers.containsInAnyOrder(expected));
actuallyDidSomething.set(true);
}
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDoTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDoTests.java
index c11effe4c5a5f..82a9719ed83b4 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDoTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/do_/ReplaceKeyInDoTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.do_;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/InjectFeatureTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/InjectFeatureTests.java
index d643db88774f2..05811f81a2b6c 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/InjectFeatureTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/feature/InjectFeatureTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.feature;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
index 07881a0f48678..b1210aa728a04 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/header/InjectHeaderTests.java
@@ -9,12 +9,12 @@
package org.elasticsearch.gradle.internal.test.rest.transform.header;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.InjectFeatureTests;
import org.elasticsearch.gradle.internal.test.rest.transform.headers.InjectHeaders;
import org.junit.Test;
-import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -60,7 +60,6 @@ public void testInjectHeadersWithPreExisting() throws Exception {
validateBodyHasHeaders(transformedTests, headers);
}
-
@Test
public void testNotInjectingHeaders() throws Exception {
String testName = "/rest/transform/header/with_operation_to_skip_adding_headers.yml";
@@ -68,8 +67,9 @@ public void testNotInjectingHeaders() throws Exception {
validateSetupExist(tests);
validateBodyHasHeaders(tests, Map.of("foo", "bar"));
- List> transformations =
- Collections.singletonList(new InjectHeaders(headers, Set.of(InjectHeaderTests::applyCondition)));
+ List> transformations = Collections.singletonList(
+ new InjectHeaders(headers, Set.of(InjectHeaderTests::applyCondition))
+ );
List transformedTests = transformTests(tests, transformations);
printTest(testName, transformedTests);
validateSetupAndTearDown(transformedTests);
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLengthTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLengthTests.java
index 98bdb9280f534..3c2606157be6c 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLengthTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/length/ReplaceKeyInLengthTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.length;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java
index 48f7317362e51..ce70b03f28dbe 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/AddMatchTests.java
@@ -13,6 +13,7 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatchTests.java
index 2e463a5cdfe54..15497053a9b1e 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatchTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/RemoveMatchTests.java
@@ -14,6 +14,7 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatchTests.java
index 650b70450d0ca..9151fc13b8843 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatchTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceKeyInMatchTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.match;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java
index 667430307e072..6c8ed274c4fbd 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java
@@ -12,6 +12,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/SkipTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/SkipTests.java
index db01db056dffe..32225164a0d4b 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/SkipTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/skip/SkipTests.java
@@ -11,9 +11,7 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
-import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
-import org.elasticsearch.gradle.internal.test.rest.transform.match.ReplaceKeyInMatch;
import org.junit.Test;
import java.util.Collections;
@@ -21,7 +19,6 @@
public class SkipTests extends TransformTests {
-
@Test
public void testAddGlobalSetup() throws Exception {
String test_original = "/rest/transform/skip/without_setup_original.yml";
@@ -30,10 +27,7 @@ public void testAddGlobalSetup() throws Exception {
String test_transformed = "/rest/transform/skip/without_setup_transformed.yml";
List expectedTransformation = getTests(test_transformed);
- List transformedTests = transformTests(
- tests,
- Collections.singletonList(new Skip("my reason"))
- );
+ List transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason")));
AssertObjectNodes.areEqual(transformedTests, expectedTransformation);
}
@@ -46,10 +40,7 @@ public void testModifyGlobalSetupWithSkip() throws Exception {
String test_transformed = "/rest/transform/skip/without_setup_transformed.yml";
List expectedTransformation = getTests(test_transformed);
- List transformedTests = transformTests(
- tests,
- Collections.singletonList(new Skip("my reason"))
- );
+ List transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason")));
AssertObjectNodes.areEqual(transformedTests, expectedTransformation);
}
@@ -62,10 +53,7 @@ public void testModifyGlobalSetupWithoutSkip() throws Exception {
String test_transformed = "/rest/transform/skip/with_setup_no_skip_transformed.yml";
List expectedTransformation = getTests(test_transformed);
- List transformedTests = transformTests(
- tests,
- Collections.singletonList(new Skip("my reason"))
- );
+ List transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason")));
AssertObjectNodes.areEqual(transformedTests, expectedTransformation);
}
@@ -78,10 +66,7 @@ public void testModifyGlobalSetupWithFeatures() throws Exception {
String test_transformed = "/rest/transform/skip/with_features_transformed.yml";
List expectedTransformation = getTests(test_transformed);
- List transformedTests = transformTests(
- tests,
- Collections.singletonList(new Skip("my reason"))
- );
+ List transformedTests = transformTests(tests, Collections.singletonList(new Skip("my reason")));
AssertObjectNodes.areEqual(transformedTests, expectedTransformation);
}
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java
index 933427e79922d..f4af8e8320c62 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/text/ReplaceTextualTests.java
@@ -12,6 +12,7 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+
import org.elasticsearch.gradle.internal.test.rest.transform.AssertObjectNodes;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsRegexTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsRegexTests.java
index 9d24b57f5689e..7c37a6137476f 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsRegexTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsRegexTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.warnings;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.InjectFeatureTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsTests.java
index b958b07773bca..a6e7a42d8639f 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectAllowedWarningsTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.warnings;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.InjectFeatureTests;
import org.junit.Test;
@@ -61,7 +62,12 @@ public void testInjectAllowedWarningsWithPreExistingForSingleTest() throws Excep
List transformedTests = transformTests(tests, getTransformationsForTest("Test with existing allowed warnings"));
printTest(testName, transformedTests);
validateSetupAndTearDown(transformedTests);
- validateBodyHasWarnings(ALLOWED_WARNINGS, "Test with existing allowed warnings", transformedTests, Set.of("a", "b", "added warning"));
+ validateBodyHasWarnings(
+ ALLOWED_WARNINGS,
+ "Test with existing allowed warnings",
+ transformedTests,
+ Set.of("a", "b", "added warning")
+ );
validateBodyHasWarnings(ALLOWED_WARNINGS, "Test with existing allowed warnings not to change", transformedTests, Set.of("a", "b"));
}
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsRegexTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsRegexTests.java
index 6107387d796fc..70629620ac10e 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsRegexTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsRegexTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.warnings;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.InjectFeatureTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsTests.java
index a4dbc1c0f49a2..b29e26974fe98 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/InjectWarningsTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.warnings;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.feature.InjectFeatureTests;
import org.junit.Test;
diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarningsTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarningsTests.java
index cc6e1c256a15a..6ff9430d79c62 100644
--- a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarningsTests.java
+++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/warnings/RemoveWarningsTests.java
@@ -9,6 +9,7 @@
package org.elasticsearch.gradle.internal.test.rest.transform.warnings;
import com.fasterxml.jackson.databind.node.ObjectNode;
+
import org.elasticsearch.gradle.internal.test.rest.transform.RestTestTransform;
import org.elasticsearch.gradle.internal.test.rest.transform.TransformTests;
import org.junit.Test;
diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties
index 31bf7f6cfa41d..544cc47d4a9c0 100644
--- a/build-tools-internal/version.properties
+++ b/build-tools-internal/version.properties
@@ -1,5 +1,5 @@
elasticsearch = 8.0.0
-lucene = 9.0.0-snapshot-ba75dc5e6bf
+lucene = 9.0.0-snapshot-cfd9f9f98f7
bundled_jdk_vendor = adoptium
bundled_jdk = 17+35
diff --git a/build-tools/build.gradle b/build-tools/build.gradle
index 5ee1e777cb7d8..39042d0925d6a 100644
--- a/build-tools/build.gradle
+++ b/build-tools/build.gradle
@@ -5,7 +5,6 @@
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
-import org.elasticsearch.gradle.internal.conventions.VersionPropertiesLoader
plugins {
id 'java-gradle-plugin'
@@ -15,14 +14,15 @@ plugins {
id 'elasticsearch.build-tools'
id 'elasticsearch.eclipse'
id 'elasticsearch.versions'
+ id 'elasticsearch.formatting'
}
description = "The elasticsearch build tools"
group = "org.elasticsearch.gradle"
version = versions.getProperty("elasticsearch")
-targetCompatibility = versions.get("minimumJava")
-sourceCompatibility = versions.get("minimumJava")
+targetCompatibility = versions.get("minimumRuntimeJava")
+sourceCompatibility = versions.get("minimumRuntimeJava")
gradlePlugin {
// We already configure publication and we don't need or want the one that comes
diff --git a/build-tools/reaper/build.gradle b/build-tools/reaper/build.gradle
index 90a82aac3eb95..450eb53705bbf 100644
--- a/build-tools/reaper/build.gradle
+++ b/build-tools/reaper/build.gradle
@@ -2,12 +2,13 @@ plugins {
id 'java'
id 'elasticsearch.eclipse'
id 'elasticsearch.versions'
+ id 'elasticsearch.formatting'
}
group = "org.elasticsearch.gradle"
version = versions.getProperty("elasticsearch")
-targetCompatibility = versions.get("minimumJava")
-sourceCompatibility = versions.get("minimumJava")
+targetCompatibility = versions.get("minimumRuntimeJava")
+sourceCompatibility = versions.get("minimumRuntimeJava")
tasks.named("jar").configure {
archiveFileName = "${project.name}.jar"
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
index b9afb80bb5311..9de5d161116f0 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
@@ -23,9 +23,10 @@
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
-import javax.inject.Inject;
import java.util.Comparator;
+import javax.inject.Inject;
+
/**
* A plugin to manage getting and extracting distributions of Elasticsearch.
*
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java
index a9d077e456285..3f1543908889c 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/LoggedExec.java
@@ -21,7 +21,6 @@
import org.gradle.process.ExecSpec;
import org.gradle.process.JavaExecSpec;
-import javax.inject.Inject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
@@ -34,6 +33,8 @@
import java.util.function.Function;
import java.util.regex.Pattern;
+import javax.inject.Inject;
+
/**
* A wrapper around gradle's Exec task to capture output and log on error.
*/
@@ -68,7 +69,7 @@ public void execute(Task task) {
"Process '%s %s' finished with non-zero exit value %d",
LoggedExec.this.getExecutable(),
LoggedExec.this.getArgs(),
- exitValue
+ exitValue
)
);
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
index a76a79feba0cf..7fa1e451fff0b 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/ReaperPlugin.java
@@ -12,9 +12,10 @@
import org.gradle.api.Project;
import org.gradle.api.file.ProjectLayout;
-import javax.inject.Inject;
import java.io.File;
+import javax.inject.Inject;
+
/**
* A plugin to handle reaping external services spawned by a build if Gradle dies.
*/
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellPlugin.java
index 4528e0dbaaf93..55556fc9133b8 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellPlugin.java
@@ -22,10 +22,11 @@ public class JarHellPlugin implements Plugin {
@Override
public void apply(Project project) {
TaskProvider extends Task> jarHellTask = createTask(project);
- project.getPluginManager().withPlugin(
+ project.getPluginManager()
+ .withPlugin(
"lifecycle-base",
- p -> project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME).configure(t -> t.dependsOn(jarHellTask))
- );
+ p -> project.getTasks().named(LifecycleBasePlugin.CHECK_TASK_NAME).configure(t -> t.dependsOn(jarHellTask))
+ );
}
private TaskProvider extends Task> createTask(Project project) {
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellTask.java
index d487b242669d0..d2668e149ebfa 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/jarhell/JarHellTask.java
@@ -19,12 +19,13 @@
import org.gradle.api.tasks.TaskAction;
import org.gradle.process.ExecOperations;
-import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
+import javax.inject.Inject;
+
/**
* Runs CheckJarHell on a classpath.
*/
@@ -37,7 +38,6 @@ public class JarHellTask extends DefaultTask {
private ExecOperations execOperations;
private ProjectLayout projectLayout;
-
@Inject
public JarHellTask(ExecOperations execOperations, ProjectLayout projectLayout) {
this.execOperations = execOperations;
@@ -51,7 +51,7 @@ public File getSuccessMarker() {
}
@TaskAction
- public void runJarHellCheck() throws IOException{
+ public void runJarHellCheck() throws IOException {
LoggedExec.javaexec(execOperations, spec -> {
spec.environment("CLASSPATH", getJarHellRuntimeClasspath().plus(getClasspath()).getAsPath());
spec.getMainClass().set("org.elasticsearch.jdk.JarHell");
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java
index f872f7c58d9be..12c40c1da1130 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java
@@ -73,13 +73,6 @@ public void apply(final Project project) {
final var bundleTask = createBundleTasks(project, extension);
project.afterEvaluate(project1 -> {
- project1.getExtensions().getByType(PluginPropertiesExtension.class).getExtendedPlugins().forEach(pluginName -> {
- // Auto add dependent modules to the test cluster
- if (project1.findProject(":modules:" + pluginName) != null) {
- NamedDomainObjectContainer<ElasticsearchCluster> testClusters = testClusters(project, "testClusters");
- testClusters.configureEach(elasticsearchCluster -> elasticsearchCluster.module(":modules:" + pluginName));
- }
- });
final var extension1 = project1.getExtensions().getByType(PluginPropertiesExtension.class);
configurePublishing(project1, extension1);
var name = extension1.getName();
@@ -120,7 +113,7 @@ public void apply(final Project project) {
// allow running ES with this plugin in the foreground of a build
var testClusters = testClusters(project, TestClustersPlugin.EXTENSION_NAME);
- var runCluster = testClusters.register("runtTask", c -> {
+ var runCluster = testClusters.register("runTask", c -> {
if (GradleUtils.isModuleProject(project.getPath())) {
c.module(bundleTask.flatMap((Transformer, Zip>) zip -> zip.getArchiveFile()));
} else {
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.java
index 71cd1fd7c5e8e..be8dae2f8fd3b 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.java
@@ -9,7 +9,6 @@
package org.elasticsearch.gradle.plugin;
import org.gradle.api.Project;
-import org.gradle.api.file.RegularFile;
import org.gradle.api.file.RegularFileProperty;
import org.gradle.api.plugins.ExtraPropertiesExtension;
@@ -142,9 +141,9 @@ public File getLicenseFile() {
public void setLicenseFile(File licenseFile) {
ExtraPropertiesExtension extraProperties = this.project.getExtensions().getExtraProperties();
- RegularFileProperty regularFileProperty = extraProperties.has("licenseFile") ?
- (RegularFileProperty)extraProperties.get("licenseFile") :
- project.getObjects().fileProperty();
+ RegularFileProperty regularFileProperty = extraProperties.has("licenseFile")
+ ? (RegularFileProperty) extraProperties.get("licenseFile")
+ : project.getObjects().fileProperty();
regularFileProperty.set(licenseFile);
this.licenseFile = licenseFile;
}
@@ -155,9 +154,9 @@ public File getNoticeFile() {
public void setNoticeFile(File noticeFile) {
ExtraPropertiesExtension extraProperties = this.project.getExtensions().getExtraProperties();
- RegularFileProperty regularFileProperty = extraProperties.has("noticeFile") ?
- (RegularFileProperty)extraProperties.get("noticeFile") :
- project.getObjects().fileProperty();
+ RegularFileProperty regularFileProperty = extraProperties.has("noticeFile")
+ ? (RegularFileProperty) extraProperties.get("noticeFile")
+ : project.getObjects().fileProperty();
regularFileProperty.set(noticeFile);
this.noticeFile = noticeFile;
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/JavaRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/JavaRestTestPlugin.java
index 505c3b6ce04f4..92cb8f25bf7a7 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/test/JavaRestTestPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/JavaRestTestPlugin.java
@@ -68,7 +68,7 @@ public void apply(Project project) {
// Register plugin bundle with test cluster
project.getPlugins().withType(PluginBuildPlugin.class, p -> {
TaskProvider<Zip> bundle = project.getTasks().withType(Zip.class).named(BUNDLE_PLUGIN_TASK_NAME);
- clusterProvider.configure( c-> c.plugin(bundle.flatMap(Zip::getArchiveFile)));
+ clusterProvider.configure(c -> c.plugin(bundle.flatMap(Zip::getArchiveFile)));
javaRestTestTask.configure(t -> t.dependsOn(bundle));
});
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java
index 15878ebb57d8b..59144576333f2 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java
@@ -14,7 +14,6 @@
import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask;
import org.elasticsearch.gradle.testclusters.TestClustersPlugin;
import org.elasticsearch.gradle.transform.UnzipTransform;
-import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.NamedDomainObjectProvider;
import org.gradle.api.Plugin;
@@ -26,9 +25,7 @@
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.attributes.Attribute;
import org.gradle.api.internal.artifacts.ArtifactAttributes;
-import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.plugins.JavaBasePlugin;
-import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.tasks.Copy;
import org.gradle.api.tasks.SourceSet;
import org.gradle.api.tasks.SourceSetContainer;
@@ -110,9 +107,9 @@ private static void setupDefaultDependencies(
}
private TaskProvider<StandaloneRestIntegTestTask> setupTestTask(
- Project project,
- SourceSet testSourceSet,
- NamedDomainObjectProvider<ElasticsearchCluster> clusterProvider
+ Project project,
+ SourceSet testSourceSet,
+ NamedDomainObjectProvider<ElasticsearchCluster> clusterProvider
) {
return project.getTasks().register(YAML_REST_TEST, StandaloneRestIntegTestTask.class, task -> {
task.useCluster(clusterProvider.get());
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
index d3f7b21cff29b..79d24184201c6 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java
@@ -322,7 +322,7 @@ private void commonNodeConfig() {
if (node.getTestDistribution().equals(TestDistribution.INTEG_TEST)) {
node.defaultConfig.put("xpack.security.enabled", "false");
} else {
- if (node.getVersion().onOrAfter("8.0.0")) {
+ if (node.getVersion().onOrAfter("7.16.0")) {
node.defaultConfig.put("cluster.deprecation_indexing.enabled", "false");
}
}
@@ -383,6 +383,11 @@ public void nextNodeToNextVersion() {
node.goToNextVersion();
commonNodeConfig();
nodeIndex += 1;
+ if (node.getTestDistribution().equals(TestDistribution.DEFAULT)) {
+ if (node.getVersion().onOrAfter("7.16.0")) {
+ node.setting("cluster.deprecation_indexing.enabled", "false");
+ }
+ }
node.start();
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
index 05ada3c5c3124..2292867051f03 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java
@@ -14,13 +14,13 @@
import org.elasticsearch.gradle.FileSupplier;
import org.elasticsearch.gradle.LazyPropertyList;
import org.elasticsearch.gradle.LazyPropertyMap;
-import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.elasticsearch.gradle.LoggedExec;
import org.elasticsearch.gradle.OS;
import org.elasticsearch.gradle.PropertyNormalization;
import org.elasticsearch.gradle.ReaperService;
import org.elasticsearch.gradle.Version;
import org.elasticsearch.gradle.VersionProperties;
+import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.elasticsearch.gradle.transform.UnzipTransform;
import org.elasticsearch.gradle.util.Pair;
import org.gradle.api.Action;
@@ -650,8 +650,8 @@ private void copyExtraConfigFiles() {
*/
private void copyExtraJars() {
List<File> extraJarFiles = this.extraJarConfigurations.stream()
- .flatMap(fileCollection -> fileCollection.getFiles().stream())
- .collect(Collectors.toList());
+ .flatMap(fileCollection -> fileCollection.getFiles().stream())
+ .collect(Collectors.toList());
if (extraJarFiles.isEmpty() == false) {
logToProcessStdout("Setting up " + this.extraJarConfigurations.size() + " additional jar dependencies");
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
index 402a6a67b5725..19ab0764d24f2 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java
@@ -88,7 +88,6 @@ public String getDataDir() {
@Override
public void beforeStart() {
- int debugPort = 5005;
int httpPort = 9200;
int transportPort = 9300;
Map additionalSettings = System.getProperties()
@@ -120,16 +119,15 @@ public void beforeStart() {
if (dataDir != null) {
node.setDataPath(getDataPath.apply(node));
}
- if (debug) {
- logger.lifecycle("Running elasticsearch in debug mode, {} expecting running debug server on port {}", node, debugPort);
- node.jvmArgs("-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=" + debugPort);
- debugPort += 1;
- }
if (keystorePassword.length() > 0) {
node.keystorePassword(keystorePassword);
}
}
}
+
+ if (debug) {
+ enableDebug();
+ }
}
@TaskAction
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
index 677b322bab9c3..9bb5d8e0f3f1c 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/StandaloneRestIntegTestTask.java
@@ -15,6 +15,7 @@
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.WorkResult;
+import org.gradle.api.tasks.options.Option;
import org.gradle.api.tasks.testing.Test;
import org.gradle.internal.resources.ResourceLock;
import org.gradle.internal.resources.SharedResource;
@@ -36,6 +37,7 @@
public class StandaloneRestIntegTestTask extends Test implements TestClustersAware, FileSystemOperationsAware {
private Collection<ElasticsearchCluster> clusters = new HashSet<>();
+ private boolean debugServer = false;
public StandaloneRestIntegTestTask() {
this.getOutputs()
@@ -62,6 +64,11 @@ public StandaloneRestIntegTestTask() {
);
}
+ @Option(option = "debug-server-jvm", description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch.")
+ public void setDebugServer(boolean enabled) {
+ this.debugServer = enabled;
+ }
+
@Override
public int getMaxParallelForks() {
return 1;
@@ -91,4 +98,11 @@ public List getSharedResources() {
public WorkResult delete(Object... objects) {
return getFileSystemOperations().delete(d -> d.delete(objects));
}
+
+ @Override
+ public void beforeStart() {
+ if (debugServer) {
+ enableDebug();
+ }
+ }
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
index 9d4447e9254ae..18f88b0dc4afc 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java
@@ -7,7 +7,6 @@
*/
package org.elasticsearch.gradle.testclusters;
-import org.gradle.api.Action;
import org.gradle.api.Task;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.provider.Provider;
@@ -26,8 +25,8 @@ default void useCluster(ElasticsearchCluster cluster) {
throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster);
}
- cluster.getNodes().all(node -> node.getDistributions().stream()
- .forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
+ cluster.getNodes()
+ .all(node -> node.getDistributions().stream().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
cluster.getNodes().all(node -> dependsOn((Callable<Collection<Configuration>>) node::getPluginAndModuleConfigurations));
getClusters().add(cluster);
}
@@ -37,4 +36,15 @@ default void useCluster(Provider cluster) {
}
default void beforeStart() {}
+
+ default void enableDebug() {
+ int debugPort = 5007;
+ for (ElasticsearchCluster cluster : getClusters()) {
+ for (ElasticsearchNode node : cluster.getNodes()) {
+ getLogger().lifecycle("Running elasticsearch in debug mode, {} expecting running debug server on port {}", node, debugPort);
+ node.jvmArgs("-agentlib:jdwp=transport=dt_socket,server=n,suspend=y,address=" + debugPort);
+ debugPort += 1;
+ }
+ }
+ }
}
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
index 2149abe09fece..1a50dc41a1bb0 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java
@@ -11,7 +11,6 @@
import org.elasticsearch.gradle.ReaperPlugin;
import org.elasticsearch.gradle.ReaperService;
import org.elasticsearch.gradle.util.GradleUtils;
-import org.gradle.api.Action;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@@ -29,9 +28,10 @@
import org.gradle.internal.jvm.Jvm;
import org.gradle.process.ExecOperations;
-import javax.inject.Inject;
import java.io.File;
+import javax.inject.Inject;
+
import static org.elasticsearch.gradle.util.GradleUtils.noop;
public class TestClustersPlugin implements Plugin<Project> {
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/WaitForHttpResource.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/WaitForHttpResource.java
index bef5578f7bca3..03368e74cdb75 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/WaitForHttpResource.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/WaitForHttpResource.java
@@ -11,10 +11,6 @@
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.KeyManager;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManagerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
@@ -37,6 +33,11 @@
import java.util.Set;
import java.util.concurrent.TimeUnit;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.KeyManager;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
+
/**
* A utility to wait for a specific HTTP resource to be available, optionally with customized TLS trusted CAs.
* This is logically similar to using the Ant Get task to retrieve a resource, but with the difference that it can
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java b/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
index ec94818dead4c..ba8a3c65672a6 100644
--- a/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
+++ b/build-tools/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java
@@ -50,7 +50,7 @@ public static SourceSetContainer getJavaSourceSets(Project project) {
}
public static void maybeConfigure(TaskContainer tasks, String name, Action super Task> config) {
- tasks.matching(t -> t.getName().equals(name)).configureEach( t-> config.execute(t));
+ tasks.matching(t -> t.getName().equals(name)).configureEach(t -> config.execute(t));
}
public static void maybeConfigure(
@@ -203,8 +203,7 @@ public static String getProjectPathFromTask(String taskPath) {
}
public static boolean isModuleProject(String projectPath) {
- return projectPath.contains("modules:")
- || projectPath.startsWith(":x-pack:plugin");
+ return projectPath.contains("modules:") || projectPath.startsWith(":x-pack:plugin");
}
public static void disableTransitiveDependencies(Configuration config) {
diff --git a/build-tools/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java b/build-tools/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
index 389d18ed6f297..d1146e3c7261a 100644
--- a/build-tools/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
+++ b/build-tools/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
@@ -8,14 +8,12 @@
package org.elasticsearch.gradle;
-import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
+import org.elasticsearch.gradle.internal.test.GradleUnitTestCase;
+import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Project;
import org.gradle.testfixtures.ProjectBuilder;
-import org.gradle.api.NamedDomainObjectContainer;
-
-import java.io.File;
import static org.hamcrest.core.StringContains.containsString;
@@ -130,14 +128,13 @@ private ElasticsearchDistribution checkDistro(
return distribution;
}
-
protected ElasticsearchDistribution createDistro(
- Project project,
- String name,
- String version,
- ElasticsearchDistributionType type,
- ElasticsearchDistribution.Platform platform,
- Boolean bundledJdk
+ Project project,
+ String name,
+ String version,
+ ElasticsearchDistributionType type,
+ ElasticsearchDistribution.Platform platform,
+ Boolean bundledJdk
) {
NamedDomainObjectContainer<ElasticsearchDistribution> distros = DistributionDownloadPlugin.getContainer(project);
return distros.create(name, distro -> {
@@ -158,10 +155,10 @@ protected ElasticsearchDistribution createDistro(
protected Project createProject() {
rootProject = ProjectBuilder.builder().build();
-// Project distributionProject = ProjectBuilder.builder().withParent(rootProject).withName("distribution").build();
-// archivesProject = ProjectBuilder.builder().withParent(distributionProject).withName("archives").build();
-// packagesProject = ProjectBuilder.builder().withParent(distributionProject).withName("packages").build();
-// bwcProject = ProjectBuilder.builder().withParent(distributionProject).withName("bwc").build();
+ // Project distributionProject = ProjectBuilder.builder().withParent(rootProject).withName("distribution").build();
+ // archivesProject = ProjectBuilder.builder().withParent(distributionProject).withName("archives").build();
+ // packagesProject = ProjectBuilder.builder().withParent(distributionProject).withName("packages").build();
+ // bwcProject = ProjectBuilder.builder().withParent(distributionProject).withName("bwc").build();
Project project = ProjectBuilder.builder().withParent(rootProject).build();
project.getPlugins().apply("elasticsearch.distribution-download");
return project;
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/BaseTestCase.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/BaseTestCase.java
index f429c7035c463..8aaa74b828b2a 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/BaseTestCase.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/BaseTestCase.java
@@ -7,11 +7,13 @@
*/
package org.elasticsearch.gradle.internal.test;
+import junit.framework.AssertionFailedError;
+
import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
-import junit.framework.AssertionFailedError;
+
import org.junit.Assert;
import org.junit.runner.RunWith;
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleIntegrationTestCase.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleIntegrationTestCase.java
index 7d47088d4740b..5f83b678fdd80 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleIntegrationTestCase.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleIntegrationTestCase.java
@@ -66,12 +66,14 @@ protected GradleRunner getGradleRunner() {
} catch (IOException e) {
throw new UncheckedIOException(e);
}
- return new InternalAwareGradleRunner(GradleRunner.create()
- .withProjectDir(getProjectDir())
- .withPluginClasspath()
- .withTestKitDir(testkit)
- .forwardOutput()
- .withDebug(ManagementFactory.getRuntimeMXBean().getInputArguments().toString().indexOf("-agentlib:jdwp") > 0));
+ return new InternalAwareGradleRunner(
+ GradleRunner.create()
+ .withProjectDir(getProjectDir())
+ .withPluginClasspath()
+ .withTestKitDir(testkit)
+ .forwardOutput()
+ .withDebug(ManagementFactory.getRuntimeMXBean().getInputArguments().toString().indexOf("-agentlib:jdwp") > 0)
+ );
}
protected File getBuildDir(String name) {
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleUnitTestCase.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleUnitTestCase.java
index accf1f18e7743..c2025a13f46d2 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleUnitTestCase.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/GradleUnitTestCase.java
@@ -11,6 +11,7 @@
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+
import org.junit.runner.RunWith;
@RunWith(RandomizedRunner.class)
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InternalAwareGradleRunner.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InternalAwareGradleRunner.java
index e30455aa0b406..2ea1eb9042108 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InternalAwareGradleRunner.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/InternalAwareGradleRunner.java
@@ -72,8 +72,7 @@ public List getArguments() {
@Override
public GradleRunner withArguments(List<String> arguments) {
- List<String> collect = Stream.concat(arguments.stream(), Stream.of("-Dtest.external=true"))
- .collect(Collectors.toList());
+ List<String> collect = Stream.concat(arguments.stream(), Stream.of("-Dtest.external=true")).collect(Collectors.toList());
delegate.withArguments(collect);
return this;
}
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
index 8d2ae78396435..940d8277a5dba 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/NormalizeOutputGradleRunner.java
@@ -122,7 +122,6 @@ public GradleRunner withEnvironment(Map map) {
return this;
}
-
@Override
public GradleRunner forwardStdOutput(Writer writer) {
delegate.forwardStdOutput(writer);
diff --git a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestUtils.java b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestUtils.java
index 11cf2a3d6a099..8ef6ac9fd97d7 100644
--- a/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestUtils.java
+++ b/build-tools/src/testFixtures/java/org/elasticsearch/gradle/internal/test/TestUtils.java
@@ -12,7 +12,6 @@
import java.io.IOException;
import java.util.stream.Collectors;
-
public class TestUtils {
public static String normalizeString(String input, File projectRootDir) {
@@ -20,10 +19,10 @@ public static String normalizeString(String input, File projectRootDir) {
String normalizedPathPrefix = projectRootDir.getCanonicalPath().replaceAll("\\\\", "/");
System.out.println("normalizedPathPrefix = " + normalizedPathPrefix);
return input.lines()
- .map(it -> it.replaceAll("\\\\", "/"))
- .map(it -> it.replaceAll(normalizedPathPrefix, "."))
- .map(it -> it.replaceAll("Gradle Test Executor \\d", "Gradle Test Executor 1"))
- .collect(Collectors.joining("\n"));
+ .map(it -> it.replaceAll("\\\\", "/"))
+ .map(it -> it.replaceAll(normalizedPathPrefix, "."))
+ .map(it -> it.replaceAll("Gradle Test Executor \\d", "Gradle Test Executor 1"))
+ .collect(Collectors.joining("\n"));
} catch (IOException e) {
throw new RuntimeException(e);
}
diff --git a/build.gradle b/build.gradle
index a6be800654390..1569cec42e9a5 100644
--- a/build.gradle
+++ b/build.gradle
@@ -40,14 +40,12 @@ plugins {
id 'elasticsearch.runtime-jdk-provision'
id 'elasticsearch.ide'
id 'elasticsearch.forbidden-dependencies'
- id 'elasticsearch.formatting'
id 'elasticsearch.local-distribution'
id 'elasticsearch.fips'
id 'elasticsearch.internal-testclusters'
id 'elasticsearch.run'
id 'elasticsearch.release-tools'
id 'elasticsearch.versions'
- id "com.diffplug.spotless" version "5.15.1" apply false
}
/**
@@ -338,6 +336,7 @@ allprojects {
}
}
+ apply plugin: 'elasticsearch.formatting'
}
@@ -423,7 +422,17 @@ tasks.register("buildReleaseArtifacts").configure {
group = 'build'
description = 'Builds all artifacts required for release manager'
- dependsOn allprojects.findAll { it.path.startsWith(':distribution:docker') == false && it.path.startsWith(':ml-cpp') == false }
+ dependsOn allprojects.findAll {
+ it.path.startsWith(':distribution:docker') == false
+ && it.path.startsWith(':ml-cpp') == false
+ && it.path.startsWith(':distribution:bwc') == false
+ }
.collect { GradleUtils.findByName(it.tasks, 'assemble') }
.findAll { it != null }
}
+
+tasks.named("spotlessApply").configure {
+ dependsOn gradle.includedBuild('build-tools').task(':spotlessApply')
+ dependsOn gradle.includedBuild('build-tools').task(':reaper:spotlessApply')
+ dependsOn gradle.includedBuild('build-tools-internal').task(':spotlessApply')
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java
index 4d74012587340..7ba787dfdd72a 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/AsyncSearchRequestConverters.java
@@ -77,6 +77,7 @@ static Request getAsyncSearch(GetAsyncSearchRequest asyncSearchRequest) {
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new RequestConverters.Params();
+ params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
if (asyncSearchRequest.getKeepAlive() != null) {
params.putParam("keep_alive", asyncSearchRequest.getKeepAlive().getStringRep());
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
index 159ab5b08200c..8c59c2bac3d4e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfig.java
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.time.Instant;
+import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
@@ -38,6 +39,7 @@ public class TransformConfig implements ToXContentObject {
public static final ParseField DESCRIPTION = new ParseField("description");
public static final ParseField SYNC = new ParseField("sync");
public static final ParseField SETTINGS = new ParseField("settings");
+ public static final ParseField METADATA = new ParseField("_meta");
public static final ParseField VERSION = new ParseField("version");
public static final ParseField CREATE_TIME = new ParseField("create_time");
public static final ParseField RETENTION_POLICY = new ParseField("retention_policy");
@@ -51,6 +53,7 @@ public class TransformConfig implements ToXContentObject {
private final TimeValue frequency;
private final SyncConfig syncConfig;
private final SettingsConfig settings;
+ private final Map<String, Object> metadata;
private final PivotConfig pivotConfig;
private final LatestConfig latestConfig;
private final String description;
@@ -71,9 +74,11 @@ public class TransformConfig implements ToXContentObject {
LatestConfig latestConfig = (LatestConfig) args[6];
String description = (String) args[7];
SettingsConfig settings = (SettingsConfig) args[8];
- RetentionPolicyConfig retentionPolicyConfig = (RetentionPolicyConfig) args[9];
- Instant createTime = (Instant) args[10];
- String transformVersion = (String) args[11];
+ @SuppressWarnings("unchecked")
+ Map<String, Object> metadata = (Map<String, Object>) args[9];
+ RetentionPolicyConfig retentionPolicyConfig = (RetentionPolicyConfig) args[10];
+ Instant createTime = (Instant) args[11];
+ String transformVersion = (String) args[12];
return new TransformConfig(
id,
source,
@@ -84,6 +89,7 @@ public class TransformConfig implements ToXContentObject {
latestConfig,
description,
settings,
+ metadata,
retentionPolicyConfig,
createTime,
transformVersion
@@ -106,6 +112,7 @@ public class TransformConfig implements ToXContentObject {
PARSER.declareObject(optionalConstructorArg(), (p, c) -> LatestConfig.fromXContent(p), LATEST_TRANSFORM);
PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SettingsConfig.fromXContent(p), SETTINGS);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), METADATA);
PARSER.declareNamedObject(
optionalConstructorArg(),
(p, c, n) -> p.namedObject(RetentionPolicyConfig.class, n, c),
@@ -136,7 +143,7 @@ public static TransformConfig fromXContent(final XContentParser parser) {
* @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static TransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
- return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null, null, null, null);
+ return new TransformConfig(null, source, null, null, null, pivotConfig, null, null, null, null, null, null, null);
}
/**
@@ -151,7 +158,7 @@ public static TransformConfig forPreview(final SourceConfig source, final PivotC
* @return A TransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
*/
public static TransformConfig forPreview(final SourceConfig source, final LatestConfig latestConfig) {
- return new TransformConfig(null, source, null, null, null, null, latestConfig, null, null, null, null, null);
+ return new TransformConfig(null, source, null, null, null, null, latestConfig, null, null, null, null, null, null);
}
TransformConfig(
@@ -164,6 +171,7 @@ public static TransformConfig forPreview(final SourceConfig source, final Latest
final LatestConfig latestConfig,
final String description,
final SettingsConfig settings,
+ final Map metadata,
final RetentionPolicyConfig retentionPolicyConfig,
final Instant createTime,
final String version
@@ -177,6 +185,7 @@ public static TransformConfig forPreview(final SourceConfig source, final Latest
this.latestConfig = latestConfig;
this.description = description;
this.settings = settings;
+ this.metadata = metadata;
this.retentionPolicyConfig = retentionPolicyConfig;
this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
this.transformVersion = version == null ? null : Version.fromString(version);
@@ -228,6 +237,11 @@ public SettingsConfig getSettings() {
return settings;
}
+ @Nullable
+ public Map getMetadata() {
+ return metadata;
+ }
+
@Nullable
public RetentionPolicyConfig getRetentionPolicyConfig() {
return retentionPolicyConfig;
@@ -265,6 +279,9 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
if (settings != null) {
builder.field(SETTINGS.getPreferredName(), settings);
}
+ if (metadata != null) {
+ builder.field(METADATA.getPreferredName(), metadata);
+ }
if (retentionPolicyConfig != null) {
builder.startObject(RETENTION_POLICY.getPreferredName());
builder.field(retentionPolicyConfig.getName(), retentionPolicyConfig);
@@ -300,6 +317,7 @@ public boolean equals(Object other) {
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.transformVersion, that.transformVersion)
&& Objects.equals(this.settings, that.settings)
+ && Objects.equals(this.metadata, that.metadata)
&& Objects.equals(this.createTime, that.createTime)
&& Objects.equals(this.pivotConfig, that.pivotConfig)
&& Objects.equals(this.latestConfig, that.latestConfig)
@@ -315,6 +333,7 @@ public int hashCode() {
frequency,
syncConfig,
settings,
+ metadata,
createTime,
transformVersion,
pivotConfig,
@@ -343,6 +362,7 @@ public static class Builder {
private PivotConfig pivotConfig;
private LatestConfig latestConfig;
private SettingsConfig settings;
+ private Map metadata;
private String description;
private RetentionPolicyConfig retentionPolicyConfig;
@@ -391,6 +411,11 @@ public Builder setSettings(SettingsConfig settings) {
return this;
}
+ public Builder setMetadata(Map metadata) {
+ this.metadata = metadata;
+ return this;
+ }
+
public Builder setRetentionPolicyConfig(RetentionPolicyConfig retentionPolicyConfig) {
this.retentionPolicyConfig = retentionPolicyConfig;
return this;
@@ -407,6 +432,7 @@ public TransformConfig build() {
latestConfig,
description,
settings,
+ metadata,
retentionPolicyConfig,
null,
null
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
index b07c09cc3dc78..a706e35ae93b2 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdate.java
@@ -17,6 +17,7 @@
import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
+import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -39,8 +40,10 @@ public class TransformConfigUpdate implements ToXContentObject {
SyncConfig syncConfig = (SyncConfig) args[3];
String description = (String) args[4];
SettingsConfig settings = (SettingsConfig) args[5];
- RetentionPolicyConfig retentionPolicyConfig = (RetentionPolicyConfig) args[6];
- return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings, retentionPolicyConfig);
+ @SuppressWarnings("unchecked")
+ Map metadata = (Map) args[6];
+ RetentionPolicyConfig retentionPolicyConfig = (RetentionPolicyConfig) args[7];
+ return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings, metadata, retentionPolicyConfig);
}
);
@@ -51,6 +54,7 @@ public class TransformConfigUpdate implements ToXContentObject {
PARSER.declareNamedObject(optionalConstructorArg(), (p, c, n) -> p.namedObject(SyncConfig.class, n, c), TransformConfig.SYNC);
PARSER.declareString(optionalConstructorArg(), TransformConfig.DESCRIPTION);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> SettingsConfig.fromXContent(p), TransformConfig.SETTINGS);
+ PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), TransformConfig.METADATA);
PARSER.declareNamedObject(
optionalConstructorArg(),
(p, c, n) -> p.namedObject(RetentionPolicyConfig.class, n, c),
@@ -64,6 +68,7 @@ public class TransformConfigUpdate implements ToXContentObject {
private final SyncConfig syncConfig;
private final String description;
private final SettingsConfig settings;
+ private final Map metadata;
public TransformConfigUpdate(
final SourceConfig source,
@@ -72,6 +77,7 @@ public TransformConfigUpdate(
final SyncConfig syncConfig,
final String description,
final SettingsConfig settings,
+ final Map metadata,
final RetentionPolicyConfig retentionPolicyConfig
) {
this.source = source;
@@ -80,6 +86,7 @@ public TransformConfigUpdate(
this.syncConfig = syncConfig;
this.description = description;
this.settings = settings;
+ this.metadata = metadata;
}
public SourceConfig getSource() {
@@ -108,6 +115,11 @@ public SettingsConfig getSettings() {
return settings;
}
+ @Nullable
+ public Map getMetadata() {
+ return metadata;
+ }
+
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
@@ -131,6 +143,9 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
if (settings != null) {
builder.field(TransformConfig.SETTINGS.getPreferredName(), settings);
}
+ if (metadata != null) {
+ builder.field(TransformConfig.METADATA.getPreferredName(), metadata);
+ }
builder.endObject();
return builder;
@@ -153,12 +168,13 @@ public boolean equals(Object other) {
&& Objects.equals(this.frequency, that.frequency)
&& Objects.equals(this.syncConfig, that.syncConfig)
&& Objects.equals(this.description, that.description)
- && Objects.equals(this.settings, that.settings);
+ && Objects.equals(this.settings, that.settings)
+ && Objects.equals(this.metadata, that.metadata);
}
@Override
public int hashCode() {
- return Objects.hash(source, dest, frequency, syncConfig, description, settings);
+ return Objects.hash(source, dest, frequency, syncConfig, description, settings, metadata);
}
@Override
@@ -182,6 +198,7 @@ public static class Builder {
private SyncConfig syncConfig;
private String description;
private SettingsConfig settings;
+ private Map metdata;
private RetentionPolicyConfig retentionPolicyConfig;
public Builder setSource(SourceConfig source) {
@@ -214,13 +231,18 @@ public Builder setSettings(SettingsConfig settings) {
return this;
}
+ public Builder setMetadata(Map metadata) {
+ this.metdata = metdata;
+ return this;
+ }
+
public Builder setRetentionPolicyConfig(RetentionPolicyConfig retentionPolicyConfig) {
this.retentionPolicyConfig = retentionPolicyConfig;
return this;
}
public TransformConfigUpdate build() {
- return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings, retentionPolicyConfig);
+ return new TransformConfigUpdate(source, dest, frequency, syncConfig, description, settings, metdata, retentionPolicyConfig);
}
}
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java
index bb656af43e89d..7aaea4b821469 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/AsyncSearchRequestConvertersTests.java
@@ -37,6 +37,7 @@ public void testSubmitAsyncSearch() throws Exception {
SearchSourceBuilder searchSourceBuilder = createTestSearchSourceBuilder();
SubmitAsyncSearchRequest submitRequest = new SubmitAsyncSearchRequest(searchSourceBuilder, indices);
+ expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
// the following parameters might be overwritten by random ones later,
// but we need to set these since they are the default we send over http
setRandomSearchParams(submitRequest, expectedParams);
@@ -72,7 +73,6 @@ public void testSubmitAsyncSearch() throws Exception {
}
private static void setRandomSearchParams(SubmitAsyncSearchRequest request, Map expectedParams) {
- expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
if (randomBoolean()) {
request.setRouting(randomAlphaOfLengthBetween(3, 10));
expectedParams.put("routing", request.getRouting());
@@ -107,6 +107,7 @@ public void testGetAsyncSearch() throws Exception {
String id = randomAlphaOfLengthBetween(5, 10);
Map expectedParams = new HashMap<>();
GetAsyncSearchRequest submitRequest = new GetAsyncSearchRequest(id);
+ expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
if (randomBoolean()) {
TimeValue keepAlive = TimeValue.parseTimeValue(randomTimeValue(), "test");
submitRequest.setKeepAlive(keepAlive);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
index e613746896d61..11e0c8f24f755 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
@@ -74,9 +74,10 @@ public void testClusterPutSettings() throws IOException {
ClusterUpdateSettingsRequest setRequest = new ClusterUpdateSettingsRequest();
setRequest.transientSettings(transientSettings);
setRequest.persistentSettings(map);
+ RequestOptions options = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build();
ClusterUpdateSettingsResponse setResponse = execute(setRequest, highLevelClient().cluster()::putSettings,
- highLevelClient().cluster()::putSettingsAsync);
+ highLevelClient().cluster()::putSettingsAsync, options);
assertAcked(setResponse);
assertThat(setResponse.getTransientSettings().get(transientSettingKey), notNullValue());
@@ -98,7 +99,7 @@ public void testClusterPutSettings() throws IOException {
resetRequest.persistentSettings("{\"" + persistentSettingKey + "\": null }", XContentType.JSON);
ClusterUpdateSettingsResponse resetResponse = execute(resetRequest, highLevelClient().cluster()::putSettings,
- highLevelClient().cluster()::putSettingsAsync);
+ highLevelClient().cluster()::putSettingsAsync, options);
assertThat(resetResponse.getTransientSettings().get(transientSettingKey), equalTo(null));
assertThat(resetResponse.getPersistentSettings().get(persistentSettingKey), equalTo(null));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
index adc616e28d042..7000e91b049ea 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
@@ -240,8 +240,9 @@ protected static void clusterUpdateSettings(Settings persistentSettings,
ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
request.persistentSettings(persistentSettings);
request.transientSettings(transientSettings);
+ RequestOptions options = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build();
assertTrue(execute(
- request, highLevelClient().cluster()::putSettings, highLevelClient().cluster()::putSettingsAsync).isAcknowledged());
+ request, highLevelClient().cluster()::putSettings, highLevelClient().cluster()::putSettingsAsync, options).isAcknowledged());
}
protected void putConflictPipeline() throws IOException {
@@ -317,8 +318,9 @@ protected static void setupRemoteClusterConfig(String remoteClusterName) throws
ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
updateSettingsRequest.persistentSettings(singletonMap("cluster.remote." + remoteClusterName + ".seeds", transportAddress));
+ RequestOptions options = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build();
ClusterUpdateSettingsResponse updateSettingsResponse =
- restHighLevelClient.cluster().putSettings(updateSettingsRequest, RequestOptions.DEFAULT);
+ restHighLevelClient.cluster().putSettings(updateSettingsRequest, options);
assertThat(updateSettingsResponse.isAcknowledged(), is(true));
assertBusy(() -> {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java
index 46614340aaaf6..db4dd91f91eb9 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/asyncsearch/AsyncSearchIT.java
@@ -8,47 +8,77 @@
package org.elasticsearch.client.asyncsearch;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
public class AsyncSearchIT extends ESRestHighLevelClientTestCase {
public void testAsyncSearch() throws IOException {
String index = "test-index";
createIndex(index, Settings.EMPTY);
+ BulkRequest bulkRequest = new BulkRequest()
+ .add(new IndexRequest(index).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
+ .add(new IndexRequest(index).id("2").source(Collections.singletonMap("foo", "bar2"), XContentType.JSON))
+ .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+ assertEquals(
+ RestStatus.OK,
+ highLevelClient().bulk(
+ bulkRequest,
+ RequestOptions.DEFAULT
+ ).status()
+ );
- SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(QueryBuilders.matchAllQuery());
+ SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
+ .query(QueryBuilders.matchAllQuery())
+ .aggregation(AggregationBuilders.terms("1").field("foo.keyword"));
SubmitAsyncSearchRequest submitRequest = new SubmitAsyncSearchRequest(sourceBuilder, index);
submitRequest.setKeepOnCompletion(true);
+ submitRequest.setWaitForCompletionTimeout(TimeValue.MAX_VALUE);
AsyncSearchResponse submitResponse = highLevelClient().asyncSearch().submit(submitRequest, RequestOptions.DEFAULT);
assertNotNull(submitResponse.getId());
+ assertFalse(submitResponse.isRunning());
assertFalse(submitResponse.isPartial());
assertTrue(submitResponse.getStartTime() > 0);
assertTrue(submitResponse.getExpirationTime() > 0);
assertNotNull(submitResponse.getSearchResponse());
- if (submitResponse.isRunning() == false) {
- assertFalse(submitResponse.isPartial());
- } else {
- assertTrue(submitResponse.isPartial());
- }
+ assertThat(submitResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(2L));
+ ParsedStringTerms terms = submitResponse.getSearchResponse().getAggregations().get("1");
+ assertThat(terms.getBuckets().size(), equalTo(2));
+ assertThat(terms.getBuckets().get(0).getKeyAsString(), equalTo("bar"));
+ assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L));
+ assertThat(terms.getBuckets().get(1).getKeyAsString(), equalTo("bar2"));
+ assertThat(terms.getBuckets().get(1).getDocCount(), equalTo(1L));
GetAsyncSearchRequest getRequest = new GetAsyncSearchRequest(submitResponse.getId());
AsyncSearchResponse getResponse = highLevelClient().asyncSearch().get(getRequest, RequestOptions.DEFAULT);
- while (getResponse.isRunning()) {
- getResponse = highLevelClient().asyncSearch().get(getRequest, RequestOptions.DEFAULT);
- }
-
assertFalse(getResponse.isRunning());
assertFalse(getResponse.isPartial());
assertTrue(getResponse.getStartTime() > 0);
assertTrue(getResponse.getExpirationTime() > 0);
- assertNotNull(getResponse.getSearchResponse());
+ assertThat(getResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(2L));
+ terms = getResponse.getSearchResponse().getAggregations().get("1");
+ assertThat(terms.getBuckets().size(), equalTo(2));
+ assertThat(terms.getBuckets().get(0).getKeyAsString(), equalTo("bar"));
+ assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L));
+ assertThat(terms.getBuckets().get(1).getKeyAsString(), equalTo("bar2"));
+ assertThat(terms.getBuckets().get(1).getDocCount(), equalTo(1L));
DeleteAsyncSearchRequest deleteRequest = new DeleteAsyncSearchRequest(submitResponse.getId());
AcknowledgedResponse deleteAsyncSearchResponse = highLevelClient().asyncSearch().delete(deleteRequest,
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java
index 7f2500656d1cd..46eede441529a 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/core/CountResponseTests.java
@@ -80,7 +80,7 @@ private static ShardSearchFailure createShardFailureTestItem() {
String nodeId = randomAlphaOfLengthBetween(5, 10);
String indexName = randomAlphaOfLengthBetween(5, 10);
searchShardTarget = new SearchShardTarget(nodeId,
- new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null, null);
+ new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), randomInt()), null);
}
return new ShardSearchFailure(ex, searchShardTarget);
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
index f608763fb7bdd..7ee97d6faec1e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
@@ -21,6 +21,7 @@
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.client.cluster.RemoteConnectionInfo;
import org.elasticsearch.client.cluster.RemoteInfoRequest;
import org.elasticsearch.client.cluster.RemoteInfoResponse;
@@ -35,6 +36,7 @@
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.Template;
+import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
@@ -70,38 +72,48 @@ public void testClusterPutSettings() throws IOException {
// end::put-settings-request
// tag::put-settings-create-settings
- String persistentSettingKey =
+ String transientSettingKey =
RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey();
- int persistentSettingValue = 10;
- Settings persistentSettings =
+ int transientSettingValue = 10;
+ Settings transientSettings =
Settings.builder()
- .put(persistentSettingKey, persistentSettingValue, ByteSizeUnit.BYTES)
+ .put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES)
.build(); // <1>
+
+ String persistentSettingKey =
+ EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey();
+ String persistentSettingValue =
+ EnableAllocationDecider.Allocation.NONE.name();
+ Settings persistentSettings =
+ Settings.builder()
+ .put(persistentSettingKey, persistentSettingValue)
+ .build(); // <2>
// end::put-settings-create-settings
// tag::put-settings-request-cluster-settings
- request.persistentSettings(persistentSettings); // <1>
+ request.transientSettings(transientSettings); // <1>
+ request.persistentSettings(persistentSettings); // <2>
// end::put-settings-request-cluster-settings
{
// tag::put-settings-settings-builder
- Settings.Builder persistentSettingsBuilder =
+ Settings.Builder transientSettingsBuilder =
Settings.builder()
- .put(persistentSettingKey, persistentSettingValue, ByteSizeUnit.BYTES);
- request.persistentSettings(persistentSettingsBuilder); // <1>
+ .put(transientSettingKey, transientSettingValue, ByteSizeUnit.BYTES);
+ request.transientSettings(transientSettingsBuilder); // <1>
// end::put-settings-settings-builder
}
{
// tag::put-settings-settings-map
Map map = new HashMap<>();
- map.put(persistentSettingKey
- , persistentSettingValue + ByteSizeUnit.BYTES.getSuffix());
- request.persistentSettings(map); // <1>
+ map.put(transientSettingKey
+ , transientSettingValue + ByteSizeUnit.BYTES.getSuffix());
+ request.transientSettings(map); // <1>
// end::put-settings-settings-map
}
{
// tag::put-settings-settings-source
- request.persistentSettings(
+ request.transientSettings(
"{\"indices.recovery.max_bytes_per_sec\": \"10b\"}"
, XContentType.JSON); // <1>
// end::put-settings-settings-source
@@ -116,21 +128,26 @@ public void testClusterPutSettings() throws IOException {
request.masterNodeTimeout("1m"); // <2>
// end::put-settings-request-masterTimeout
+ RequestOptions options = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE).build();
// tag::put-settings-execute
- ClusterUpdateSettingsResponse response = client.cluster().putSettings(request, RequestOptions.DEFAULT);
+ ClusterUpdateSettingsResponse response = client.cluster().putSettings(request, options);
// end::put-settings-execute
// tag::put-settings-response
boolean acknowledged = response.isAcknowledged(); // <1>
- Settings persistentSettingsResponse = response.getPersistentSettings(); // <2>
+ Settings transientSettingsResponse = response.getTransientSettings(); // <2>
+ Settings persistentSettingsResponse = response.getPersistentSettings(); // <3>
// end::put-settings-response
assertTrue(acknowledged);
- assertThat(persistentSettingsResponse.get(persistentSettingKey), equalTo(persistentSettingValue + ByteSizeUnit.BYTES.getSuffix()));
-
- // tag::put-settings-request-reset-persistent
- request.persistentSettings(Settings.builder().putNull(persistentSettingKey).build()); // <1>
- // tag::put-settings-request-reset-persistent
- ClusterUpdateSettingsResponse resetResponse = client.cluster().putSettings(request, RequestOptions.DEFAULT);
+ assertThat(transientSettingsResponse.get(transientSettingKey),
+ equalTo(transientSettingValue + ByteSizeUnit.BYTES.getSuffix()));
+ assertThat(persistentSettingsResponse.get(persistentSettingKey), equalTo(persistentSettingValue));
+
+ // tag::put-settings-request-reset-transient
+ request.transientSettings(Settings.builder().putNull(transientSettingKey).build()); // <1>
+ // end::put-settings-request-reset-transient
+ request.persistentSettings(Settings.builder().putNull(persistentSettingKey));
+ ClusterUpdateSettingsResponse resetResponse = client.cluster().putSettings(request, options);
assertTrue(resetResponse.isAcknowledged());
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java
index 655f6d19aa2ad..bcd83d1c0c56b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigTests.java
@@ -16,6 +16,7 @@
import org.elasticsearch.client.transform.transforms.pivot.PivotConfigTests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.core.Tuple;
import org.elasticsearch.xcontent.NamedXContentRegistry;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.search.SearchModule;
@@ -25,6 +26,7 @@
import java.time.Instant;
import java.util.Collections;
import java.util.List;
+import java.util.Map;
import java.util.function.Predicate;
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
@@ -52,6 +54,7 @@ public static TransformConfig randomTransformConfig() {
latestConfig,
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100),
SettingsConfigTests.randomSettingsConfig(),
+ randomMetadata(),
randomBoolean() ? null : randomRetentionPolicyConfig(),
randomBoolean() ? null : Instant.now(),
randomBoolean() ? null : Version.CURRENT.toString()
@@ -66,6 +69,30 @@ public static RetentionPolicyConfig randomRetentionPolicyConfig() {
return TimeRetentionPolicyConfigTests.randomTimeRetentionPolicyConfig();
}
+ public static Map randomMetadata() {
+ return randomMap(0, 10, () -> {
+ String key = randomAlphaOfLengthBetween(1, 10);
+ Object value;
+ switch (randomIntBetween(0, 3)) {
+ case 0:
+ value = null;
+ break;
+ case 1:
+ value = randomLong();
+ break;
+ case 2:
+ value = randomAlphaOfLengthBetween(1, 10);
+ break;
+ case 3:
+ value = randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
+ break;
+ default:
+ throw new AssertionError();
+ }
+ return Tuple.tuple(key, value);
+ });
+ }
+
@Override
protected TransformConfig createTestInstance() {
return randomTransformConfig();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java
index 4c137f88a6d7e..3e81b7ca17fdb 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/transform/transforms/TransformConfigUpdateTests.java
@@ -23,6 +23,7 @@
import static org.elasticsearch.client.transform.transforms.DestConfigTests.randomDestConfig;
import static org.elasticsearch.client.transform.transforms.SettingsConfigTests.randomSettingsConfig;
import static org.elasticsearch.client.transform.transforms.SourceConfigTests.randomSourceConfig;
+import static org.elasticsearch.client.transform.transforms.TransformConfigTests.randomMetadata;
import static org.elasticsearch.client.transform.transforms.TransformConfigTests.randomRetentionPolicyConfig;
import static org.elasticsearch.client.transform.transforms.TransformConfigTests.randomSyncConfig;
@@ -36,6 +37,7 @@ public static TransformConfigUpdate randomTransformConfigUpdate() {
randomBoolean() ? null : randomSyncConfig(),
randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000),
randomBoolean() ? null : randomSettingsConfig(),
+ randomBoolean() ? null : randomMetadata(),
randomBoolean() ? null : randomRetentionPolicyConfig()
);
}
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index d807813626a4d..1ead854004b7b 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -95,7 +95,7 @@ ext.expansions = { Architecture architecture, DockerBase base ->
'build_date' : buildDate,
'config_dir' : base == DockerBase.IRON_BANK ? 'scripts' : 'config',
'git_revision' : BuildParams.gitRevision,
- 'license' : base == DockerBase.IRON_BANK ? 'Elastic License 1.0' : 'Elastic-License-2.0',
+ 'license' : base == DockerBase.IRON_BANK ? 'Elastic License 2.0' : 'Elastic-License-2.0',
'package_manager' : base == DockerBase.UBI ? 'microdnf' : 'yum',
'docker_base' : base.name().toLowerCase(),
'version' : VersionProperties.elasticsearch,
diff --git a/distribution/docker/src/docker/iron_bank/LICENSE b/distribution/docker/src/docker/iron_bank/LICENSE
index ef2739c152ec6..809108b857ffd 100644
--- a/distribution/docker/src/docker/iron_bank/LICENSE
+++ b/distribution/docker/src/docker/iron_bank/LICENSE
@@ -1,280 +1,93 @@
-ELASTIC LICENSE AGREEMENT
+Elastic License 2.0
-PLEASE READ CAREFULLY THIS ELASTIC LICENSE AGREEMENT (THIS "AGREEMENT"), WHICH
-CONSTITUTES A LEGALLY BINDING AGREEMENT AND GOVERNS ALL OF YOUR USE OF ALL OF
-THE ELASTIC SOFTWARE WITH WHICH THIS AGREEMENT IS INCLUDED ("ELASTIC SOFTWARE")
-THAT IS PROVIDED IN OBJECT CODE FORMAT, AND, IN ACCORDANCE WITH SECTION 2 BELOW,
-CERTAIN OF THE ELASTIC SOFTWARE THAT IS PROVIDED IN SOURCE CODE FORMAT. BY
-INSTALLING OR USING ANY OF THE ELASTIC SOFTWARE GOVERNED BY THIS AGREEMENT, YOU
-ARE ASSENTING TO THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE
-WITH SUCH TERMS AND CONDITIONS, YOU MAY NOT INSTALL OR USE THE ELASTIC SOFTWARE
-GOVERNED BY THIS AGREEMENT. IF YOU ARE INSTALLING OR USING THE SOFTWARE ON
-BEHALF OF A LEGAL ENTITY, YOU REPRESENT AND WARRANT THAT YOU HAVE THE ACTUAL
-AUTHORITY TO AGREE TO THE TERMS AND CONDITIONS OF THIS AGREEMENT ON BEHALF OF
-SUCH ENTITY.
+URL: https://www.elastic.co/licensing/elastic-license
-Posted Date: April 20, 2018
+## Acceptance
-This Agreement is entered into by and between Elasticsearch BV ("Elastic") and
-You, or the legal entity on behalf of whom You are acting (as applicable,
-"You").
+By using the software, you agree to all of the terms and conditions below.
-1. OBJECT CODE END USER LICENSES, RESTRICTIONS AND THIRD PARTY OPEN SOURCE
-SOFTWARE
+## Copyright License
- 1.1 Object Code End User License. Subject to the terms and conditions of
- Section 1.2 of this Agreement, Elastic hereby grants to You, AT NO CHARGE and
- for so long as you are not in breach of any provision of this Agreement, a
- License to the Basic Features and Functions of the Elastic Software.
+The licensor grants you a non-exclusive, royalty-free, worldwide,
+non-sublicensable, non-transferable license to use, copy, distribute, make
+available, and prepare derivative works of the software, in each case subject to
+the limitations and conditions below.
- 1.2 Reservation of Rights; Restrictions. As between Elastic and You, Elastic
- and its licensors own all right, title and interest in and to the Elastic
- Software, and except as expressly set forth in Sections 1.1, and 2.1 of this
- Agreement, no other license to the Elastic Software is granted to You under
- this Agreement, by implication, estoppel or otherwise. You agree not to: (i)
- reverse engineer or decompile, decrypt, disassemble or otherwise reduce any
- Elastic Software provided to You in Object Code, or any portion thereof, to
- Source Code, except and only to the extent any such restriction is prohibited
- by applicable law, (ii) except as expressly permitted in this Agreement,
- prepare derivative works from, modify, copy or use the Elastic Software Object
- Code or the Commercial Software Source Code in any manner; (iii) except as
- expressly permitted in Section 1.1 above, transfer, sell, rent, lease,
- distribute, sublicense, loan or otherwise transfer, Elastic Software Object
- Code, in whole or in part, to any third party; (iv) use Elastic Software
- Object Code for providing time-sharing services, any software-as-a-service,
- service bureau services or as part of an application services provider or
- other service offering (collectively, "SaaS Offering") where obtaining access
- to the Elastic Software or the features and functions of the Elastic Software
- is a primary reason or substantial motivation for users of the SaaS Offering
- to access and/or use the SaaS Offering ("Prohibited SaaS Offering"); (v)
- circumvent the limitations on use of Elastic Software provided to You in
- Object Code format that are imposed or preserved by any License Key, or (vi)
- alter or remove any Marks and Notices in the Elastic Software. If You have any
- question as to whether a specific SaaS Offering constitutes a Prohibited SaaS
- Offering, or are interested in obtaining Elastic's permission to engage in
- commercial or non-commercial distribution of the Elastic Software, please
- contact elastic_license@elastic.co.
+## Limitations
- 1.3 Third Party Open Source Software. The Commercial Software may contain or
- be provided with third party open source libraries, components, utilities and
- other open source software (collectively, "Open Source Software"), which Open
- Source Software may have applicable license terms as identified on a website
- designated by Elastic. Notwithstanding anything to the contrary herein, use of
- the Open Source Software shall be subject to the license terms and conditions
- applicable to such Open Source Software, to the extent required by the
- applicable licensor (which terms shall not restrict the license rights granted
- to You hereunder, but may contain additional rights). To the extent any
- condition of this Agreement conflicts with any license to the Open Source
- Software, the Open Source Software license will govern with respect to such
- Open Source Software only. Elastic may also separately provide you with
- certain open source software that is licensed by Elastic. Your use of such
- Elastic open source software will not be governed by this Agreement, but by
- the applicable open source license terms.
+You may not provide the software to third parties as a hosted or managed
+service, where the service provides users with access to any substantial set of
+the features or functionality of the software.
-2. COMMERCIAL SOFTWARE SOURCE CODE
+You may not move, change, disable, or circumvent the license key functionality
+in the software, and you may not remove or obscure any functionality in the
+software that is protected by the license key.
- 2.1 Limited License. Subject to the terms and conditions of Section 2.2 of
- this Agreement, Elastic hereby grants to You, AT NO CHARGE and for so long as
- you are not in breach of any provision of this Agreement, a limited,
- non-exclusive, non-transferable, fully paid up royalty free right and license
- to the Commercial Software in Source Code format, without the right to grant
- or authorize sublicenses, to prepare Derivative Works of the Commercial
- Software, provided You (i) do not hack the licensing mechanism, or otherwise
- circumvent the intended limitations on the use of Elastic Software to enable
- features other than Basic Features and Functions or those features You are
- entitled to as part of a Subscription, and (ii) use the resulting object code
- only for reasonable testing purposes.
+You may not alter, remove, or obscure any licensing, copyright, or other notices
+of the licensor in the software. Any use of the licensor’s trademarks is subject
+to applicable law.
- 2.2 Restrictions. Nothing in Section 2.1 grants You the right to (i) use the
- Commercial Software Source Code other than in accordance with Section 2.1
- above, (ii) use a Derivative Work of the Commercial Software outside of a
- Non-production Environment, in any production capacity, on a temporary or
- permanent basis, or (iii) transfer, sell, rent, lease, distribute, sublicense,
- loan or otherwise make available the Commercial Software Source Code, in whole
- or in part, to any third party. Notwithstanding the foregoing, You may
- maintain a copy of the repository in which the Source Code of the Commercial
- Software resides and that copy may be publicly accessible, provided that you
- include this Agreement with Your copy of the repository.
+## Patents
-3. TERMINATION
+The licensor grants you a license, under any patent claims the licensor can
+license, or becomes able to license, to make, have made, use, sell, offer for
+sale, import and have imported the software, in each case subject to the
+limitations and conditions in this license. This license does not cover any
+patent claims that you cause to be infringed by modifications or additions to
+the software. If you or your company make any written claim that the software
+infringes or contributes to infringement of any patent, your patent license for
+the software granted under these terms ends immediately. If your company makes
+such a claim, your patent license ends immediately for work on behalf of your
+company.
- 3.1 Termination. This Agreement will automatically terminate, whether or not
- You receive notice of such Termination from Elastic, if You breach any of its
- provisions.
+## Notices
- 3.2 Post Termination. Upon any termination of this Agreement, for any reason,
- You shall promptly cease the use of the Elastic Software in Object Code format
- and cease use of the Commercial Software in Source Code format. For the
- avoidance of doubt, termination of this Agreement will not affect Your right
- to use Elastic Software, in either Object Code or Source Code formats, made
- available under the Apache License Version 2.0.
+You must ensure that anyone who gets a copy of any part of the software from you
+also gets a copy of these terms.
- 3.3 Survival. Sections 1.2, 2.2. 3.3, 4 and 5 shall survive any termination or
- expiration of this Agreement.
+If you modify the software, you must include in any modified copies of the
+software prominent notices stating that you have modified the software.
-4. DISCLAIMER OF WARRANTIES AND LIMITATION OF LIABILITY
+## No Other Rights
- 4.1 Disclaimer of Warranties. TO THE MAXIMUM EXTENT PERMITTED UNDER APPLICABLE
- LAW, THE ELASTIC SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
- AND ELASTIC AND ITS LICENSORS MAKE NO WARRANTIES WHETHER EXPRESSED, IMPLIED OR
- STATUTORY REGARDING OR RELATING TO THE ELASTIC SOFTWARE. TO THE MAXIMUM EXTENT
- PERMITTED UNDER APPLICABLE LAW, ELASTIC AND ITS LICENSORS SPECIFICALLY
- DISCLAIM ALL IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
- PURPOSE AND NON-INFRINGEMENT WITH RESPECT TO THE ELASTIC SOFTWARE, AND WITH
- RESPECT TO THE USE OF THE FOREGOING. FURTHER, ELASTIC DOES NOT WARRANT RESULTS
- OF USE OR THAT THE ELASTIC SOFTWARE WILL BE ERROR FREE OR THAT THE USE OF THE
- ELASTIC SOFTWARE WILL BE UNINTERRUPTED.
+These terms do not imply any licenses other than those expressly granted in
+these terms.
- 4.2 Limitation of Liability. IN NO EVENT SHALL ELASTIC OR ITS LICENSORS BE
- LIABLE TO YOU OR ANY THIRD PARTY FOR ANY DIRECT OR INDIRECT DAMAGES,
- INCLUDING, WITHOUT LIMITATION, FOR ANY LOSS OF PROFITS, LOSS OF USE, BUSINESS
- INTERRUPTION, LOSS OF DATA, COST OF SUBSTITUTE GOODS OR SERVICES, OR FOR ANY
- SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, IN CONNECTION WITH
- OR ARISING OUT OF THE USE OR INABILITY TO USE THE ELASTIC SOFTWARE, OR THE
- PERFORMANCE OF OR FAILURE TO PERFORM THIS AGREEMENT, WHETHER ALLEGED AS A
- BREACH OF CONTRACT OR TORTIOUS CONDUCT, INCLUDING NEGLIGENCE, EVEN IF ELASTIC
- HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
+## Termination
-5. MISCELLANEOUS
+If you use the software in violation of these terms, such use is not licensed,
+and your licenses will automatically terminate. If the licensor provides you
+with a notice of your violation, and you cease all violation of this license no
+later than 30 days after you receive that notice, your licenses will be
+reinstated retroactively. However, if you violate these terms after such
+reinstatement, any additional violation of these terms will cause your licenses
+to terminate automatically and permanently.
- This Agreement completely and exclusively states the entire agreement of the
- parties regarding the subject matter herein, and it supersedes, and its terms
- govern, all prior proposals, agreements, or other communications between the
- parties, oral or written, regarding such subject matter. This Agreement may be
- modified by Elastic from time to time, and any such modifications will be
- effective upon the "Posted Date" set forth at the top of the modified
- Agreement. If any provision hereof is held unenforceable, this Agreement will
- continue without said provision and be interpreted to reflect the original
- intent of the parties. This Agreement and any non-contractual obligation
- arising out of or in connection with it, is governed exclusively by Dutch law.
- This Agreement shall not be governed by the 1980 UN Convention on Contracts
- for the International Sale of Goods. All disputes arising out of or in
- connection with this Agreement, including its existence and validity, shall be
- resolved by the courts with jurisdiction in Amsterdam, The Netherlands, except
- where mandatory law provides for the courts at another location in The
- Netherlands to have jurisdiction. The parties hereby irrevocably waive any and
- all claims and defenses either might otherwise have in any such action or
- proceeding in any of such courts based upon any alleged lack of personal
- jurisdiction, improper venue, forum non conveniens or any similar claim or
- defense. A breach or threatened breach, by You of Section 2 may cause
- irreparable harm for which damages at law may not provide adequate relief, and
- therefore Elastic shall be entitled to seek injunctive relief without being
- required to post a bond. You may not assign this Agreement (including by
- operation of law in connection with a merger or acquisition), in whole or in
- part to any third party without the prior written consent of Elastic, which
- may be withheld or granted by Elastic in its sole and absolute discretion.
- Any assignment in violation of the preceding sentence is void. Notices to
- Elastic may also be sent to legal@elastic.co.
+## No Liability
-6. DEFINITIONS
+*As far as the law allows, the software comes as is, without any warranty or
+condition, and the licensor will not be liable to you for any damages arising
+out of these terms or the use or nature of the software, under any kind of
+legal claim.*
- The following terms have the meanings ascribed:
+## Definitions
- 6.1 "Affiliate" means, with respect to a party, any entity that controls, is
- controlled by, or which is under common control with, such party, where
- "control" means ownership of at least fifty percent (50%) of the outstanding
- voting shares of the entity, or the contractual right to establish policy for,
- and manage the operations of, the entity.
+The **licensor** is the entity offering these terms, and the **software** is the
+software the licensor makes available under these terms, including any portion
+of it.
- 6.2 "Basic Features and Functions" means those features and functions of the
- Elastic Software that are eligible for use under a Basic license, as set forth
- at https://www.elastic.co/subscriptions, as may be modified by Elastic from
- time to time.
+**you** refers to the individual or entity agreeing to these terms.
- 6.3 "Commercial Software" means the Elastic Software Source Code in any file
- containing a header stating the contents are subject to the Elastic License or
- which is contained in the repository folder labeled "x-pack", unless a LICENSE
- file present in the directory subtree declares a different license.
+**your company** is any legal entity, sole proprietorship, or other kind of
+organization that you work for, plus all organizations that have control over,
+are under the control of, or are under common control with that
+organization. **control** means ownership of substantially all the assets of an
+entity, or the power to direct its management and policies by vote, contract, or
+otherwise. Control can be direct or indirect.
- 6.4 "Derivative Work of the Commercial Software" means, for purposes of this
- Agreement, any modification(s) or enhancement(s) to the Commercial Software,
- which represent, as a whole, an original work of authorship.
+**your licenses** are all the licenses granted to you for the software under
+these terms.
- 6.5 "License" means a limited, non-exclusive, non-transferable, fully paid up,
- royalty free, right and license, without the right to grant or authorize
- sublicenses, solely for Your internal business operations to (i) install and
- use the applicable Features and Functions of the Elastic Software in Object
- Code, and (ii) permit Contractors and Your Affiliates to use the Elastic
- software as set forth in (i) above, provided that such use by Contractors must
- be solely for Your benefit and/or the benefit of Your Affiliates, and You
- shall be responsible for all acts and omissions of such Contractors and
- Affiliates in connection with their use of the Elastic software that are
- contrary to the terms and conditions of this Agreement.
+**use** means anything you do with the software requiring one of your licenses.
- 6.6 "License Key" means a sequence of bytes, including but not limited to a
- JSON blob, that is used to enable certain features and functions of the
- Elastic Software.
-
- 6.7 "Marks and Notices" means all Elastic trademarks, trade names, logos and
- notices present on the Documentation as originally provided by Elastic.
-
- 6.8 "Non-production Environment" means an environment for development, testing
- or quality assurance, where software is not used for production purposes.
-
- 6.9 "Object Code" means any form resulting from mechanical transformation or
- translation of Source Code form, including but not limited to compiled object
- code, generated documentation, and conversions to other media types.
-
- 6.10 "Source Code" means the preferred form of computer software for making
- modifications, including but not limited to software source code,
- documentation source, and configuration files.
-
- 6.11 "Subscription" means the right to receive Support Services and a License
- to the Commercial Software.
-
-
-GOVERNMENT END USER ADDENDUM TO THE ELASTIC LICENSE AGREEMENT
-
- This ADDENDUM TO THE ELASTIC LICENSE AGREEMENT (this "Addendum") applies
-only to U.S. Federal Government, State Government, and Local Government
-entities ("Government End Users") of the Elastic Software. This Addendum is
-subject to, and hereby incorporated into, the Elastic License Agreement,
-which is being entered into as of even date herewith, by Elastic and You (the
-"Agreement"). This Addendum sets forth additional terms and conditions
-related to Your use of the Elastic Software. Capitalized terms not defined in
-this Addendum have the meaning set forth in the Agreement.
-
- 1. LIMITED LICENSE TO DISTRIBUTE (DSOP ONLY). Subject to the terms and
-conditions of the Agreement (including this Addendum), Elastic grants the
-Department of Defense Enterprise DevSecOps Initiative (DSOP) a royalty-free,
-non-exclusive, non-transferable, limited license to reproduce and distribute
-the Elastic Software solely through a software distribution repository
-controlled and managed by DSOP, provided that DSOP: (i) distributes the
-Elastic Software complete and unmodified, inclusive of the Agreement
-(including this Addendum) and (ii) does not remove or alter any proprietary
-legends or notices contained in the Elastic Software.
-
- 2. CHOICE OF LAW. The choice of law and venue provisions set forth shall
-prevail over those set forth in Section 5 of the Agreement.
-
- "For U.S. Federal Government Entity End Users. This Agreement and any
- non-contractual obligation arising out of or in connection with it, is
- governed exclusively by U.S. Federal law. To the extent permitted by
- federal law, the laws of the State of Delaware (excluding Delaware choice
- of law rules) will apply in the absence of applicable federal law.
-
- For State and Local Government Entity End Users. This Agreement and any
- non-contractual obligation arising out of or in connection with it, is
- governed exclusively by the laws of the state in which you are located
- without reference to conflict of laws. Furthermore, the Parties agree that
- the Uniform Computer Information Transactions Act or any version thereof,
- adopted by any state in any form ('UCITA'), shall not apply to this
- Agreement and, to the extent that UCITA is applicable, the Parties agree to
- opt out of the applicability of UCITA pursuant to the opt-out provision(s)
- contained therein."
-
- 3. ELASTIC LICENSE MODIFICATION. Section 5 of the Agreement is hereby
-amended to replace
-
- "This Agreement may be modified by Elastic from time to time, and any
- such modifications will be effective upon the "Posted Date" set forth at
- the top of the modified Agreement."
-
- with:
-
- "This Agreement may be modified by Elastic from time to time; provided,
- however, that any such modifications shall apply only to Elastic Software
- that is installed after the "Posted Date" set forth at the top of the
- modified Agreement."
-
-V100820.0
+**trademark** means trademarks, service marks, and similar rights.
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index e41341a70277a..c59f5a5bddeca 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -254,7 +254,7 @@ def commonPackageConfig(String type, String architecture) {
}
}
-// this is package indepdendent configuration
+// this is package independent configuration
ospackage {
maintainer 'Elasticsearch Team '
summary 'Distributed RESTful search engine built for the cloud'
diff --git a/distribution/packages/src/common/scripts/postinst b/distribution/packages/src/common/scripts/postinst
index e73a095556470..a2b4c2930851f 100644
--- a/distribution/packages/src/common/scripts/postinst
+++ b/distribution/packages/src/common/scripts/postinst
@@ -49,13 +49,58 @@ case "$1" in
exit 1
;;
esac
-
# to pick up /usr/lib/sysctl.d/elasticsearch.conf
if command -v systemctl > /dev/null; then
systemctl restart systemd-sysctl.service || true
fi
-
if [ "x$IS_UPGRADE" != "xtrue" ]; then
+ # Don't exit immediately on error, we want to hopefully print some helpful banners
+ set +e
+ # Attempt to auto-configure security, this seems to be an installation
+ if ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.ConfigInitialNode \
+ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
+ ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
+ /usr/share/elasticsearch/bin/elasticsearch-cli <<< ""; then
+ # Above command runs as root and TLS keystores are created group-owned by root. It's simple to correct the ownership here
+ for dir in "${ES_PATH_CONF}"/tls_auto_config_initial_node_*
+ do
+ chown root:elasticsearch "${dir}"/http_keystore_local_node.p12
+ chown root:elasticsearch "${dir}"/http_ca.crt
+ chown root:elasticsearch "${dir}"/transport_keystore_all_nodes.p12
+ done
+ if INITIAL_PASSWORD=$(ES_MAIN_CLASS=org.elasticsearch.xpack.security.enrollment.tool.AutoConfigGenerateElasticPasswordHash \
+ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
+ ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
+ /usr/share/elasticsearch/bin/elasticsearch-cli); then
+ echo "########## Security autoconfiguration information ############"
+ echo "# #"
+ echo "# Authentication and Authorization are enabled. #"
+ echo "# TLS for the transport and the http layers is enabled and configured. #"
+ echo "# #"
+ echo "# The password of the elastic superuser will be set to: ${INITIAL_PASSWORD} #"
+ echo "# upon starting elasticsearch for the first time #"
+ echo "# #"
+ echo "##############################################################################"
+ fi
+ else
+ if [ $? -eq 80 ]; then
+ # ExitCodes.NOOP
+ echo "########## Security autoconfiguration information ############"
+ echo "# #"
+ echo "# Security features appear to be already configured. #"
+ echo "# #"
+ echo "##############################################################################"
+ else
+ echo "########## Security autoconfiguration information ############"
+ echo "# #"
+ echo "# Failed to auto-configure security features. #"
+ echo "# Authentication and Authorization are enabled. #"
+ echo "# You can use elasticsearch-reset-elastic-password to set a password #"
+ echo "# for the elastic user. #"
+ echo "# #"
+ echo "##############################################################################"
+ fi
+ fi
if command -v systemctl >/dev/null; then
echo "### NOT starting on installation, please execute the following statements to configure elasticsearch service to start automatically using systemd"
echo " sudo systemctl daemon-reload"
@@ -63,6 +108,8 @@ if [ "x$IS_UPGRADE" != "xtrue" ]; then
echo "### You can start elasticsearch service by executing"
echo " sudo systemctl start elasticsearch.service"
fi
+ set -e
+
elif [ "$RESTART_ON_UPGRADE" = "true" ]; then
echo -n "Restarting elasticsearch service..."
diff --git a/distribution/packages/src/common/scripts/postrm b/distribution/packages/src/common/scripts/postrm
index 0fc3aca400c1d..cc1032c0e73b7 100644
--- a/distribution/packages/src/common/scripts/postrm
+++ b/distribution/packages/src/common/scripts/postrm
@@ -18,6 +18,7 @@ export ES_PATH_CONF=${ES_PATH_CONF:-@path.conf@}
REMOVE_DIRS=false
REMOVE_JVM_OPTIONS_DIRECTORY=false
+REMOVE_SECURITY_AUTO_CONFIG_DIRECTORY=false
REMOVE_ELASTICSEARCH_KEYSTORE=false
REMOVE_USER_AND_GROUP=false
@@ -31,6 +32,7 @@ case "$1" in
purge)
REMOVE_DIRS=true
REMOVE_JVM_OPTIONS_DIRECTORY=true
+ REMOVE_SECURITY_AUTO_CONFIG_DIRECTORY=true
REMOVE_ELASTICSEARCH_KEYSTORE=true
REMOVE_USER_AND_GROUP=true
;;
@@ -99,6 +101,16 @@ if [ "$REMOVE_DIRS" = "true" ]; then
fi
fi
+ # delete the security auto config directory if we are purging
+ if [ "$REMOVE_SECURITY_AUTO_CONFIG_DIRECTORY" = "true" ]; then
+ for dir in "${ES_PATH_CONF}"/tls_auto_config_initial_node_*
+ do
+ echo -n "Deleting security auto-configuration directory..."
+ rm -rf "${dir}"
+ echo "OK"
+ done
+ fi
+
# delete the elasticsearch keystore if we are purging
if [ "$REMOVE_ELASTICSEARCH_KEYSTORE" = "true" ]; then
if [ -e "${ES_PATH_CONF}/elasticsearch.keystore" ]; then
diff --git a/distribution/src/bin/elasticsearch b/distribution/src/bin/elasticsearch
index c5805ea2ebd64..59aabfc3ec368 100755
--- a/distribution/src/bin/elasticsearch
+++ b/distribution/src/bin/elasticsearch
@@ -16,11 +16,13 @@
source "`dirname "$0"`"/elasticsearch-env
CHECK_KEYSTORE=true
+ATTEMPT_SECURITY_AUTO_CONFIG=true
DAEMONIZE=false
for option in "$@"; do
case "$option" in
-h|--help|-V|--version)
CHECK_KEYSTORE=false
+ ATTEMPT_SECURITY_AUTO_CONFIG=false
;;
-d|--daemonize)
DAEMONIZE=true
@@ -45,6 +47,27 @@ then
fi
fi
+if [[ $ATTEMPT_SECURITY_AUTO_CONFIG = true ]]; then
+ # It is possible that an auto-conf failure prevents the node from starting, but this is only the exceptional case (exit code 1).
+ # Most likely an auto-conf failure will leave the configuration untouched (exit codes 73, 78 and 80), optionally printing a message
+ # if the error is uncommon or unexpected, but it should otherwise let the node to start as usual.
+ # It is passed in all the command line options in order to read the node settings ones (-E), while the other parameters are ignored
+ # (a small caveat is that it also inspects the -v option in order to provide more information on how auto config went)
+ if ES_MAIN_CLASS=org.elasticsearch.xpack.security.cli.ConfigInitialNode \
+ ES_ADDITIONAL_SOURCES="x-pack-env;x-pack-security-env" \
+ ES_ADDITIONAL_CLASSPATH_DIRECTORIES=lib/tools/security-cli \
+ "`dirname "$0"`"/elasticsearch-cli "$@" <<<"$KEYSTORE_PASSWORD"; then
+ :
+ else
+ retval=$?
+ # these exit codes cover the cases where auto-conf cannot run but the node should NOT be prevented from starting as usual
+ # eg the node is restarted, is already configured in an incompatible way, or the file system permissions do not allow it
+ if [[ $retval -ne 80 ]] && [[ $retval -ne 73 ]] && [[ $retval -ne 78 ]]; then
+ exit $retval
+ fi
+ fi
+fi
+
# The JVM options parser produces the final JVM options to start Elasticsearch.
# It does this by incorporating JVM options in the following way:
# - first, system JVM options are applied (these are hardcoded options in the
diff --git a/distribution/src/bin/elasticsearch-env b/distribution/src/bin/elasticsearch-env
index 78894daec6c42..98ebddbdd39d5 100644
--- a/distribution/src/bin/elasticsearch-env
+++ b/distribution/src/bin/elasticsearch-env
@@ -119,11 +119,21 @@ if [[ "$ES_DISTRIBUTION_TYPE" == "docker" ]]; then
declare -a es_arg_array
+ containsElement () {
+ local e match="$1"
+ shift
+ for e; do [[ "$e" == "$match" ]] && return 0; done
+ return 1
+ }
+
# Elasticsearch settings need to either:
# a. have at least two dot separated lower case words, e.g. `cluster.name`, or
while IFS='=' read -r envvar_key envvar_value; do
+ es_opt=""
if [[ -n "$envvar_value" ]]; then
es_opt="-E${envvar_key}=${envvar_value}"
+ fi
+ if [[ ! -z "${es_opt}" ]] && ! containsElement "${es_opt}" "$@" ; then
es_arg_array+=("${es_opt}")
fi
done <<< "$(env | grep -E '^[-a-z0-9_]+(\.[-a-z0-9_]+)+=')"
@@ -131,10 +141,13 @@ if [[ "$ES_DISTRIBUTION_TYPE" == "docker" ]]; then
# b. be upper cased with underscore separators and prefixed with `ES_SETTING_`, e.g. `ES_SETTING_CLUSTER_NAME`.
# Underscores in setting names are escaped by writing them as a double-underscore e.g. "__"
while IFS='=' read -r envvar_key envvar_value; do
+ es_opt=""
if [[ -n "$envvar_value" ]]; then
# The long-hand sed `y` command works in any sed variant.
envvar_key="$(echo "$envvar_key" | sed -e 's/^ES_SETTING_//; s/_/./g ; s/\.\./_/g; y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/' )"
es_opt="-E${envvar_key}=${envvar_value}"
+ fi
+ if [[ ! -z "${es_opt}" ]] && ! containsElement "${es_opt}" "$@" ; then
es_arg_array+=("${es_opt}")
fi
done <<< "$(env | grep -E '^ES_SETTING(_{1,2}[A-Z]+)+=')"
diff --git a/distribution/src/bin/elasticsearch.bat b/distribution/src/bin/elasticsearch.bat
index 7d4d58010ba33..c8b94d7164b7a 100644
--- a/distribution/src/bin/elasticsearch.bat
+++ b/distribution/src/bin/elasticsearch.bat
@@ -5,6 +5,7 @@ setlocal enableextensions
SET params='%*'
SET checkpassword=Y
+SET attemptautoconfig=Y
:loop
FOR /F "usebackq tokens=1* delims= " %%A IN (!params!) DO (
@@ -21,16 +22,20 @@ FOR /F "usebackq tokens=1* delims= " %%A IN (!params!) DO (
IF "!current!" == "-h" (
SET checkpassword=N
+ SET attemptautoconfig=N
)
IF "!current!" == "--help" (
SET checkpassword=N
+ SET attemptautoconfig=N
)
IF "!current!" == "-V" (
SET checkpassword=N
+ SET attemptautoconfig=N
)
IF "!current!" == "--version" (
SET checkpassword=N
+ SET attemptautoconfig=N
)
IF "!silent!" == "Y" (
@@ -68,6 +73,23 @@ IF "%checkpassword%"=="Y" (
)
)
+IF "%attemptautoconfig%"=="Y" (
+ ECHO.!KEYSTORE_PASSWORD!| %JAVA% %ES_JAVA_OPTS% ^
+ -Des.path.home="%ES_HOME%" ^
+ -Des.path.conf="%ES_PATH_CONF%" ^
+ -Des.distribution.flavor="%ES_DISTRIBUTION_FLAVOR%" ^
+ -Des.distribution.type="%ES_DISTRIBUTION_TYPE%" ^
+ -cp "!ES_CLASSPATH!;!ES_HOME!/lib/tools/security-cli/*;!ES_HOME!/modules/x-pack-core/*;!ES_HOME!/modules/x-pack-security/*" "org.elasticsearch.xpack.security.cli.ConfigInitialNode" !newparams!
+ SET SHOULDEXIT=Y
+ IF !ERRORLEVEL! EQU 0 SET SHOULDEXIT=N
+ IF !ERRORLEVEL! EQU 73 SET SHOULDEXIT=N
+ IF !ERRORLEVEL! EQU 78 SET SHOULDEXIT=N
+ IF !ERRORLEVEL! EQU 80 SET SHOULDEXIT=N
+ IF "!SHOULDEXIT!"=="Y" (
+ exit /b !ERRORLEVEL!
+ )
+)
+
if not defined ES_TMPDIR (
for /f "tokens=* usebackq" %%a in (`CALL %JAVA% -cp "!ES_CLASSPATH!" "org.elasticsearch.tools.launchers.TempDirectory"`) do set ES_TMPDIR=%%a
)
diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
index bb6da0c6a4eb5..4f33a1ba5b02c 100644
--- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
+++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java
@@ -123,7 +123,6 @@ private List jvmOptions(final Path config, Path plugins, final String es
throws InterruptedException, IOException, JvmOptionsFileParserException {
final List jvmOptions = readJvmOptionsFiles(config);
- final MachineDependentHeap machineDependentHeap = new MachineDependentHeap(new DefaultSystemMemoryInfo());
if (esJavaOpts != null) {
jvmOptions.addAll(
@@ -132,6 +131,9 @@ private List jvmOptions(final Path config, Path plugins, final String es
}
final List substitutedJvmOptions = substitutePlaceholders(jvmOptions, Collections.unmodifiableMap(substitutions));
+ final MachineDependentHeap machineDependentHeap = new MachineDependentHeap(
+ new OverridableSystemMemoryInfo(substitutedJvmOptions, new DefaultSystemMemoryInfo())
+ );
substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions));
final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions);
final List systemJvmOptions = SystemJvmOptions.systemJvmOptions();
diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfo.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfo.java
new file mode 100644
index 0000000000000..118c68b2111b6
--- /dev/null
+++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfo.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.tools.launchers;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A {@link SystemMemoryInfo} which returns a user-overridden memory size if one
+ * has been specified using the {@code es.total_memory_bytes} system property, or
+ * else returns the value provided by a fallback provider.
+ */
+public final class OverridableSystemMemoryInfo implements SystemMemoryInfo {
+
+ private final List userDefinedJvmOptions;
+ private final SystemMemoryInfo fallbackSystemMemoryInfo;
+
+ public OverridableSystemMemoryInfo(final List userDefinedJvmOptions, SystemMemoryInfo fallbackSystemMemoryInfo) {
+ this.userDefinedJvmOptions = Objects.requireNonNull(userDefinedJvmOptions);
+ this.fallbackSystemMemoryInfo = Objects.requireNonNull(fallbackSystemMemoryInfo);
+ }
+
+ @Override
+ public long availableSystemMemory() throws SystemMemoryInfoException {
+
+ return userDefinedJvmOptions.stream()
+ .filter(option -> option.startsWith("-Des.total_memory_bytes="))
+ .map(totalMemoryBytesOption -> {
+ try {
+ long bytes = Long.parseLong(totalMemoryBytesOption.split("=", 2)[1]);
+ if (bytes < 0) {
+ throw new IllegalArgumentException("Negative memory size specified in [" + totalMemoryBytesOption + "]");
+ }
+ return bytes;
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("Unable to parse number of bytes from [" + totalMemoryBytesOption + "]", e);
+ }
+ })
+ .reduce((previous, current) -> current) // this is effectively findLast(), so that ES_JAVA_OPTS overrides jvm.options
+ .orElse(fallbackSystemMemoryInfo.availableSystemMemory());
+ }
+}
diff --git a/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfoTests.java b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfoTests.java
new file mode 100644
index 0000000000000..f56db17422578
--- /dev/null
+++ b/distribution/tools/launchers/src/test/java/org/elasticsearch/tools/launchers/OverridableSystemMemoryInfoTests.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.tools.launchers;
+
+import org.elasticsearch.tools.launchers.SystemMemoryInfo.SystemMemoryInfoException;
+
+import java.util.List;
+
+import static org.hamcrest.Matchers.is;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+public class OverridableSystemMemoryInfoTests extends LaunchersTestCase {
+
+ private static final long FALLBACK = -1L;
+
+ public void testNoOptions() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(List.of(), fallbackSystemMemoryInfo());
+ assertThat(memoryInfo.availableSystemMemory(), is(FALLBACK));
+ }
+
+ public void testNoOverrides() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(List.of("-Da=b", "-Dx=y"), fallbackSystemMemoryInfo());
+ assertThat(memoryInfo.availableSystemMemory(), is(FALLBACK));
+ }
+
+ public void testValidSingleOverride() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(
+ List.of("-Des.total_memory_bytes=123456789"),
+ fallbackSystemMemoryInfo()
+ );
+ assertThat(memoryInfo.availableSystemMemory(), is(123456789L));
+ }
+
+ public void testValidOverrideInList() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(
+ List.of("-Da=b", "-Des.total_memory_bytes=987654321", "-Dx=y"),
+ fallbackSystemMemoryInfo()
+ );
+ assertThat(memoryInfo.availableSystemMemory(), is(987654321L));
+ }
+
+ public void testMultipleValidOverridesInList() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(
+ List.of("-Des.total_memory_bytes=123456789", "-Da=b", "-Des.total_memory_bytes=987654321", "-Dx=y"),
+ fallbackSystemMemoryInfo()
+ );
+ assertThat(memoryInfo.availableSystemMemory(), is(987654321L));
+ }
+
+ public void testNegativeOverride() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(
+ List.of("-Da=b", "-Des.total_memory_bytes=-123", "-Dx=y"),
+ fallbackSystemMemoryInfo()
+ );
+ try {
+ memoryInfo.availableSystemMemory();
+ fail("expected to fail");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), is("Negative memory size specified in [-Des.total_memory_bytes=-123]"));
+ }
+ }
+
+ public void testUnparsableOverride() throws SystemMemoryInfoException {
+ final SystemMemoryInfo memoryInfo = new OverridableSystemMemoryInfo(
+ List.of("-Da=b", "-Des.total_memory_bytes=invalid", "-Dx=y"),
+ fallbackSystemMemoryInfo()
+ );
+ try {
+ memoryInfo.availableSystemMemory();
+ fail("expected to fail");
+ } catch (IllegalArgumentException e) {
+ assertThat(e.getMessage(), is("Unable to parse number of bytes from [-Des.total_memory_bytes=invalid]"));
+ }
+ }
+
+ private static SystemMemoryInfo fallbackSystemMemoryInfo() {
+ return () -> FALLBACK;
+ }
+}
diff --git a/docs/changelog/75144.yaml b/docs/changelog/75144.yaml
new file mode 100644
index 0000000000000..ec7558dbe10ad
--- /dev/null
+++ b/docs/changelog/75144.yaml
@@ -0,0 +1,6 @@
+pr: 75144
+summary: Security auto-configuration for packaged installations
+area: Security
+type: enhancement
+issues:
+ - 78306
diff --git a/docs/changelog/77231.yaml b/docs/changelog/77231.yaml
new file mode 100644
index 0000000000000..53716a944b928
--- /dev/null
+++ b/docs/changelog/77231.yaml
@@ -0,0 +1,7 @@
+pr: 77231
+summary: Auto configure TLS for new nodes of new clusters
+area: Security
+type: feature
+issues:
+ - 75144
+ - 75704
diff --git a/docs/changelog/78750.yaml b/docs/changelog/78750.yaml
new file mode 100644
index 0000000000000..c5f06c8783460
--- /dev/null
+++ b/docs/changelog/78750.yaml
@@ -0,0 +1,6 @@
+pr: 78750
+summary: Allow total memory to be overridden
+area: Packaging
+type: enhancement
+issues:
+ - 65905
diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc
index 45effa530b120..390730c566e0c 100644
--- a/docs/reference/cluster/nodes-info.asciidoc
+++ b/docs/reference/cluster/nodes-info.asciidoc
@@ -46,18 +46,28 @@ comma-separated list, such as `http,ingest`.
[%collapsible%open]
.Valid values for ``
====
+`aggregations`::
+Information about the available types of aggregation.
+
`http`::
-HTTP connection information.
+Information about the HTTP interface of this node.
+
+`indices`::
++
+--
+Node-level configuration related to indexing:
+
+* `total_indexing_buffer`: the maximum size of the indexing buffer on this node.
+--
`ingest`::
Information about ingest pipelines and processors.
`jvm`::
-JVM stats, memory pool information, garbage collection, buffer pools, number of
-loaded/unloaded classes.
+JVM information, including its name, its version, and its configuration.
`os`::
-Operating system stats, load average, mem, swap.
+Operating system information, including its name and version.
`plugins`::
+
@@ -74,23 +84,25 @@ process
--
`process`::
-Process statistics, memory consumption, cpu usage, open file descriptors.
+Process information, including the numeric process ID.
`settings`::
Lists all node settings in use as defined in the `elasticsearch.yml` file.
`thread_pool`::
-Statistics about each thread pool, including current size, queue and rejected
-tasks
+Information about the configuration of each thread pool.
`transport`::
-Transport statistics about sent and received bytes in cluster communication.
+Information about the transport interface of the node.
====
+If you use the full `GET /_nodes//` form of this API then you
+can also request the metric `_all` to retrieve all metrics, or you can request
+the metric `_none` to suppress all metrics and retrieve only the identity of
+the node.
include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=node-id]
-
[[cluster-nodes-info-api-response-body]]
==== {api-response-body-title}
diff --git a/docs/reference/cluster/nodes-stats.asciidoc b/docs/reference/cluster/nodes-stats.asciidoc
index a2ba22578275f..7e15acb99d4bd 100644
--- a/docs/reference/cluster/nodes-stats.asciidoc
+++ b/docs/reference/cluster/nodes-stats.asciidoc
@@ -1036,6 +1036,18 @@ Total amount of physical memory.
(integer)
Total amount of physical memory in bytes.
+`adjusted_total`::
+(<>)
+If the amount of physical memory has been overridden using the `es.total_memory_bytes`
+system property then this reports the overridden value. Otherwise it reports the same
+value as `total`.
+
+`adjusted_total_in_bytes`::
+(integer)
+If the amount of physical memory has been overridden using the `es.total_memory_bytes`
+system property then this reports the overridden value in bytes. Otherwise it reports
+the same value as `total_in_bytes`.
+
`free`::
(<>)
Amount of free physical memory.
diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc
index e9066542b0157..209c40a3e4070 100644
--- a/docs/reference/cluster/stats.asciidoc
+++ b/docs/reference/cluster/stats.asciidoc
@@ -916,6 +916,18 @@ Total amount of physical memory across all selected nodes.
(integer)
Total amount, in bytes, of physical memory across all selected nodes.
+`adjusted_total`::
+(<>)
+Total amount of memory across all selected nodes, but using the value specified
+using the `es.total_memory_bytes` system property instead of measured total
+memory for those nodes where that system property was set.
+
+`adjusted_total_in_bytes`::
+(integer)
+Total amount, in bytes, of memory across all selected nodes, but using the
+value specified using the `es.total_memory_bytes` system property instead
+of measured total memory for those nodes where that system property was set.
+
`free`::
(<>)
Amount of free physical memory across all selected nodes.
@@ -1399,6 +1411,8 @@ The API returns the following response:
"mem" : {
"total" : "16gb",
"total_in_bytes" : 17179869184,
+ "adjusted_total" : "16gb",
+ "adjusted_total_in_bytes" : 17179869184,
"free" : "78.1mb",
"free_in_bytes" : 81960960,
"used" : "15.9gb",
diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc
index 54f586965288c..f75ace0995a2f 100644
--- a/docs/reference/cluster/update-settings.asciidoc
+++ b/docs/reference/cluster/update-settings.asciidoc
@@ -4,7 +4,7 @@
Cluster update settings
++++
-Updates cluster-wide settings.
+Updates cluster-wide settings.
[[cluster-update-settings-api-request]]
@@ -21,21 +21,21 @@ Updates cluster-wide settings.
[[cluster-update-settings-api-desc]]
==== {api-description-title}
-With specifications in the request body, this API call can update cluster
-settings. Updates to settings can be persistent, meaning they apply across
+With specifications in the request body, this API call can update cluster
+settings. Updates to settings can be persistent, meaning they apply across
restarts, or transient, where they don't survive a full cluster restart.
-You can reset persistent or transient settings by assigning a `null` value. If a
-transient setting is reset, the first one of these values that is defined is
+You can reset persistent or transient settings by assigning a `null` value. If a
+transient setting is reset, the first one of these values that is defined is
applied:
* the persistent setting
* the setting in the configuration file
-* the default value.
+* the default value.
The order of precedence for cluster settings is:
-1. transient cluster settings
+1. transient cluster settings
2. persistent cluster settings
3. settings in the `elasticsearch.yml` configuration file.
@@ -45,6 +45,8 @@ the setting is the same on all nodes. If, on the other hand, you define differen
settings on different nodes by accident using the configuration file, it is very
difficult to notice these discrepancies.
+NOTE: Transient settings are deprecated and will be removed in a future release.
+Prefer using persistent cluster settings instead.
[[cluster-update-settings-api-query-params]]
==== {api-query-parms-title}
@@ -52,7 +54,7 @@ difficult to notice these discrepancies.
include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=flat-settings]
`include_defaults`::
- (Optional, Boolean) If `true`, returns all default cluster settings.
+ (Optional, Boolean) If `true`, returns all default cluster settings.
Defaults to `false`.
include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms]
@@ -85,9 +87,9 @@ PUT /_cluster/settings?flat_settings=true
}
}
--------------------------------------------------
+// TEST[warning:[transient settings removal] Updating cluster settings through transientSettings is deprecated. Use persistent settings instead.]
-
-The response to an update returns the changed setting, as in this response to
+The response to an update returns the changed setting, as in this response to
the transient example:
[source,console-result]
@@ -114,6 +116,7 @@ PUT /_cluster/settings
}
}
--------------------------------------------------
+// TEST[warning:[transient settings removal] Updating cluster settings through transientSettings is deprecated. Use persistent settings instead.]
The response does not include settings that have been reset:
@@ -141,3 +144,4 @@ PUT /_cluster/settings
}
}
--------------------------------------------------
+// TEST[warning:[transient settings removal] Updating cluster settings through transientSettings is deprecated. Use persistent settings instead.]
diff --git a/docs/reference/data-management/migrate-index-allocation-filters.asciidoc b/docs/reference/data-management/migrate-index-allocation-filters.asciidoc
index be6b0d2c06067..2cd24fd8c4cc5 100644
--- a/docs/reference/data-management/migrate-index-allocation-filters.asciidoc
+++ b/docs/reference/data-management/migrate-index-allocation-filters.asciidoc
@@ -69,7 +69,7 @@ node.roles [ data_hot, data_content ]
=== Remove custom allocation settings from existing {ilm-init} policies
Update the allocate action for each lifecycle phase to remove the attribute-based
-allocation settings. This enables {ilm-init} to inject the
+allocation settings. {ilm-init} will inject a
<> action into each phase
to automatically transition the indices through the data tiers.
diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc
index e365fbd76781a..28067768298e6 100644
--- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc
+++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc
@@ -521,7 +521,7 @@ lowers the `indices.lifecycle.poll_interval` setting to `1m` (one minute).
----
PUT /_cluster/settings
{
- "transient": {
+ "persistent": {
"indices.lifecycle.poll_interval": "1m"
}
}
@@ -658,7 +658,7 @@ The following update cluster settings API request resets the
----
PUT /_cluster/settings
{
- "transient": {
+ "persistent": {
"indices.lifecycle.poll_interval": null
}
}
diff --git a/docs/reference/fleet/fleet-multi-search.asciidoc b/docs/reference/fleet/fleet-multi-search.asciidoc
new file mode 100644
index 0000000000000..908433e12b679
--- /dev/null
+++ b/docs/reference/fleet/fleet-multi-search.asciidoc
@@ -0,0 +1,45 @@
+[role="xpack"]
+[[fleet-multi-search]]
+=== Fleet multi search API
+++++
+Fleet search
+++++
+
+Executes several <> with a single API request.
+
+The API follows the same structure as the <> API. However,
+similar to the fleet search API, it supports the `wait_for_checkpoints` parameter.
+
+NOTE: The fleet multi search API is designed for indirect use through fleet server. Direct use is
+not supported. Elastic reserves the right to change or remove this feature in future releases
+without prior notice.
+
+[[fleet-multi-search-api-request]]
+==== {api-request-title}
+
+`GET /_fleet/_msearch`
+
+`GET //_fleet/_msearch`
+
+[[fleet-multi-search-api-path-params]]
+==== {api-path-parms-title}
+
+``::
+(Optional, string)
+A single target to search. If the target is an index alias, it must resolve to a single index.
+
+[role="child_attributes"]
+[[fleet-multi-search-api-query-parms]]
+==== {api-query-parms-title}
+
+`wait_for_checkpoints`::
+(Optional, list) A comma separated list of checkpoints. When configured, the search API will
+only be executed on a shard after the relevant checkpoint has become visible for search.
+Defaults to an empty list which will cause Elasticsearch to immediately execute the search.
+
+`allow_partial_search_results`::
+(Optional, Boolean)
+If `true`, returns partial results if there are shard request timeouts or
+<>. If `false`, returns an error with
+no partial results. Defaults to the configured cluster setting `search.default_allow_partial_results` which
+is `true` by default.
diff --git a/docs/reference/fleet/fleet-search.asciidoc b/docs/reference/fleet/fleet-search.asciidoc
new file mode 100644
index 0000000000000..1993ceb3b7f60
--- /dev/null
+++ b/docs/reference/fleet/fleet-search.asciidoc
@@ -0,0 +1,68 @@
+[role="xpack"]
+[[fleet-search]]
+=== Fleet search API
+++++
+Fleet search
+++++
+
+The purpose of the fleet search API is to provide a search API where the search
+will only be executed after the provided checkpoint has been processed and is visible
+for searches inside of Elasticsearch.
+
+NOTE: The fleet search API is designed for indirect use through fleet server. Direct use is
+not supported. Elastic reserves the right to change or remove this feature in future releases
+without prior notice.
+
+[discrete]
+[[wait-for-checkpoint-functionality]]
+== Wait for checkpoint functionality
+
+The fleet search API supports the optional parameter `wait_for_checkpoints`. This parameter
+is a list of sequence number checkpoints. When this parameter is present, the search will
+only be executed on local shards after all operations up to and including the provided
+sequence number checkpoint are visible for search. Indexed operations become visible after a
+refresh. The checkpoints are indexed by shard.
+
+If a timeout occurs before the checkpoint has been refreshed into Elasticsearch,
+the search request will time out.
+
+The fleet search API only supports searches against a single target. If an index alias
+is supplied as the search target, it must resolve to a single concrete index.
+
+[discrete]
+[[fleet-search-partial-responses]]
+== Allow partial results
+
+By default, the Elasticsearch search API will allow <>.
+With this fleet API, it is common to configure this to be `false` or to check in the response
+to ensure each shard search was successful. If these precautions are not taken, it is
+possible for search results to be successfully returned even if one or more shards
+timed out.
+
+[[fleet-search-api-request]]
+==== {api-request-title}
+
+`GET //_fleet/_search`
+
+[[fleet-search-api-path-params]]
+==== {api-path-parms-title}
+
+``::
+(Required, string)
+A single target to search. If the target is an index alias, it must resolve to a single index.
+
+[role="child_attributes"]
+[[fleet-search-api-query-parms]]
+==== {api-query-parms-title}
+
+`wait_for_checkpoints`::
+(Optional, list) A comma separated list of checkpoints. When configured, the search API will
+only be executed on a shard after the relevant checkpoint has become visible for search.
+Defaults to an empty list which will cause Elasticsearch to immediately execute the search.
+
+`allow_partial_search_results`::
+(Optional, Boolean)
+If `true`, returns partial results if there are shard request timeouts or
+<>. If `false`, returns an error with
+no partial results. Defaults to the configured cluster setting `search.default_allow_partial_results` which
+is `true` by default.
diff --git a/docs/reference/fleet/index.asciidoc b/docs/reference/fleet/index.asciidoc
index 2581c226fd31e..f550542589e0c 100644
--- a/docs/reference/fleet/index.asciidoc
+++ b/docs/reference/fleet/index.asciidoc
@@ -10,6 +10,9 @@ agent and action data. These APIs are experimental and for internal use by
{fleet} only.
* <>
+* <>
// top-level
include::get-global-checkpoints.asciidoc[]
+include::fleet-search.asciidoc[]
+include::fleet-multi-search.asciidoc[]
diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc
index 48bdb5d72fb97..919e026bbeea2 100644
--- a/docs/reference/high-availability/cluster-design.asciidoc
+++ b/docs/reference/high-availability/cluster-design.asciidoc
@@ -191,7 +191,7 @@ these nodes according to their responsibilities, allowing you to scale their
resources independently as needed. You can have as many <>, <>, <>, etc. as needed to
support your workload. As your cluster grows larger, we recommend using
-dedicated nodes for each role. This lets you to independently scale resources
+dedicated nodes for each role. This allows you to independently scale resources
for each task.
However, it is good practice to limit the number of master-eligible nodes in
diff --git a/docs/reference/how-to/fix-common-cluster-issues.asciidoc b/docs/reference/how-to/fix-common-cluster-issues.asciidoc
index 1c255380fd63e..6c9e0ab508265 100644
--- a/docs/reference/how-to/fix-common-cluster-issues.asciidoc
+++ b/docs/reference/how-to/fix-common-cluster-issues.asciidoc
@@ -48,8 +48,8 @@ watermarks and remove the write block.
[source,console]
----
PUT _cluster/settings
-{
- "transient": {
+{
+ "persistent": {
"cluster.routing.allocation.disk.watermark.low": "90%",
"cluster.routing.allocation.disk.watermark.high": "95%",
"cluster.routing.allocation.disk.watermark.flood_stage": "97%"
@@ -57,7 +57,7 @@ PUT _cluster/settings
}
PUT */_settings?expand_wildcards=all
-{
+{
"index.blocks.read_only_allow_delete": null
}
----
@@ -79,8 +79,8 @@ When a long-term solution is in place, reset or reconfigure the disk watermarks.
[source,console]
----
PUT _cluster/settings
-{
- "transient": {
+{
+ "persistent": {
"cluster.routing.allocation.disk.watermark.low": null,
"cluster.routing.allocation.disk.watermark.high": null,
"cluster.routing.allocation.disk.watermark.flood_stage": null
@@ -208,7 +208,7 @@ include::{es-repo-dir}/tab-widgets/cpu-usage-widget.asciidoc[]
**Check hot threads**
If a node has high CPU usage, use the <> to check for resource-intensive threads running on the node.
+threads API>> to check for resource-intensive threads running on the node.
[source,console]
----
diff --git a/docs/reference/how-to/size-your-shards.asciidoc b/docs/reference/how-to/size-your-shards.asciidoc
index 045eeba97947b..3696c9df1bec8 100644
--- a/docs/reference/how-to/size-your-shards.asciidoc
+++ b/docs/reference/how-to/size-your-shards.asciidoc
@@ -329,7 +329,7 @@ cluster settings API>> and retry the action.
----
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"cluster.max_shards_per_node": 1200
}
}
@@ -353,7 +353,7 @@ When a long-term solution is in place, we recommend you reset the
----
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"cluster.max_shards_per_node": null
}
}
diff --git a/docs/reference/ilm/actions/ilm-migrate.asciidoc b/docs/reference/ilm/actions/ilm-migrate.asciidoc
index f096fd98815c2..61ce2e1a4064d 100644
--- a/docs/reference/ilm/actions/ilm-migrate.asciidoc
+++ b/docs/reference/ilm/actions/ilm-migrate.asciidoc
@@ -8,11 +8,9 @@ Moves the index to the <> that corresponds
to the current phase by updating the <>
index setting.
{ilm-init} automatically injects the migrate action in the warm and cold
-phases if no allocation options are specified with the <> action.
-If you specify an allocate action that only modifies the number of index
-replicas, {ilm-init} reduces the number of replicas before migrating the index.
-To prevent automatic migration without specifying allocation options,
-you can explicitly include the migrate action and set the enabled option to `false`.
+phases. To prevent automatic migration, you
+can explicitly include the migrate action and set the enabled option to
+`false`.
If the `cold` phase defines a <> the `migrate`
action will not be injected automatically in the `cold` phase because the managed index will be
@@ -53,9 +51,9 @@ Defaults to `true`.
[[ilm-enabled-migrate-ex]]
==== Example
-In the following policy, the allocate action is specified to reduce the number of replicas before {ilm-init} migrates the index to warm nodes.
+In the following policy, the <> action is specified to reduce the number of replicas before {ilm-init} migrates the index to warm nodes.
-NOTE: Explicitly specifying the migrate action is not required--{ilm-init} automatically performs the migrate action unless you specify allocation options or disable migration.
+NOTE: Explicitly specifying the migrate action is not required--{ilm-init} automatically performs the migrate action unless you disable migration.
[source,console]
--------------------------------------------------
@@ -84,8 +82,6 @@ The migrate action in the following policy is disabled and
the allocate action assigns the index to nodes that have a
`rack_id` of _one_ or _two_.
-NOTE: Explicitly disabling the migrate action is not required--{ilm-init} does not inject the migrate action if you specify allocation options.
-
[source,console]
--------------------------------------------------
PUT _ilm/policy/my_policy
diff --git a/docs/reference/ilm/ilm-with-existing-indices.asciidoc b/docs/reference/ilm/ilm-with-existing-indices.asciidoc
index 28a9ffce71caf..4ea4e340fd748 100644
--- a/docs/reference/ilm/ilm-with-existing-indices.asciidoc
+++ b/docs/reference/ilm/ilm-with-existing-indices.asciidoc
@@ -6,51 +6,51 @@
If you've been using Curator or some other mechanism to manage periodic indices,
you have a couple options when migrating to {ilm-init}:
-* Set up your index templates to use an {ilm-init} policy to manage your new indices.
+* Set up your index templates to use an {ilm-init} policy to manage your new indices.
Once {ilm-init} is managing your current write index, you can apply an appropriate policy to your old indices.
-* Reindex into an {ilm-init}-managed index.
+* Reindex into an {ilm-init}-managed index.
NOTE: Starting in Curator version 5.7, Curator ignores {ilm-init} managed indices.
[discrete]
[[ilm-existing-indices-apply]]
-=== Apply policies to existing time series indices
+=== Apply policies to existing time series indices
The simplest way to transition to managing your periodic indices with {ilm-init} is
-to <> to apply a lifecycle policy to new indices.
-Once the index you are writing to is being managed by {ilm-init},
+to <> to apply a lifecycle policy to new indices.
+Once the index you are writing to is being managed by {ilm-init},
you can <> to your older indices.
-Define a separate policy for your older indices that omits the rollover action.
-Rollover is used to manage where new data goes, so isn't applicable.
+Define a separate policy for your older indices that omits the rollover action.
+Rollover is used to manage where new data goes, so isn't applicable.
-Keep in mind that policies applied to existing indices compare the `min_age` for each phase to
+Keep in mind that policies applied to existing indices compare the `min_age` for each phase to
the original creation date of the index, and might proceed through multiple phases immediately.
-If your policy performs resource-intensive operations like force merge,
+If your policy performs resource-intensive operations like force merge,
you don't want to have a lot of indices performing those operations all at once
-when you switch over to {ilm-init}.
+when you switch over to {ilm-init}.
-You can specify different `min_age` values in the policy you use for existing indices,
-or set <>
-to control how the index age is calculated.
+You can specify different `min_age` values in the policy you use for existing indices,
+or set <>
+to control how the index age is calculated.
-Once all pre-{ilm-init} indices have been aged out and removed,
+Once all pre-{ilm-init} indices have been aged out and removed,
you can delete the policy you used to manage them.
NOTE: If you are using {beats} or {ls}, enabling {ilm-init} in version 7.0 and onward
-sets up {ilm-init} to manage new indices automatically.
-If you are using {beats} through {ls},
-you might need to change your {ls} output configuration and invoke the {beats} setup
+sets up {ilm-init} to manage new indices automatically.
+If you are using {beats} through {ls},
+you might need to change your {ls} output configuration and invoke the {beats} setup
to use {ilm-init} for new data.
[discrete]
[[ilm-existing-indices-reindex]]
=== Reindex into a managed index
-An alternative to <> is to
+An alternative to <> is to
reindex your data into an {ilm-init}-managed index.
-You might want to do this if creating periodic indices with very small amounts of data
+You might want to do this if creating periodic indices with very small amounts of data
has led to excessive shard counts, or if continually indexing into the same index has led to large shards
and performance issues.
@@ -58,24 +58,24 @@ First, you need to set up the new {ilm-init}-managed index:
. Update your index template to include the necessary {ilm-init} settings.
. Bootstrap an initial index as the write index.
-. Stop writing to the old indices and index new documents using the alias that points to bootstrapped index.
+. Stop writing to the old indices and index new documents using the alias that points to bootstrapped index.
To reindex into the managed index:
. Pause indexing new documents if you do not want to mix new and old data in the {ilm-init}-managed index.
-Mixing old and new data in one index is safe,
+Mixing old and new data in one index is safe,
but a combined index needs to be retained until you are ready to delete the new data.
-. Reduce the {ilm-init} poll interval to ensure that the index doesn't
+. Reduce the {ilm-init} poll interval to ensure that the index doesn't
grow too large while waiting for the rollover check.
-By default, {ilm-init} checks to see what actions need to be taken every 10 minutes.
+By default, {ilm-init} checks to see what actions need to be taken every 10 minutes.
+
--
[source,console]
-----------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"indices.lifecycle.poll_interval": "1m" <1>
}
}
@@ -84,13 +84,13 @@ PUT _cluster/settings
<1> Check once a minute to see if {ilm-init} actions such as rollover need to be performed.
--
-. Reindex your data using the <>.
-If you want to partition the data in the order in which it was originally indexed,
-you can run separate reindex requests.
+. Reindex your data using the <>.
+If you want to partition the data in the order in which it was originally indexed,
+you can run separate reindex requests.
+
--
-IMPORTANT: Documents retain their original IDs. If you don't use automatically generated document IDs,
-and are reindexing from multiple source indices, you might need to do additional processing to
+IMPORTANT: Documents retain their original IDs. If you don't use automatically generated document IDs,
+and are reindexing from multiple source indices, you might need to do additional processing to
ensure that document IDs don't conflict. One way to do this is to use a
<> in the reindex call to append the original index name
to the document ID.
@@ -174,19 +174,19 @@ POST _reindex
<1> Matches your existing indices. Using the prefix for
the new indices makes using this index pattern much easier.
<2> The alias that points to your bootstrapped index.
-<3> Halts reindexing if multiple documents have the same ID.
- This is recommended to prevent accidentally overwriting documents
+<3> Halts reindexing if multiple documents have the same ID.
+ This is recommended to prevent accidentally overwriting documents
if documents in different source indices have the same ID.
--
-. When reindexing is complete, set the {ilm-init} poll interval back to its default value to
+. When reindexing is complete, set the {ilm-init} poll interval back to its default value to
prevent unnecessary load on the master node:
+
[source,console]
-----------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"indices.lifecycle.poll_interval": null
}
}
@@ -198,5 +198,5 @@ PUT _cluster/settings
+
Querying using this alias will now search your new data and all of the reindexed data.
-. Once you have verified that all of the reindexed data is available in the new managed indices,
+. Once you have verified that all of the reindexed data is available in the new managed indices,
you can safely remove the old indices.
diff --git a/docs/reference/indices/recovery.asciidoc b/docs/reference/indices/recovery.asciidoc
index da30b53f982c6..8564b5e2b46ed 100644
--- a/docs/reference/indices/recovery.asciidoc
+++ b/docs/reference/indices/recovery.asciidoc
@@ -107,25 +107,25 @@ to a different node in the same cluster.
--
(String)
Recovery stage.
-Returned values include:
+Returned values can include:
-`DONE`::
-Complete.
-
-`FINALIZE`::
-Cleanup.
+`INIT`::
+Recovery has not started.
`INDEX`::
Reading index metadata and copying bytes from source to destination.
-`INIT`::
-Recovery has not started.
-
-`START`::
-Starting the recovery process; opening the index for use.
+`VERIFY_INDEX`::
+Verifying the integrity of the index.
`TRANSLOG`::
-Replaying transaction log .
+Replaying transaction log.
+
+`FINALIZE`::
+Cleanup.
+
+`DONE`::
+Complete.
--
`primary`::
diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc
index e7ba886261e9e..051d4bebcd6d1 100644
--- a/docs/reference/ingest/enrich.asciidoc
+++ b/docs/reference/ingest/enrich.asciidoc
@@ -220,3 +220,54 @@ Instead, you can:
include::geo-match-enrich-policy-type-ex.asciidoc[]
include::match-enrich-policy-type-ex.asciidoc[]
+
+[[ingest-enrich-components]]
+==== Enrich components
+
+The enrich coordinator is a component that manages and performs the searches
+required to enrich documents on each ingest node. It combines searches from all enrich
+processors in all pipelines into bulk <>.
+
+The enrich policy executor is a component that manages the executions of all
+enrich policies. When an enrich policy is executed, this component creates
+a new enrich index and removes the previous enrich index. The enrich policy
+executions are managed from the elected master node. The execution of these
+policies occurs on a different node.
+
+[[ingest-enrich-settings]]
+==== Node settings
+
+The `enrich` processor has node settings for enrich coordinator and
+enrich policy executor.
+
+The enrich coordinator supports the following node settings:
+
+`enrich.cache_size`::
+Maximum number of searches to cache for enriching documents. Defaults to `1000`.
+There is a single cache for all enrich processors in the cluster. This setting
+determines the size of that cache.
+
+`enrich.coordinator_proxy.max_concurrent_requests`::
+Maximum number of concurrent <> to
+run when enriching documents. Defaults to `8`.
+
+`enrich.coordinator_proxy.max_lookups_per_request`::
+Maximum number of searches to include in a <> when enriching documents. Defaults to `128`.
+
+The enrich policy executor supports the following node settings:
+
+`enrich.fetch_size`::
+Maximum batch size when reindexing a source index into an enrich index. Defaults
+to `10000`.
+
+`enrich.max_force_merge_attempts`::
+Maximum number of <> attempts allowed on an
+enrich index. Defaults to `3`.
+
+`enrich.cleanup_period`::
+How often {es} checks whether unused enrich indices can be deleted. Defaults to
+`15m`.
+
+`enrich.max_concurrent_policy_executions`::
+Maximum number of enrich policies to execute concurrently. Defaults to `50`.
diff --git a/docs/reference/ingest/processors/grok.asciidoc b/docs/reference/ingest/processors/grok.asciidoc
index 394b8d1f8f7ff..3188e0b2bbfbb 100644
--- a/docs/reference/ingest/processors/grok.asciidoc
+++ b/docs/reference/ingest/processors/grok.asciidoc
@@ -338,7 +338,7 @@ server log.
--------------------------------------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"logger.org.elasticsearch.ingest.common.GrokProcessor": "debug"
}
}
diff --git a/docs/reference/licensing/get-license.asciidoc b/docs/reference/licensing/get-license.asciidoc
index 5f24e623ec2e7..b4897f5727cd9 100644
--- a/docs/reference/licensing/get-license.asciidoc
+++ b/docs/reference/licensing/get-license.asciidoc
@@ -32,7 +32,14 @@ response after cluster startup, wait a short period and retry the request.
`local`::
(Boolean) Specifies whether to retrieve local information. The default value
is `false`, which means the information is retrieved from the master node.
-
+
+`accept_enterprise`::
+(Boolean) If `true`, this parameter returns `enterprise` for Enterprise
+license types. If `false`, this parameter returns `platinum` for both
+`platinum` and `enterprise` license types. This behavior is maintained for
+backwards compatibility.
+
+deprecated::[7.6.0,"This parameter is deprecated and will always be set to `true` in 8.x."]
[discrete]
==== Authorization
diff --git a/docs/reference/migration/apis/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc
index ae7e7e9241fb4..613b66a0593d4 100644
--- a/docs/reference/migration/apis/deprecation.asciidoc
+++ b/docs/reference/migration/apis/deprecation.asciidoc
@@ -39,6 +39,19 @@ expressions are supported.
When you specify this parameter, only deprecations for the specified
data streams or indices are returned.
+[[migration-api-settings]]
+==== Settings
+
+You can use the following settings to control the behavior of the deprecation info API:
+
+[[skip_deprecated_settings]]
+// tag::skip_deprecated_settings-tag[]
+`deprecation.skip_deprecated_settings`::
+(<>)
+Defaults to an empty list. Set to a list of setting names to be ignored by the deprecation info API. Any
+deprecations related to settings in this list will not be returned by the API. Simple wildcard matching is supported.
+// end::skip_deprecated_settings-tag[]
+
[[migration-api-example]]
==== {api-examples-title}
diff --git a/docs/reference/modules/cluster/allocation_filtering.asciidoc b/docs/reference/modules/cluster/allocation_filtering.asciidoc
index 8aad7a97855f8..e70e43682973b 100644
--- a/docs/reference/modules/cluster/allocation_filtering.asciidoc
+++ b/docs/reference/modules/cluster/allocation_filtering.asciidoc
@@ -22,7 +22,7 @@ it down, you could create a filter that excludes the node by its IP address:
--------------------------------------------------
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"cluster.routing.allocation.exclude._ip" : "10.0.0.1"
}
}
@@ -70,7 +70,7 @@ You can use wildcards when specifying attribute values, for example:
------------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"cluster.routing.allocation.exclude._ip": "192.168.2.*"
}
}
diff --git a/docs/reference/modules/cluster/disk_allocator.asciidoc b/docs/reference/modules/cluster/disk_allocator.asciidoc
index 7410c3052f050..11d94b5c59c43 100644
--- a/docs/reference/modules/cluster/disk_allocator.asciidoc
+++ b/docs/reference/modules/cluster/disk_allocator.asciidoc
@@ -63,7 +63,7 @@ You can use the following settings to control disk-based allocation:
[[cluster-routing-disk-threshold]]
// tag::cluster-routing-disk-threshold-tag[]
-`cluster.routing.allocation.disk.threshold_enabled` {ess-icon}::
+`cluster.routing.allocation.disk.threshold_enabled`::
(<>)
Defaults to `true`. Set to `false` to disable the disk allocation decider.
// end::cluster-routing-disk-threshold-tag[]
@@ -145,7 +145,7 @@ gigabytes free, and updating the information about the cluster every minute:
--------------------------------------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"cluster.routing.allocation.disk.watermark.low": "100gb",
"cluster.routing.allocation.disk.watermark.high": "50gb",
"cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
diff --git a/docs/reference/modules/cluster/misc.asciidoc b/docs/reference/modules/cluster/misc.asciidoc
index a447b49c6058e..83adaef9ec1a0 100644
--- a/docs/reference/modules/cluster/misc.asciidoc
+++ b/docs/reference/modules/cluster/misc.asciidoc
@@ -159,7 +159,7 @@ The settings which control logging can be updated <> setting enables you to create multiple
@@ -105,7 +105,7 @@ will only form a cluster from nodes that all have the same cluster name. The
default value for the cluster name is `elasticsearch`, but it is recommended to
change this to reflect the logical name of the cluster.
-[discrete]
+[[bootstrap-auto-bootstrap]]
==== Auto-bootstrapping in development mode
If the cluster is running with a completely default configuration then it will
@@ -125,10 +125,10 @@ in the <>:
* `discovery.seed_hosts`
* `cluster.initial_master_nodes`
-[NOTE]
-==================================================
-
-[[modules-discovery-bootstrap-cluster-joining]] If you start an {es} node
+[[modules-discovery-bootstrap-cluster-joining]]
+.Forming a single cluster
+****
+If you start an {es} node
without configuring these settings then it will start up in development mode and
auto-bootstrap itself into a new cluster. If you start some {es} nodes on
different hosts then by default they will not discover each other and will form
@@ -145,4 +145,4 @@ If you intended to form a single cluster then you should start again:
* Configure `cluster.initial_master_nodes` as described above.
* Restart all the nodes and verify that they have formed a single cluster.
-==================================================
+****
diff --git a/docs/reference/modules/network/tracers.asciidoc b/docs/reference/modules/network/tracers.asciidoc
index 605db04f2e44a..45f98001f8a7b 100644
--- a/docs/reference/modules/network/tracers.asciidoc
+++ b/docs/reference/modules/network/tracers.asciidoc
@@ -16,7 +16,7 @@ the `org.elasticsearch.http.HttpTracer` logger to `TRACE`:
--------------------------------------------------
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"logger.org.elasticsearch.http.HttpTracer" : "TRACE"
}
}
@@ -29,7 +29,7 @@ exclude wildcard patterns. By default every request will be traced.
--------------------------------------------------
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"http.tracer.include" : "*",
"http.tracer.exclude" : ""
}
@@ -47,7 +47,7 @@ requests and responses. Activate the tracer by setting the level of the
--------------------------------------------------
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"logger.org.elasticsearch.transport.TransportService.tracer" : "TRACE"
}
}
@@ -61,7 +61,7 @@ fault detection pings:
--------------------------------------------------
PUT _cluster/settings
{
- "transient" : {
+ "persistent" : {
"transport.tracer.include" : "*",
"transport.tracer.exclude" : "internal:coordination/fault_detection/*"
}
diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc
index 25215759204e3..f886bd2fd27fa 100644
--- a/docs/reference/rest-api/common-parms.asciidoc
+++ b/docs/reference/rest-api/common-parms.asciidoc
@@ -970,6 +970,10 @@ The `latest` method transforms the data by finding the latest document for each
unique key.
end::transform-latest[]
+tag::transform-metadata[]
+Defines optional {transform} metadata.
+end::transform-metadata[]
+
tag::transform-retention[]
Defines a retention policy for the {transform}. Data that meets the defined
criteria is deleted from the destination index.
diff --git a/docs/reference/search/search-your-data/retrieve-selected-fields.asciidoc b/docs/reference/search/search-your-data/retrieve-selected-fields.asciidoc
index 1cf05d264030d..8a916d562973d 100644
--- a/docs/reference/search/search-your-data/retrieve-selected-fields.asciidoc
+++ b/docs/reference/search/search-your-data/retrieve-selected-fields.asciidoc
@@ -376,6 +376,17 @@ won't be included in the response because `include_unmapped` isn't set to
// TESTRESPONSE[s/"max_score" : 1.0/"max_score" : $body.hits.max_score/]
// TESTRESPONSE[s/"_score" : 1.0/"_score" : $body.hits.hits.0._score/]
+[discrete]
+[[retrieve-metadata-fields]]
+==== Retrieving metadata fields
+By default, document metadata fields like `_id` or `_index` are not returned
+when the requested `fields` option uses wildcard patterns like `*`. However,
+when explicitly requested using the fields name, the `_id`, `_routing`,
+`_ignored`, `_index` and `_version` metadata fields can be retrieved.
+In addition, when you define an <> with a path to one
+of the above metadata fields, this field will also be retrievable using either
+the alias name or via wildcard patterns that match the alias.
+
[discrete]
[[Ignored-field values]]
==== Ignored field values
diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc
index 832e574a10cce..21bef5abf745e 100644
--- a/docs/reference/settings/monitoring-settings.asciidoc
+++ b/docs/reference/settings/monitoring-settings.asciidoc
@@ -239,9 +239,8 @@ Array-based headers are sent `n` times where `n` is the size of the array.
monitoring agent will override anything defined here.
`index.name.time_format`::
-A mechanism for changing the default date suffix for the, by default, daily
-monitoring indices. The default value is `yyyy.MM.dd`, which is why the indices
-are created daily.
+A mechanism for changing the default date suffix for daily monitoring indices.
+The default format is `yyyy.MM.dd`. For example, `.monitoring-es-7-2021.08.26`.
`use_ingest`::
Whether to supply a placeholder pipeline to the monitoring cluster and a
diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc
index 71c2199d75c77..e117b61682dfe 100644
--- a/docs/reference/setup/configuration.asciidoc
+++ b/docs/reference/setup/configuration.asciidoc
@@ -140,6 +140,9 @@ settings API and use `elasticsearch.yml` only for local configurations. Using
the cluster update settings API ensures the setting is the same on all nodes. If
you accidentally configure different settings in `elasticsearch.yml` on
different nodes, it can be difficult to notice discrepancies.
+
+NOTE: Transient settings are deprecated and will be removed in a future release.
+Prefer using persistent cluster settings instead.
--
[[static-cluster-setting]]
diff --git a/docs/reference/setup/important-settings/path-settings.asciidoc b/docs/reference/setup/important-settings/path-settings.asciidoc
index c3a4558335917..f66477c25146b 100644
--- a/docs/reference/setup/important-settings/path-settings.asciidoc
+++ b/docs/reference/setup/important-settings/path-settings.asciidoc
@@ -57,9 +57,9 @@ Storage Spaces on Windows. If you wish to use multiple data paths on a single
machine then you must run one node for each data path.
If you currently use multiple data paths in a
-{ref}/high-availability-cluster-design.html[highly available cluster] then you
-can migrate to a setup that uses a single path for each node without downtime
-using a process similar to a
+{ref}/high-availability-cluster-design.html[highly available cluster] then you
+can migrate to a setup that uses a single path for each node without downtime
+using a process similar to a
{ref}/restart-cluster.html#restart-cluster-rolling[rolling restart]: shut each
node down in turn and replace it with one or more nodes each configured to use
a single data path. In more detail, for each node that currently has multiple
@@ -76,18 +76,18 @@ migrating to a single-data-path setup before starting to upgrade.
--------------------------------------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"cluster.routing.allocation.exclude._name": "target-node-name"
}
}
--------------------------------------------------
+
-You can use the {ref}/cat-allocation.html[cat allocation API] to track progress
+You can use the {ref}/cat-allocation.html[cat allocation API] to track progress
of this data migration. If some shards do not migrate then the
-{ref}/cluster-allocation-explain.html[cluster allocation explain API] will help
+{ref}/cluster-allocation-explain.html[cluster allocation explain API] will help
you to determine why.
-3. Follow the steps in the
+3. Follow the steps in the
{ref}/restart-cluster.html#restart-cluster-rolling[rolling restart process]
up to and including shutting the target node down.
@@ -100,7 +100,7 @@ of every shard assigned to at least one of the other nodes in your cluster.
--------------------------------------------------
PUT _cluster/settings
{
- "transient": {
+ "persistent": {
"cluster.routing.allocation.exclude._name": null
}
}
@@ -118,7 +118,7 @@ has sufficient space for the data that it will hold.
`path.data` setting pointing at a separate data path.
9. Start the new nodes and follow the rest of the
-{ref}/restart-cluster.html#restart-cluster-rolling[rolling restart process] for
+{ref}/restart-cluster.html#restart-cluster-rolling[rolling restart process] for
them.
10. Ensure your cluster health is `green`, so that every shard has been
@@ -126,9 +126,9 @@ assigned.
You can alternatively add some number of single-data-path nodes to your
cluster, migrate all your data over to these new nodes using
-{ref}/modules-cluster.html#cluster-shard-allocation-filtering[allocation filters],
-and then remove the old nodes from the cluster. This approach will temporarily
-double the size of your cluster so it will only work if you have the capacity to
+{ref}/modules-cluster.html#cluster-shard-allocation-filtering[allocation filters],
+and then remove the old nodes from the cluster. This approach will temporarily
+double the size of your cluster so it will only work if you have the capacity to
expand your cluster like this.
If you currently use multiple data paths but your cluster is not highly
diff --git a/docs/reference/setup/logging-config.asciidoc b/docs/reference/setup/logging-config.asciidoc
index 50b32bc4f9b81..f41658f637602 100644
--- a/docs/reference/setup/logging-config.asciidoc
+++ b/docs/reference/setup/logging-config.asciidoc
@@ -155,7 +155,7 @@ only intended for expert use.
----
PUT /_cluster/settings
{
- "transient": {
+ "persistent": {
"logger.org.elasticsearch.discovery": "DEBUG"
}
}
diff --git a/docs/reference/transform/apis/put-transform.asciidoc b/docs/reference/transform/apis/put-transform.asciidoc
index d60fdff28d07e..3b69cc1996d5d 100644
--- a/docs/reference/transform/apis/put-transform.asciidoc
+++ b/docs/reference/transform/apis/put-transform.asciidoc
@@ -126,6 +126,12 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-unique-key]
====
//End latest
+//Begin _meta
+`_meta`::
+(Optional, object)
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
+//End _meta
+
//Begin pivot
`pivot`::
(Required^*^, object)
diff --git a/docs/reference/transform/apis/update-transform.asciidoc b/docs/reference/transform/apis/update-transform.asciidoc
index c601bd31f47fb..bfc514fdb10ec 100644
--- a/docs/reference/transform/apis/update-transform.asciidoc
+++ b/docs/reference/transform/apis/update-transform.asciidoc
@@ -98,6 +98,12 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=dest-pipeline]
(Optional, <>)
include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=frequency]
+//Begin _meta
+`_meta`::
+(Optional, object)
+include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=transform-metadata]
+//End _meta
+
//Begin retention policy
`retention_policy`::
(Optional, object)
diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/ExitCodes.java b/libs/cli/src/main/java/org/elasticsearch/cli/ExitCodes.java
index bebf23a5f798f..f4d712f70f5d4 100644
--- a/libs/cli/src/main/java/org/elasticsearch/cli/ExitCodes.java
+++ b/libs/cli/src/main/java/org/elasticsearch/cli/ExitCodes.java
@@ -12,8 +12,9 @@
* POSIX exit codes.
*/
public class ExitCodes {
+ // please be extra careful when changing these as the values might be used in scripts,
+ // usages of which are not tracked by the IDE
public static final int OK = 0;
- public static final int NOOP = 63; // nothing to do
public static final int USAGE = 64; // command line usage error
public static final int DATA_ERROR = 65; // data format error
public static final int NO_INPUT = 66; // cannot open input
@@ -27,6 +28,7 @@ public class ExitCodes {
public static final int PROTOCOL = 76; // remote error in protocol
public static final int NOPERM = 77; // permission denied
public static final int CONFIG = 78; // configuration error
+ public static final int NOOP = 80; // nothing to do
private ExitCodes() { /* no instance, just constants */ }
}
diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java
index bb3771bfd5762..55d930128d0f6 100644
--- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java
+++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorFactory.java
@@ -55,7 +55,7 @@ protected Aggregator doCreateInternal(
for (Map.Entry entry : valuesSources.entrySet()) {
if (entry.getValue() instanceof ValuesSource.Numeric == false) {
throw new AggregationExecutionException(
- "ValuesSource type " + entry.getValue().toString() + "is not supported for aggregation " + this.name()
+ "ValuesSource type [" + entry.getValue().toString() + "] is not supported for aggregation [" + this.name() + "]"
);
}
// TODO: There must be a better option than this.
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml
new file mode 100644
index 0000000000000..780f33be52dc0
--- /dev/null
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_versioned_update.yml
@@ -0,0 +1,177 @@
+---
+"Test pipeline versioned updates":
+ - skip:
+ version: " - 7.99.99"
+ reason: "re-enable in 7.16+ when backported"
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ # conditional update fails because of missing version
+ - do:
+ catch: bad_request
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ if_version: 1
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ body: >
+ {
+ "description": "_description",
+ "version": 1,
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ ingest.get_pipeline:
+ id: "my_pipeline"
+ - match: { my_pipeline.version: 1 }
+
+ # required version does not match specified version
+ - do:
+ catch: bad_request
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ if_version: 99
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+
+ # may not update to same version
+ - do:
+ catch: bad_request
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ if_version: 1
+ body: >
+ {
+ "version": 1,
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+
+ # cannot conditionally update non-existent pipeline
+ - do:
+ catch: bad_request
+ ingest.put_pipeline:
+ id: "my_pipeline2"
+ if_version: 1
+ body: >
+ {
+ "version": 1,
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+
+ # conditionally update to specified version
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ if_version: 1
+ body: >
+ {
+ "version": 99,
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ ingest.get_pipeline:
+ id: "my_pipeline"
+ - match: { my_pipeline.version: 99 }
+
+ # conditionally update without specified version
+ - do:
+ ingest.put_pipeline:
+ id: "my_pipeline"
+ if_version: 99
+ body: >
+ {
+ "description": "_description",
+ "processors": [
+ {
+ "set" : {
+ "field" : "field2",
+ "value": "_value"
+ }
+ }
+ ]
+ }
+ - match: { acknowledged: true }
+
+ - do:
+ ingest.get_pipeline:
+ id: "my_pipeline"
+ - match: { my_pipeline.version: 100 }
+
+ - do:
+ ingest.delete_pipeline:
+ id: "my_pipeline"
+ - match: { acknowledged: true }
diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java
index 1fa72df222f6b..4d12b66290f25 100644
--- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java
+++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java
@@ -19,6 +19,8 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.internal.io.IOUtils;
+import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xcontent.json.JsonXContent;
@@ -93,6 +95,8 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
@After
public void cleanUp() throws Exception {
+ deleteDatabasesInConfigDirectory();
+
ClusterUpdateSettingsResponse settingsResponse = client().admin().cluster()
.prepareUpdateSettings()
.setPersistentSettings(Settings.builder()
@@ -109,6 +113,18 @@ public void cleanUp() throws Exception {
assertThat(state.getDatabases(), anEmptyMap());
}
});
+ assertBusy(() -> {
+ GeoIpDownloaderStatsAction.Response response =
+ client().execute(GeoIpDownloaderStatsAction.INSTANCE, new GeoIpDownloaderStatsAction.Request()).actionGet();
+ assertThat(response.getStats().getDatabasesCount(), equalTo(0));
+ assertThat(response.getNodes(), not(empty()));
+ for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) {
+ assertThat(nodeResponse.getConfigDatabases(), empty());
+ assertThat(nodeResponse.getDatabases(), empty());
+ assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).collect(Collectors.toList()),
+ empty());
+ }
+ });
assertBusy(() -> {
List geoIpTmpDirs = getGeoIpTmpDirs();
for (Path geoIpTmpDir : geoIpTmpDirs) {
@@ -263,7 +279,6 @@ public void testGeoIpDatabasesDownload() throws Exception {
}
}
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/79074")
@TestLogging(value = "org.elasticsearch.ingest.geoip:TRACE", reason = "https://github.com/elastic/elasticsearch/issues/69972")
public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception {
assumeTrue("only test with fixture to have stable results", ENDPOINT != null);
@@ -273,10 +288,20 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception {
// verify before updating dbs
{
- SimulateDocumentBaseResult result = simulatePipeline();
- assertThat(result.getIngestDocument().getFieldValue("ip-city.city_name", String.class), equalTo("Tumba"));
- assertThat(result.getIngestDocument().getFieldValue("ip-asn.organization_name", String.class), equalTo("Bredband2 AB"));
- assertThat(result.getIngestDocument().getFieldValue("ip-country.country_name", String.class), equalTo("Sweden"));
+ assertBusy(() -> {
+ SimulateDocumentBaseResult result = simulatePipeline();
+ assertThat(result.getFailure(), nullValue());
+ assertThat(result.getIngestDocument(), notNullValue());
+
+ IngestDocument doc = result.getIngestDocument();
+ assertThat(doc.getSourceAndMetadata(), hasKey("ip-city"));
+ assertThat(doc.getSourceAndMetadata(), hasKey("ip-asn"));
+ assertThat(doc.getSourceAndMetadata(), hasKey("ip-country"));
+
+ assertThat(doc.getFieldValue("ip-city.city_name", String.class), equalTo("Tumba"));
+ assertThat(doc.getFieldValue("ip-asn.organization_name", String.class), equalTo("Bredband2 AB"));
+ assertThat(doc.getFieldValue("ip-country.country_name", String.class), equalTo("Sweden"));
+ });
}
// Enable downloader:
@@ -330,31 +355,24 @@ public void testStartWithNoDatabases() throws Exception {
// Enable downloader:
Settings.Builder settings = Settings.builder().put(GeoIpDownloaderTaskExecutor.ENABLED_SETTING.getKey(), true);
assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings));
- {
- assertBusy(() -> {
- SimulateDocumentBaseResult result = simulatePipeline();
- assertThat(result.getFailure(), nullValue());
- assertThat(result.getIngestDocument(), notNullValue());
- Map, ?> source = result.getIngestDocument().getSourceAndMetadata();
- assertThat(source, not(hasKey("tags")));
- assertThat(source, hasKey("ip-city"));
- assertThat(source, hasKey("ip-asn"));
- assertThat(source, hasKey("ip-country"));
-
- assertThat(((Map, ?>) source.get("ip-city")).get("city_name"), equalTo("Linköping"));
- assertThat(((Map, ?>) source.get("ip-asn")).get("organization_name"), equalTo("Bredband2 AB"));
- assertThat(((Map, ?>) source.get("ip-country")).get("country_name"), equalTo("Sweden"));
- });
- }
+ verifyUpdatedDatabase();
}
private void verifyUpdatedDatabase() throws Exception {
assertBusy(() -> {
SimulateDocumentBaseResult result = simulatePipeline();
assertThat(result.getFailure(), nullValue());
- assertThat(result.getIngestDocument().getFieldValue("ip-city.city_name", String.class), equalTo("Linköping"));
- assertThat(result.getIngestDocument().getFieldValue("ip-asn.organization_name", String.class), equalTo("Bredband2 AB"));
- assertThat(result.getIngestDocument().getFieldValue("ip-country.country_name", String.class), equalTo("Sweden"));
+ assertThat(result.getIngestDocument(), notNullValue());
+
+ Map, ?> source = result.getIngestDocument().getSourceAndMetadata();
+ assertThat(source, not(hasKey("tags")));
+ assertThat(source, hasKey("ip-city"));
+ assertThat(source, hasKey("ip-asn"));
+ assertThat(source, hasKey("ip-country"));
+
+ assertThat(((Map, ?>) source.get("ip-city")).get("city_name"), equalTo("Linköping"));
+ assertThat(((Map, ?>) source.get("ip-asn")).get("organization_name"), equalTo("Bredband2 AB"));
+ assertThat(((Map, ?>) source.get("ip-country")).get("country_name"), equalTo("Sweden"));
});
}
@@ -483,6 +501,32 @@ private void setupDatabasesInConfigDirectory() throws Exception {
for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) {
assertThat(nodeResponse.getConfigDatabases(),
containsInAnyOrder("GeoLite2-Country.mmdb", "GeoLite2-City.mmdb", "GeoLite2-ASN.mmdb"));
+ assertThat(nodeResponse.getDatabases(), empty());
+ assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).collect(Collectors.toList()),
+ empty());
+ }
+ });
+ }
+
+ private void deleteDatabasesInConfigDirectory() throws Exception {
+ StreamSupport.stream(internalCluster().getInstances(Environment.class).spliterator(), false)
+ .map(Environment::configFile)
+ .map(path -> path.resolve("ingest-geoip"))
+ .distinct()
+ .forEach(path -> {
+ try {
+ IOUtils.rm(path);
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ });
+
+ assertBusy(() -> {
+ GeoIpDownloaderStatsAction.Response response =
+ client().execute(GeoIpDownloaderStatsAction.INSTANCE, new GeoIpDownloaderStatsAction.Request()).actionGet();
+ assertThat(response.getNodes(), not(empty()));
+ for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) {
+ assertThat(nodeResponse.getConfigDatabases(), empty());
}
});
}
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
index d1d9cd788d2da..b875c88a28a48 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java
@@ -70,11 +70,11 @@ public final class GeoIpProcessor extends AbstractProcessor {
private final Set properties;
private final boolean ignoreMissing;
private final boolean firstOnly;
+ private final String databaseFile;
/**
* Construct a geo-IP processor.
- *
- * @param tag the processor tag
+ * @param tag the processor tag
* @param description the processor description
* @param field the source field to geo-IP map
* @param supplier a supplier of a geo-IP database reader; ideally this is lazily-loaded once on first use
@@ -83,6 +83,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
* @param properties the properties; ideally this is lazily-loaded once on first use
* @param ignoreMissing true if documents with a missing value for the field should be ignored
* @param firstOnly true if only first result should be returned in case of array
+ * @param databaseFile the name of the database file used by this processor, reported in the tag added when the database is unavailable
*/
GeoIpProcessor(
final String tag,
@@ -93,7 +94,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
final String targetField,
final Set properties,
final boolean ignoreMissing,
- final boolean firstOnly) {
+ final boolean firstOnly,
+ final String databaseFile) {
super(tag, description);
this.field = field;
this.isValid = isValid;
@@ -102,6 +104,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
this.properties = properties;
this.ignoreMissing = ignoreMissing;
this.firstOnly = firstOnly;
+ this.databaseFile = databaseFile;
}
boolean isIgnoreMissing() {
@@ -121,8 +124,14 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException
throw new IllegalArgumentException("field [" + field + "] is null, cannot extract geoip information.");
}
+ DatabaseReaderLazyLoader lazyLoader = this.supplier.get();
+ if (lazyLoader == null) {
+ tag(ingestDocument, databaseFile);
+ return ingestDocument;
+ }
+
if (ip instanceof String) {
- Map geoData = getGeoData((String) ip);
+ Map geoData = getGeoData(lazyLoader, (String) ip);
if (geoData.isEmpty() == false) {
ingestDocument.setFieldValue(targetField, geoData);
}
@@ -133,7 +142,7 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException
if (ipAddr instanceof String == false) {
throw new IllegalArgumentException("array in field [" + field + "] should only contain strings");
}
- Map geoData = getGeoData((String) ipAddr);
+ Map geoData = getGeoData(lazyLoader, (String) ipAddr);
if (geoData.isEmpty()) {
geoDataList.add(null);
continue;
@@ -154,8 +163,7 @@ public IngestDocument execute(IngestDocument ingestDocument) throws IOException
return ingestDocument;
}
- private Map getGeoData(String ip) throws IOException {
- DatabaseReaderLazyLoader lazyLoader = this.supplier.get();
+ private Map getGeoData(DatabaseReaderLazyLoader lazyLoader, String ip) throws IOException {
try {
final String databaseType = lazyLoader.getDatabaseType();
final InetAddress ipAddress = InetAddresses.forString(ip);
@@ -393,11 +401,11 @@ public Processor create(
}
DatabaseReaderLazyLoader lazyLoader = databaseRegistry.getDatabase(databaseFile);
- if (lazyLoader == null && databaseRegistry.getAvailableDatabases().isEmpty() == false) {
+ if (useDatabaseUnavailableProcessor(lazyLoader, databaseRegistry.getAvailableDatabases())) {
+ return new DatabaseUnavailableProcessor(processorTag, description, databaseFile);
+ } else if (lazyLoader == null) {
throw newConfigurationException(TYPE, processorTag,
"database_file", "database file [" + databaseFile + "] doesn't exist");
- } else if (lazyLoader == null && databaseRegistry.getAvailableDatabases().isEmpty()) {
- return new DatabaseUnavailableProcessor(processorTag, description, databaseFile);
}
final String databaseType;
try {
@@ -431,7 +439,9 @@ public Processor create(
}
CheckedSupplier supplier = () -> {
DatabaseReaderLazyLoader loader = databaseRegistry.getDatabase(databaseFile);
- if (loader == null) {
+ if (useDatabaseUnavailableProcessor(loader, databaseRegistry.getAvailableDatabases())) {
+ return null;
+ } else if (loader == null) {
throw new ResourceNotFoundException("database file [" + databaseFile + "] doesn't exist");
}
// Only check whether the suffix has changed and not the entire database type.
@@ -467,8 +477,13 @@ public Processor create(
return valid;
};
return new GeoIpProcessor(processorTag, description, ipField, supplier, isValid, targetField, properties, ignoreMissing,
- firstOnly);
+ firstOnly, databaseFile);
+ }
+
+ private static boolean useDatabaseUnavailableProcessor(DatabaseReaderLazyLoader loader, Set availableDatabases) {
+ return loader == null && availableDatabases.isEmpty();
}
+
}
// Geoip2's AddressNotFoundException is checked and due to the fact that we need run their code
@@ -543,7 +558,7 @@ static class DatabaseUnavailableProcessor extends AbstractProcessor {
@Override
public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
- ingestDocument.appendFieldValue("tags", "_geoip_database_unavailable_" + databaseName, true);
+ tag(ingestDocument, databaseName);
return ingestDocument;
}
@@ -556,4 +571,8 @@ public String getDatabaseName() {
return databaseName;
}
}
+
+ private static void tag(IngestDocument ingestDocument, String databaseName) {
+ ingestDocument.appendFieldValue("tags", "_geoip_database_unavailable_" + databaseName, true);
+ }
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java
index f065cc0473864..940940d9d2f15 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java
@@ -417,9 +417,18 @@ public void testUpdateDatabaseWhileIngesting() throws Exception {
assertThat(geoData.get("city_name"), equalTo("Linköping"));
}
{
+ // No databases are available, so assume that databases still need to be downloaded and therefore don't fail:
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
databaseRegistry.removeStaleEntries(List.of("GeoLite2-City.mmdb"));
localDatabases.updateDatabase(geoIpConfigDir.resolve("GeoLite2-City.mmdb"), false);
+ processor.execute(ingestDocument);
+ Map, ?> geoData = (Map, ?>) ingestDocument.getSourceAndMetadata().get("geoip");
+ assertThat(geoData, nullValue());
+ }
+ {
+ // There are databases available, but not the right one, so fail:
+ databaseRegistry.updateDatabase("GeoLite2-City-Test.mmdb", "md5", geoipTmpDir.resolve("GeoLite2-City-Test.mmdb"));
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
Exception e = expectThrows(ResourceNotFoundException.class, () -> processor.execute(ingestDocument));
assertThat(e.getMessage(), equalTo("database file [GeoLite2-City.mmdb] doesn't exist"));
}
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
index c99672aa30eb9..258fcb0a53bf4 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java
@@ -36,8 +36,8 @@
public class GeoIpProcessorTests extends ESTestCase {
public void testCity() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "8.8.8.8");
@@ -60,8 +60,8 @@ public void testCity() throws Exception {
}
public void testNullValueWithIgnoreMissing() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, false, "filename");
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
Collections.singletonMap("source_field", null));
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
@@ -70,8 +70,8 @@ public void testNullValueWithIgnoreMissing() throws Exception {
}
public void testNonExistentWithIgnoreMissing() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, false, "filename");
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
processor.execute(ingestDocument);
@@ -79,8 +79,8 @@ public void testNonExistentWithIgnoreMissing() throws Exception {
}
public void testNullWithoutIgnoreMissing() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(),
Collections.singletonMap("source_field", null));
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
@@ -89,8 +89,8 @@ public void testNullWithoutIgnoreMissing() throws Exception {
}
public void testNonExistentWithoutIgnoreMissing() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap());
IngestDocument ingestDocument = new IngestDocument(originalIngestDocument);
Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument));
@@ -98,8 +98,8 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception {
}
public void testCity_withIpV6() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
String address = "2602:306:33d3:8000::3257:9652";
Map document = new HashMap<>();
@@ -126,8 +126,8 @@ public void testCity_withIpV6() throws Exception {
}
public void testCityWithMissingLocation() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "80.231.5.0");
@@ -142,8 +142,8 @@ public void testCityWithMissingLocation() throws Exception {
}
public void testCountry() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-Country.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "82.170.213.79");
@@ -161,8 +161,8 @@ public void testCountry() throws Exception {
}
public void testCountryWithMissingLocation() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-Country.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-Country.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "80.231.5.0");
@@ -178,8 +178,8 @@ public void testCountryWithMissingLocation() throws Exception {
public void testAsn() throws Exception {
String ip = "82.171.64.0";
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-ASN.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-ASN.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", ip);
@@ -197,8 +197,8 @@ public void testAsn() throws Exception {
}
public void testAddressIsNotInTheDatabase() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "127.0.0.1");
@@ -211,8 +211,8 @@ public void testAddressIsNotInTheDatabase() throws Exception {
* Don't silently do DNS lookups or anything trappy on bogus data
*/
public void testInvalid() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", "www.google.com");
@@ -222,8 +222,8 @@ public void testInvalid() throws Exception {
}
public void testListAllValid() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("8.8.8.8", "82.171.64.0"));
@@ -242,8 +242,8 @@ public void testListAllValid() throws Exception {
}
public void testListPartiallyValid() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1"));
@@ -262,8 +262,8 @@ public void testListPartiallyValid() throws Exception {
}
public void testListNoMatches() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.1"));
@@ -274,8 +274,8 @@ public void testListNoMatches() throws Exception {
}
public void testListFirstOnly() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1"));
@@ -292,8 +292,8 @@ public void testListFirstOnly() throws Exception {
}
public void testListFirstOnlyNoMatches() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.2"));
@@ -304,8 +304,8 @@ public void testListFirstOnlyNoMatches() throws Exception {
}
public void testInvalidDatabase() throws Exception {
- GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field",
- loader("/GeoLite2-City.mmdb"), () -> false, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true);
+ GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), null, "source_field", loader("/GeoLite2-City.mmdb"),
+ () -> false, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, true, "filename");
Map document = new HashMap<>();
document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.2"));
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index 2618c65c16a8a..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bcba5c75d867341e8de13ed960abe2fe425c28b6
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..fe9352ca233c8
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+3bc34dea0b46e0f6429b054f848e2611d6e1d3e7
\ No newline at end of file
diff --git a/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index e266fe4cc804a..0000000000000
--- a/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6664f830615782d0a3f91f9d1bdb003e7f252cc1
\ No newline at end of file
diff --git a/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..1c96b8e73ff65
--- /dev/null
+++ b/modules/legacy-geo/licenses/lucene-spatial-extras-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+d9f8584667012bf09c446512ee25191b5d00ceed
\ No newline at end of file
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java
index 73d69ea47a3b7..2b7a6df40a3af 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -62,7 +61,7 @@ public void testDCGAt() {
for (int i = 0; i < 6; i++) {
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
assertEquals(EXPECTED_DCG, dcg.evaluate("id", hits, rated).metricScore(), DELTA);
@@ -112,7 +111,7 @@ public void testDCGAtSixMissingRatings() {
}
}
hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, rated);
@@ -169,7 +168,7 @@ public void testDCGAtFourMoreRatings() {
SearchHit[] hits = new SearchHit[4];
for (int i = 0; i < 4; i++) {
hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
DiscountedCumulativeGain dcg = new DiscountedCumulativeGain();
EvalQueryQuality result = dcg.evaluate("id", hits, ratedDocs);
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java
index d0d27822c9332..adaa984b4a817 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xcontent.NamedXContentRegistry;
@@ -46,7 +45,7 @@ public static EvalQueryQuality randomEvalQueryQuality() {
for (int i = 0; i < numberOfSearchHits; i++) {
RatedSearchHit ratedSearchHit = RatedSearchHitTests.randomRatedSearchHit();
// we need to associate each hit with an index name otherwise rendering will not work
- ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null, OriginalIndices.NONE));
+ ratedSearchHit.getSearchHit().shard(new SearchShardTarget("_na_", new ShardId("index", "_na_", 0), null));
ratedHits.add(ratedSearchHit);
}
EvalQueryQuality evalQueryQuality = new EvalQueryQuality(randomAlphaOfLength(10),
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java
index cc43c9ae2f9d6..1447419366c8b 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xcontent.ToXContent;
@@ -105,7 +104,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva
rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
}
hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
return hits;
}
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java
index 8bb61ccaa62d6..090041b80d56c 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xcontent.ToXContent;
@@ -193,7 +192,7 @@ private static SearchHit[] createSearchHits(int from, int to, String index) {
SearchHit[] hits = new SearchHit[to + 1 - from];
for (int i = from; i <= to; i++) {
hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
}
return hits;
}
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java
index 91010256808dd..d86b1b89d4400 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xcontent.ToXContent;
@@ -102,7 +101,7 @@ public void testIgnoreUnlabeled() {
// add an unlabeled search hit
SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3);
searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap());
- searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated);
assertEquals((double) 2 / 3, evaluated.metricScore(), 0.00001);
@@ -121,7 +120,7 @@ public void testNoRatedDocs() throws Exception {
SearchHit[] hits = new SearchHit[5];
for (int i = 0; i < 5; i++) {
hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList());
assertEquals(0.0d, evaluated.metricScore(), 0.00001);
@@ -243,7 +242,7 @@ private static SearchHit[] toSearchHits(List rated, String index)
SearchHit[] hits = new SearchHit[rated.size()];
for (int i = 0; i < rated.size(); i++) {
hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
}
return hits;
}
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java
index 4b3e1190fd50e..d8b61ad190ef6 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java
@@ -9,7 +9,6 @@
package org.elasticsearch.index.rankeval;
import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -58,7 +57,7 @@ public class RankEvalResponseTests extends ESTestCase {
new IllegalArgumentException("Closed resource", new RuntimeException("Resource")),
new SearchPhaseExecutionException("search", "all shards failed",
new ShardSearchFailure[] { new ShardSearchFailure(new ParsingException(1, 2, "foobar", null),
- new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null, OriginalIndices.NONE)) }),
+ new SearchShardTarget("node_1", new ShardId("foo", "_na_", 1), null)) }),
new ElasticsearchException("Parsing failed",
new ParsingException(9, 42, "Wrong state", new NullPointerException("Unexpected null value"))) };
@@ -169,7 +168,7 @@ public void testToXContent() throws IOException {
private static RatedSearchHit searchHit(String index, int docId, Integer rating) {
SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap());
- hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
+ hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
hit.score(1.0f);
return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
}
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java
index ec391438cd630..a129002b7d450 100644
--- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java
+++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.index.rankeval;
-import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.xcontent.ToXContent;
@@ -104,7 +103,7 @@ public void testNoRatedDocs() throws Exception {
SearchHit[] hits = new SearchHit[k];
for (int i = 0; i < k; i++) {
hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null));
}
EvalQueryQuality evaluated = (new RecallAtK()).evaluate("id", hits, Collections.emptyList());
@@ -226,7 +225,7 @@ private static SearchHit[] toSearchHits(List rated, String index)
SearchHit[] hits = new SearchHit[rated.size()];
for (int i = 0; i < rated.size(); i++) {
hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap());
- hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE));
+ hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null));
}
return hits;
}
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index 0e519ea94d9c8..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-867be6eb541df21622eb235082a8e4208c54eee7
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..8c6f53bfae64d
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+91d7bb3e0f54577efbd08f8527ab705f76b405be
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index 1990cd300b929..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a2785359a7f98243098cdfb1ca472fcf1acfacef
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..dc92541178560
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+2e37f30126a2ddc0231b929b82b2bc2e120af1c2
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index 78484c05f06a6..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-28956e99314340f6db98e8f95c5ef08547324935
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..0a00260a3592c
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+efae6bf6515c2d767491b85a50946279f66e9836
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index d5e21bad61775..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6c1ec0b5af458a39821e86c7b49df25560a055b3
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..cf21023f7195d
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+e50a65b19d22bc98ca57a9426c45138c71787154
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index 5aff83fc47315..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-baf4f8ca0bb5c0cdec2f96e82492b175c6df0c3d
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..4bfee0a4dabe1
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+6de256a78d4c8838d70f4b3720ba9fb6242a20bf
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index f079864826e35..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4e7ca88f59f5df8caff2f3df1277831da315d2e2
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..2c7e1a52098af
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+48030e67171008e5d76fb211091a2bd4d256f097
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-ba75dc5e6bf.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
deleted file mode 100644
index b7fbde8e78cfe..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-ba75dc5e6bf.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b1eb11edd2e148534f2a589aee3268100c2fc6fe
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-cfd9f9f98f7.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
new file mode 100644
index 0000000000000..2d8b0dc4dd2a5
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.0.0-snapshot-cfd9f9f98f7.jar.sha1
@@ -0,0 +1 @@
+21a775f1b3a2912c02ac64ceb23746b259694938
\ No newline at end of file
diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
index a4bdda97635c1..c3f4772a3201e 100644
--- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
+++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java
@@ -26,6 +26,7 @@
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction;
import org.elasticsearch.test.NotEqualMessageBuilder;
import org.elasticsearch.test.XContentTestUtils;
@@ -45,12 +46,17 @@
import java.util.Locale;
import java.util.Map;
import java.util.Set;
+import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.util.stream.IntStream;
+import static io.github.nik9000.mapmatcher.MapMatcher.assertMap;
+import static io.github.nik9000.mapmatcher.MapMatcher.matchesMap;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
+import static java.util.stream.Collectors.toList;
import static org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION;
import static org.elasticsearch.cluster.routing.UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING;
import static org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY;
@@ -128,6 +134,7 @@ public void testSearch() throws Exception {
count,
true,
true,
+ randomBoolean(),
i -> JsonXContent.contentBuilder().startObject()
.field("string", randomAlphaOfLength(10))
.field("int", randomInt(100))
@@ -151,7 +158,7 @@ public void testSearch() throws Exception {
assertStoredBinaryFields(count);
}
- public void testNewReplicasWork() throws Exception {
+ public void testNewReplicas() throws Exception {
if (isRunningAgainstOldCluster()) {
XContentBuilder mappingsAndSettings = jsonBuilder();
mappingsAndSettings.startObject();
@@ -180,16 +187,22 @@ public void testNewReplicasWork() throws Exception {
int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(
- numDocs, true, false, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
+ numDocs,
+ true,
+ false,
+ randomBoolean(),
+ i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
+ );
logger.info("Refreshing [{}]", index);
client().performRequest(new Request("POST", "/" + index + "/_refresh"));
} else {
+ // The test runs with two nodes so this should still go green.
final int numReplicas = 1;
final long startTime = System.currentTimeMillis();
logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index);
Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings");
setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}");
- Response response = client().performRequest(setNumberOfReplicas);
+ client().performRequest(setNumberOfReplicas);
ensureGreenLongWait(index);
@@ -210,6 +223,125 @@ public void testNewReplicasWork() throws Exception {
}
}
+ public void testSearchTimeSeriesMode() throws Exception {
+ assumeTrue("time series mode introduced in 8.0.0", getOldClusterVersion().onOrAfter(Version.V_8_0_0));
+ int numDocs;
+ if (isRunningAgainstOldCluster()) {
+ numDocs = createTimeSeriesModeIndex(1);
+ } else {
+ numDocs = countOfIndexedRandomDocuments();
+ }
+ assertCountAll(numDocs);
+ Request request = new Request("GET", "/" + index + "/_search");
+ XContentBuilder body = jsonBuilder().startObject();
+ body.field("size", 0);
+ body.startObject("aggs").startObject("check").startObject("scripted_metric");
+ {
+ body.field("init_script", "state.timeSeries = new HashSet()");
+ body.field("map_script", "state.timeSeries.add(doc['dim'].value)");
+ body.field("combine_script", "return state.timeSeries");
+ StringBuilder reduceScript = new StringBuilder();
+ reduceScript.append("Set timeSeries = new TreeSet();");
+ reduceScript.append("for (s in states) {");
+ reduceScript.append(" for (ts in s) {");
+ reduceScript.append(" boolean newTs = timeSeries.add(ts);");
+ reduceScript.append(" if (false == newTs) {");
+ reduceScript.append(" throw new IllegalArgumentException(ts + ' appeared in two shards');");
+ reduceScript.append(" }");
+ reduceScript.append(" }");
+ reduceScript.append("}");
+ reduceScript.append("return timeSeries;");
+ body.field("reduce_script", reduceScript.toString());
+ }
+ body.endObject().endObject().endObject();
+ body.endObject();
+ request.setJsonEntity(Strings.toString(body));
+ Map response = entityAsMap(client().performRequest(request));
+ assertMap(response, matchesMap().extraOk()
+ .entry(
+ "hits",
+ matchesMap().extraOk().entry("total", Map.of("value", numDocs, "relation", "eq")))
+ .entry("aggregations", Map.of("check", Map.of("value", IntStream.range(0, 10).mapToObj(i -> "dim" + i).collect(toList()))))
+ );
+ }
+
+ public void testNewReplicasTimeSeriesMode() throws Exception {
+ assumeTrue("time series mode introduced in 8.0.0", getOldClusterVersion().onOrAfter(Version.V_8_0_0));
+ if (isRunningAgainstOldCluster()) {
+ createTimeSeriesModeIndex(0);
+ } else {
+ // The test runs with two nodes so this should still go green.
+ final int numReplicas = 1;
+ final long startTime = System.currentTimeMillis();
+ logger.debug("--> creating [{}] replicas for index [{}]", numReplicas, index);
+ Request setNumberOfReplicas = new Request("PUT", "/" + index + "/_settings");
+ setNumberOfReplicas.setJsonEntity("{ \"index\": { \"number_of_replicas\" : " + numReplicas + " }}");
+ client().performRequest(setNumberOfReplicas);
+
+ ensureGreenLongWait(index);
+
+ logger.debug("--> index [{}] is green, took [{}] ms", index, (System.currentTimeMillis() - startTime));
+ Map recoverRsp = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_recovery")));
+ logger.debug("--> recovery status:\n{}", recoverRsp);
+
+ Set counts = new HashSet<>();
+ for (String node : dataNodes(index, client())) {
+ Request search = new Request("GET", "/" + index + "/_search");
+ search.addParameter("preference", "_only_nodes:" + node);
+ Map responseBody = entityAsMap(client().performRequest(search));
+ assertNoFailures(responseBody);
+ int hits = extractTotalHits(responseBody);
+ counts.add(hits);
+ }
+ assertEquals("All nodes should have a consistent number of documents", 1, counts.size());
+ }
+ }
+
+ private int createTimeSeriesModeIndex(int replicas) throws IOException {
+ XContentBuilder mappingsAndSettings = jsonBuilder();
+ mappingsAndSettings.startObject();
+ {
+ mappingsAndSettings.startObject("settings");
+ mappingsAndSettings.field("number_of_shards", 1);
+ mappingsAndSettings.field("number_of_replicas", replicas);
+ mappingsAndSettings.field("mode", "time_series");
+ mappingsAndSettings.field("routing_path", "dim");
+ mappingsAndSettings.endObject();
+ }
+ {
+ mappingsAndSettings.startObject("mappings");
+ mappingsAndSettings.startObject("properties");
+ {
+ mappingsAndSettings.startObject("@timestamp").field("type", "date").endObject();
+ mappingsAndSettings.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject();
+ }
+ mappingsAndSettings.endObject();
+ mappingsAndSettings.endObject();
+ }
+ mappingsAndSettings.endObject();
+
+ Request createIndex = new Request("PUT", "/" + index);
+ createIndex.setJsonEntity(Strings.toString(mappingsAndSettings));
+ client().performRequest(createIndex);
+
+ int numDocs = randomIntBetween(2000, 3000);
+ long basetime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2021-01-01T00:00:00Z");
+ indexRandomDocuments(
+ numDocs,
+ true,
+ true,
+ false,
+ i -> JsonXContent.contentBuilder()
+ .startObject()
+ .field("@timestamp", basetime + TimeUnit.MINUTES.toMillis(i))
+ .field("dim", "dim" + (i % 10))
+ .endObject()
+ );
+ logger.info("Refreshing [{}]", index);
+ client().performRequest(new Request("POST", "/" + index + "/_refresh"));
+ return numDocs;
+ }
+
public void testClusterState() throws Exception {
if (isRunningAgainstOldCluster()) {
XContentBuilder mappingsAndSettings = jsonBuilder();
@@ -289,7 +421,12 @@ public void testShrink() throws IOException {
numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(
- numDocs, true, true, i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject());
+ numDocs,
+ true,
+ true,
+ randomBoolean(),
+ i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
+ );
ensureGreen(index); // wait for source index to be available on both nodes before starting shrink
@@ -362,6 +499,7 @@ public void testShrinkAfterUpgrade() throws IOException {
numDocs,
true,
true,
+ randomBoolean(),
i -> JsonXContent.contentBuilder().startObject().field("field", "value").endObject()
);
} else {
@@ -459,14 +597,18 @@ public void testRollover() throws IOException {
assertEquals(expectedCount, extractTotalHits(count));
}
+ void assertCountAll(int count) throws IOException {
+ Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
+ assertNoFailures(response);
+ int numDocs = extractTotalHits(response);
+ logger.info("Found {} in old index", numDocs);
+ assertEquals(count, numDocs);
+ }
+
void assertBasicSearchWorks(int count) throws IOException {
logger.info("--> testing basic search");
{
- Map response = entityAsMap(client().performRequest(new Request("GET", "/" + index + "/_search")));
- assertNoFailures(response);
- int numDocs = extractTotalHits(response);
- logger.info("Found {} in old index", numDocs);
- assertEquals(count, numDocs);
+ assertCountAll(count);
}
logger.info("--> testing basic search with sort");
@@ -672,7 +814,7 @@ public void testRecovery() throws Exception {
}
final String mappings = randomBoolean() ? "\"_source\": { \"enabled\": false}" : null;
createIndex(index, settings.build(), mappings);
- indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(count, true, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
// make sure all recoveries are done
ensureGreen(index);
@@ -689,6 +831,7 @@ public void testRecovery() throws Exception {
count / 10,
false, // flushing here would invalidate the whole thing
false,
+ true,
i -> jsonBuilder().startObject().field("field", "value").endObject()
);
}
@@ -782,7 +925,7 @@ public void testSnapshotRestore() throws IOException {
settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean());
}
createIndex(index, settings.build());
- indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(count, true, true, randomBoolean(), i -> jsonBuilder().startObject().field("field", "value").endObject());
} else {
count = countOfIndexedRandomDocuments();
}
@@ -1136,18 +1279,17 @@ private void checkSnapshot(final String snapshotName, final int count, final Ver
}
}
- // TODO tests for upgrades after shrink. We've had trouble with shrink in the past.
-
private void indexRandomDocuments(
final int count,
final boolean flushAllowed,
final boolean saveInfo,
+ final boolean specifyId,
final CheckedFunction docSupplier)
throws IOException {
logger.info("Indexing {} random documents", count);
for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i);
- Request createDocument = new Request("POST", "/" + index + "/_doc/" + i);
+ Request createDocument = new Request("POST", "/" + index + "/_doc/" + (specifyId ? i : ""));
createDocument.setJsonEntity(Strings.toString(docSupplier.apply(i)));
client().performRequest(createDocument);
if (rarely()) {
@@ -1489,7 +1631,13 @@ public void testEnableSoftDeletesOnRestore() throws Exception {
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
- indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(
+ numDocs,
+ true,
+ true,
+ randomBoolean(),
+ i -> jsonBuilder().startObject().field("field", "value").endObject()
+ );
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{
@@ -1543,7 +1691,13 @@ public void testForbidDisableSoftDeletesOnRestore() throws Exception {
createIndex(index, settings.build());
ensureGreen(index);
int numDocs = randomIntBetween(0, 100);
- indexRandomDocuments(numDocs, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject());
+ indexRandomDocuments(
+ numDocs,
+ true,
+ true,
+ randomBoolean(),
+ i -> jsonBuilder().startObject().field("field", "value").endObject()
+ );
// create repo
XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject();
{
@@ -1629,5 +1783,4 @@ public static void assertNumHits(String index, int numHits, int totalShards) thr
assertThat(XContentMapValues.extractValue("_shards.successful", resp), equalTo(totalShards));
assertThat(extractTotalHits(resp), equalTo(numHits));
}
-
}
diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/100_tsdb.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/100_tsdb.yml
index 1c7d4aeee8a82..a3c73d7f434d6 100644
--- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/100_tsdb.yml
+++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/100_tsdb.yml
@@ -12,6 +12,7 @@ setup:
settings:
index:
mode: time_series
+ routing_path: [metricset, k8s.pod.uid]
number_of_replicas: 0
number_of_shards: 2
mappings:
diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml
index 5aa5bd659a1c2..4f46d103c238f 100644
--- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml
+++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/remote_cluster/10_basic.yml
@@ -166,6 +166,7 @@ tsdb:
settings:
index:
mode: time_series
+ routing_path: [metricset, k8s.pod.uid]
number_of_replicas: 0
number_of_shards: 2
mappings:
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java
index 5747bd6c41647..c0fb264634712 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.packaging.test;
-import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.ServerUtils;
@@ -45,15 +44,10 @@ public static void filterDistros() {
}
public void test10Install() throws Exception {
- // security config tool would run as administrator and change the owner of the config file, which is elasticsearch
- // We can re-enable this when #77231 is merged, but the rest of the tests in class are also currently muted on windows
- assumeTrue("Don't run on windows", distribution.platform != Distribution.Platform.WINDOWS);
installation = installArchive(sh, distribution());
// Enable security for these tests only where it is necessary, until we can enable it for all
- // TODO: Remove this when https://github.com/elastic/elasticsearch/pull/77231 is merged
ServerUtils.enableSecurityFeatures(installation);
verifyArchiveInstallation(installation, distribution());
- installation.executables().securityConfigTool.run("");
}
public void test20NoAutoGenerationWhenAutoConfigurationDisabled() throws Exception {
@@ -88,12 +82,7 @@ public void test40VerifyAutogeneratedCredentials() throws Exception {
assertThat(parseElasticPassword(result.stdout), notNullValue());
assertThat(parseKibanaToken(result.stdout), notNullValue());
assertThat(parseFingerprint(result.stdout), notNullValue());
- String response = ServerUtils.makeRequest(
- Request.Get("https://localhost:9200"),
- "elastic",
- parseElasticPassword(result.stdout),
- ServerUtils.getCaCert(installation.config)
- );
+ String response = makeRequestAsElastic("https://localhost:9200", parseElasticPassword(result.stdout));
assertThat(response, containsString("You Know, for Search"));
}
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java
index b098b5f7403d4..72f4a37ce1853 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java
@@ -9,10 +9,12 @@
package org.elasticsearch.packaging.test;
import org.apache.http.client.fluent.Request;
+import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Platforms;
import org.elasticsearch.packaging.util.ServerUtils;
+import org.elasticsearch.packaging.util.Shell;
import org.elasticsearch.packaging.util.Shell.Result;
import org.junit.BeforeClass;
@@ -20,9 +22,13 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
+import java.util.Locale;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
import static java.nio.file.StandardOpenOption.APPEND;
import static java.nio.file.StandardOpenOption.CREATE;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER;
import static org.elasticsearch.packaging.util.Archives.installArchive;
import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation;
@@ -30,7 +36,6 @@
import static org.elasticsearch.packaging.util.FileUtils.append;
import static org.elasticsearch.packaging.util.FileUtils.mv;
import static org.elasticsearch.packaging.util.FileUtils.rm;
-import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
@@ -50,6 +55,23 @@ public static void filterDistros() {
public void test10Install() throws Exception {
installation = installArchive(sh, distribution());
verifyArchiveInstallation(installation, distribution());
+ setFileSuperuser("test_superuser", "test_superuser_password");
+ // See https://bugs.openjdk.java.net/browse/JDK-8267701. In short, when generating PKCS#12 keystores in JDK 12 and later
+ // the MAC algorithm used for integrity protection is incompatible with any previous JDK version. This affects us as we generate
+ // PKCS12 keystores on startup ( with the bundled JDK ) but we also need to run certain tests with a JDK other than the bundled
+ // one, and we still use JDK11 for that.
+ // We're manually setting the HMAC algorithm to something that is compatible with previous versions here. Moving forward, when
+ // min compat JDK is JDK17, we can remove this hack and use the standard security properties file.
+ final Path jdkSecurityProperties = installation.bundledJdk.resolve("conf").resolve("security").resolve("java.security");
+ List lines;
+ try (Stream allLines = Files.readAllLines(jdkSecurityProperties).stream()) {
+ lines = allLines.filter(s -> s.startsWith("#keystore.pkcs12.macAlgorithm") == false)
+ .filter(s -> s.startsWith("#keystore.pkcs12.macIterationCount") == false)
+ .collect(Collectors.toList());
+ }
+ lines.add("keystore.pkcs12.macAlgorithm = HmacPBESHA1");
+ lines.add("keystore.pkcs12.macIterationCount = 100000");
+ Files.write(jdkSecurityProperties, lines, TRUNCATE_EXISTING);
}
public void test20PluginsListWithNoPlugins() throws Exception {
@@ -70,7 +92,7 @@ public void test30MissingBundledJdk() throws Exception {
mv(installation.bundledJdk, relocatedJdk);
}
// ask for elasticsearch version to quickly exit if java is actually found (ie test failure)
- final Result runResult = sh.runIgnoreExitCode(bin.elasticsearch.toString() + " -v");
+ final Result runResult = sh.runIgnoreExitCode(bin.elasticsearch.toString() + " -V");
assertThat(runResult.exitCode, is(1));
assertThat(runResult.stderr, containsString("could not find java in bundled JDK"));
} finally {
@@ -107,28 +129,170 @@ public void test32SpecialCharactersInJdkPath() throws Exception {
}
}
- public void test50StartAndStop() throws Exception {
- // cleanup from previous test
- rm(installation.config("elasticsearch.keystore"));
+ public void test40AutoconfigurationNotTriggeredWhenNodeIsMeantToJoinExistingCluster() throws Exception {
+ // auto-config requires that the archive owner and the process user be the same
+ Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
+ FileUtils.assertPathsDoNotExist(installation.data);
+ ServerUtils.addSettingToExistingConfiguration(installation, "discovery.seed_hosts", "[\"127.0.0.1:9300\"]");
+ startElasticsearch();
+ verifySecurityNotAutoConfigured(installation);
+ stopElasticsearch();
+ ServerUtils.removeSettingFromExistingConfiguration(installation, "discovery.seed_hosts");
+ Platforms.onWindows(() -> sh.chown(installation.config));
+ FileUtils.rm(installation.data);
+ }
+
+ public void test41AutoconfigurationNotTriggeredWhenNodeCannotContainData() throws Exception {
+ // auto-config requires that the archive owner and the process user be the same
+ Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
+ ServerUtils.addSettingToExistingConfiguration(installation, "node.roles", "[\"voting_only\", \"master\"]");
+ startElasticsearch();
+ verifySecurityNotAutoConfigured(installation);
+ stopElasticsearch();
+ ServerUtils.removeSettingFromExistingConfiguration(installation, "node.roles");
+ Platforms.onWindows(() -> sh.chown(installation.config));
+ FileUtils.rm(installation.data);
+ }
+
+ public void test42AutoconfigurationNotTriggeredWhenNodeCannotBecomeMaster() throws Exception {
+ // auto-config requires that the archive owner and the process user be the same
+ Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
+ ServerUtils.addSettingToExistingConfiguration(installation, "node.roles", "[\"ingest\"]");
+ startElasticsearch();
+ verifySecurityNotAutoConfigured(installation);
+ stopElasticsearch();
+ ServerUtils.removeSettingFromExistingConfiguration(installation, "node.roles");
+ Platforms.onWindows(() -> sh.chown(installation.config));
+ FileUtils.rm(installation.data);
+ }
+
+ public void test43AutoconfigurationNotTriggeredWhenTlsAlreadyConfigured() throws Exception {
+ // auto-config requires that the archive owner and the process user be the same
+ Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
+ ServerUtils.addSettingToExistingConfiguration(installation, "xpack.security.http.ssl.enabled", "false");
+ startElasticsearch();
+ verifySecurityNotAutoConfigured(installation);
+ stopElasticsearch();
+ ServerUtils.removeSettingFromExistingConfiguration(installation, "xpack.security.http.ssl.enabled");
+ Platforms.onWindows(() -> sh.chown(installation.config));
+ FileUtils.rm(installation.data);
+ }
+ public void test44AutoConfigurationNotTriggeredOnNotWriteableConfDir() throws Exception {
+ Platforms.onWindows(() -> {
+ // auto-config requires that the archive owner and the process user be the same
+ sh.chown(installation.config, installation.getOwner());
+ // prevent modifications to the config directory
+ sh.run(
+ String.format(
+ Locale.ROOT,
+ "$ACL = Get-ACL -Path '%s'; "
+ + "$AccessRule = New-Object System.Security.AccessControl.FileSystemAccessRule('%s','Write','Deny'); "
+ + "$ACL.SetAccessRule($AccessRule); "
+ + "$ACL | Set-Acl -Path '%s';",
+ installation.config,
+ installation.getOwner(),
+ installation.config
+ )
+ );
+ });
+ Platforms.onLinux(() -> { sh.run("chmod u-w " + installation.config); });
try {
startElasticsearch();
- } catch (Exception e) {
- if (Files.exists(installation.home.resolve("elasticsearch.pid"))) {
- String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim();
- logger.info("Dumping jstack of elasticsearch process ({}) that failed to start", pid);
- sh.runIgnoreExitCode("jstack " + pid);
- }
- throw e;
+ verifySecurityNotAutoConfigured(installation);
+ // the node still starts, with Security enabled, but without TLS auto-configured (so only authentication)
+ runElasticsearchTests();
+ stopElasticsearch();
+ } finally {
+ Platforms.onWindows(() -> {
+ sh.run(
+ String.format(
+ Locale.ROOT,
+ "$ACL = Get-ACL -Path '%s'; "
+ + "$AccessRule = New-Object System.Security.AccessControl.FileSystemAccessRule('%s','Write','Deny'); "
+ + "$ACL.RemoveAccessRule($AccessRule); "
+ + "$ACL | Set-Acl -Path '%s';",
+ installation.config,
+ installation.getOwner(),
+ installation.config
+ )
+ );
+ sh.chown(installation.config);
+ });
+ Platforms.onLinux(() -> { sh.run("chmod u+w " + installation.config); });
+ FileUtils.rm(installation.data);
}
+ }
- assertThat(installation.logs.resolve("gc.log"), fileExists());
- ServerUtils.runElasticsearchTests();
+ public void test50AutoConfigurationFailsWhenCertificatesNotGenerated() throws Exception {
+ // auto-config requires that the archive owner and the process user be the same
+ Platforms.onWindows(() -> sh.chown(installation.config, installation.getOwner()));
+ FileUtils.assertPathsDoNotExist(installation.data);
+ Path tempDir = createTempDir("bc-backup");
+ Files.move(
+ installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk15on-1.64.jar"),
+ tempDir.resolve("bcprov-jdk15on-1.64.jar")
+ );
+ Shell.Result result = runElasticsearchStartCommand(null, false, false);
+ assertElasticsearchFailure(result, "java.lang.NoClassDefFoundError: org/bouncycastle/asn1/x509/GeneralName", null);
+ Files.move(
+ tempDir.resolve("bcprov-jdk15on-1.64.jar"),
+ installation.lib.resolve("tools").resolve("security-cli").resolve("bcprov-jdk15on-1.64.jar")
+ );
+ Platforms.onWindows(() -> sh.chown(installation.config));
+ FileUtils.rm(tempDir);
+ }
+
+ public void test51AutoConfigurationWithPasswordProtectedKeystore() throws Exception {
+ /* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
+ assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
+ FileUtils.assertPathsDoNotExist(installation.data);
+ final Installation.Executables bin = installation.executables();
+ final String password = "some-keystore-password";
+ Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + password + "\n"));
+ Platforms.onWindows(
+ () -> {
+ sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + password + "'} | " + bin.keystoreTool + " passwd");
+ }
+ );
+ Shell.Result result = runElasticsearchStartCommand("some-wrong-password-here", false, false);
+ assertElasticsearchFailure(result, "Provided keystore password was incorrect", null);
+ verifySecurityNotAutoConfigured(installation);
+
+ awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, true));
+ verifySecurityAutoConfigured(installation);
stopElasticsearch();
+
+ // Revert to an empty password for the rest of the tests
+ Platforms.onLinux(() -> bin.keystoreTool.run("passwd", password + "\n" + "" + "\n"));
+ Platforms.onWindows(
+ () -> sh.run("Invoke-Command -ScriptBlock {echo '" + password + "'; echo '" + "" + "'} | " + bin.keystoreTool + " passwd")
+ );
+ }
+
+ public void test52AutoConfigurationOnWindows() throws Exception {
+ assumeTrue(
+ "run this in place of test51AutoConfigurationWithPasswordProtectedKeystore on windows",
+ distribution.platform == Distribution.Platform.WINDOWS
+ );
+ sh.chown(installation.config, installation.getOwner());
+ FileUtils.assertPathsDoNotExist(installation.data);
+
+ startElasticsearch();
+ verifySecurityAutoConfigured(installation);
+ stopElasticsearch();
+ sh.chown(installation.config);
}
- public void test51EsJavaHomeOverride() throws Exception {
+ public void test60StartAndStop() throws Exception {
+ startElasticsearch();
+ assertThat(installation.logs.resolve("gc.log"), fileExists());
+ runElasticsearchTests();
+ stopElasticsearch();
+ }
+
+ public void test61EsJavaHomeOverride() throws Exception {
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
sh.getEnv().put("ES_JAVA_HOME", systemJavaHome1);
@@ -139,14 +303,14 @@ public void test51EsJavaHomeOverride() throws Exception {
});
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
String systemJavaHome1 = sh.getEnv().get("ES_JAVA_HOME");
assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(systemJavaHome1));
}
- public void test51JavaHomeIgnored() throws Exception {
+ public void test62JavaHomeIgnored() throws Exception {
assumeTrue(distribution().hasJdk);
Platforms.onLinux(() -> {
String systemJavaHome1 = sh.run("echo $SYSTEM_JAVA_HOME").stdout.trim();
@@ -166,7 +330,7 @@ public void test51JavaHomeIgnored() throws Exception {
assertThat(runResult.stderr, containsString("warning: ignoring JAVA_HOME=" + systemJavaHome + "; using bundled JDK"));
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
// if the JDK started with the bundled JDK then we know that JAVA_HOME was ignored
@@ -174,7 +338,7 @@ public void test51JavaHomeIgnored() throws Exception {
assertThat(FileUtils.slurpAllLogs(installation.logs, "elasticsearch.log", "*.log.gz"), containsString(bundledJdk));
}
- public void test52BundledJdkRemoved() throws Exception {
+ public void test63BundledJdkRemoved() throws Exception {
assumeThat(distribution().hasJdk, is(true));
Path relocatedJdk = installation.bundledJdk.getParent().resolve("jdk.relocated");
@@ -190,7 +354,7 @@ public void test52BundledJdkRemoved() throws Exception {
});
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
String systemJavaHome1 = sh.getEnv().get("ES_JAVA_HOME");
@@ -200,7 +364,7 @@ public void test52BundledJdkRemoved() throws Exception {
}
}
- public void test53JavaHomeWithSpecialCharacters() throws Exception {
+ public void test64JavaHomeWithSpecialCharacters() throws Exception {
Platforms.onWindows(() -> {
String javaPath = "C:\\Program Files (x86)\\java";
try {
@@ -211,7 +375,7 @@ public void test53JavaHomeWithSpecialCharacters() throws Exception {
// verify ES can start, stop and run plugin list
startElasticsearch();
-
+ runElasticsearchTests();
stopElasticsearch();
String pluginListCommand = installation.bin + "/elasticsearch-plugin list";
@@ -236,7 +400,7 @@ public void test53JavaHomeWithSpecialCharacters() throws Exception {
// verify ES can start, stop and run plugin list
startElasticsearch();
-
+ runElasticsearchTests();
stopElasticsearch();
String pluginListCommand = installation.bin + "/elasticsearch-plugin list";
@@ -248,15 +412,13 @@ public void test53JavaHomeWithSpecialCharacters() throws Exception {
});
}
- public void test54ForceBundledJdkEmptyJavaHome() throws Exception {
+ public void test65ForceBundledJdkEmptyJavaHome() throws Exception {
assumeThat(distribution().hasJdk, is(true));
- // cleanup from previous test
- rm(installation.config("elasticsearch.keystore"));
sh.getEnv().put("ES_JAVA_HOME", "");
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
}
@@ -265,25 +427,28 @@ public void test54ForceBundledJdkEmptyJavaHome() throws Exception {
*
* This test purposefully ignores the existence of the Windows POSIX sub-system.
*/
- public void test55InstallUnderPosix() throws Exception {
- assumeTrue("Only run this test on Unix-like systems", Platforms.WINDOWS == false);
+ public void test66InstallUnderPosix() throws Exception {
sh.getEnv().put("POSIXLY_CORRECT", "1");
startElasticsearch();
+ runElasticsearchTests();
stopElasticsearch();
}
public void test70CustomPathConfAndJvmOptions() throws Exception {
-
withCustomConfig(tempConf -> {
setHeap("512m", tempConf);
final List<String> jvmOptions = List.of("-Dlog4j2.disable.jmx=true");
Files.write(tempConf.resolve("jvm.options"), jvmOptions, CREATE, APPEND);
sh.getEnv().put("ES_JAVA_OPTS", "-XX:-UseCompressedOops");
-
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = ServerUtils.makeRequest(
+ Request.Get("https://localhost:9200/_nodes"),
+ "test_superuser",
+ "test_superuser_password",
+ ServerUtils.getCaCert(tempConf)
+ );
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
@@ -299,7 +464,7 @@ public void test71CustomJvmOptionsDirectoryFile() throws Exception {
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
stopElasticsearch();
@@ -322,7 +487,7 @@ public void test72CustomJvmOptionsDirectoryFilesAreProcessedInSortedOrder() thro
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
@@ -339,21 +504,40 @@ public void test73CustomJvmOptionsDirectoryFilesWithoutOptionsExtensionIgnored()
append(jvmOptionsIgnored, "-Xthis_is_not_a_valid_option\n");
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
} finally {
rm(jvmOptionsIgnored);
}
}
- public void test80RelativePathConf() throws Exception {
+ public void test74CustomJvmOptionsTotalMemoryOverride() throws Exception {
+ final Path heapOptions = installation.config(Paths.get("jvm.options.d", "total_memory.options"));
+ try {
+ setHeap(null); // delete default options
+ // Work as though total system memory is 850MB
+ append(heapOptions, "-Des.total_memory_bytes=891289600\n");
+
+ startElasticsearch();
+
+ final String nodesStatsResponse = makeRequest("https://localhost:9200/_nodes/stats");
+ assertThat(nodesStatsResponse, containsString("\"adjusted_total_in_bytes\":891289600"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
+ // 40% of 850MB
+ assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":356515840"));
+
+ stopElasticsearch();
+ } finally {
+ rm(heapOptions);
+ }
+ }
+ public void test80RelativePathConf() throws Exception {
withCustomConfig(tempConf -> {
append(tempConf.resolve("elasticsearch.yml"), "node.name: relative");
-
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"name\":\"relative\""));
stopElasticsearch();
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java
index 57a5ed11fcee7..c5a7694c65d54 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java
@@ -20,10 +20,12 @@
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
+import java.util.stream.Collectors;
import static com.carrotsearch.randomizedtesting.RandomizedTest.assumeFalse;
import static java.nio.file.StandardOpenOption.APPEND;
import static java.nio.file.StandardOpenOption.CREATE;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File;
import static org.elasticsearch.packaging.util.FileMatcher.file;
import static org.elasticsearch.packaging.util.FileMatcher.p600;
@@ -46,8 +48,8 @@ public static void cleanupFiles() {
public void test10Install() throws Exception {
install();
- // Enable security for this test only where it is necessary, until we can enable it for all
- ServerUtils.enableSecurityFeatures(installation);
+ // Disable security auto-configuration as we want to generate keys/certificates manually here
+ ServerUtils.disableSecurityAutoConfiguration(installation);
}
public void test20Help() {
@@ -95,7 +97,10 @@ public void test40RunWithCert() throws Exception {
final String certPath = escapePath(installation.config("certs/mynode/mynode.crt"));
final String caCertPath = escapePath(installation.config("certs/ca/ca.crt"));
- List<String> yaml = List.of(
+ // Replace possibly auto-configured TLS settings with ones pointing to the material generated with certgen
+ // (we do disable auto-configuration above, but for packaged installations TLS auto-config happens at installation time and is
+ // not affected by this setting)
+ final List<String> newTlsConfig = List.of(
"node.name: mynode",
"xpack.security.transport.ssl.key: " + keyPath,
"xpack.security.transport.ssl.certificate: " + certPath,
@@ -106,8 +111,18 @@ public void test40RunWithCert() throws Exception {
"xpack.security.transport.ssl.enabled: true",
"xpack.security.http.ssl.enabled: true"
);
-
- Files.write(installation.config("elasticsearch.yml"), yaml, CREATE, APPEND);
+ List<String> existingConfig = Files.readAllLines(installation.config("elasticsearch.yml"));
+ List<String> newConfig = existingConfig.stream()
+ .filter(l -> l.startsWith("node.name:") == false)
+ .filter(l -> l.startsWith("xpack.security.transport.ssl.") == false)
+ .filter(l -> l.startsWith("xpack.security.http.ssl.") == false)
+ .filter(l -> l.startsWith("xpack.security.enabled") == false)
+ .filter(l -> l.startsWith("http.host") == false)
+ .filter(l -> l.startsWith("cluster.initial_master_nodes") == false)
+ .collect(Collectors.toList());
+ newConfig.addAll(newTlsConfig);
+
+ Files.write(installation.config("elasticsearch.yml"), newConfig, TRUNCATE_EXISTING);
assertWhileRunning(() -> {
final String password = setElasticPassword();
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java
index 185e5807ff5c1..6dd68416c4298 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java
@@ -11,10 +11,10 @@
import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Platforms;
+import org.elasticsearch.packaging.util.ServerUtils;
import org.junit.Before;
import static org.elasticsearch.packaging.util.FileUtils.append;
-import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assume.assumeFalse;
@@ -27,9 +27,10 @@ public void filterDistros() {
public void test10Install() throws Exception {
install();
+ setFileSuperuser("test_superuser", "test_superuser_password");
}
- public void test60HostnameSubstitution() throws Exception {
+ public void test20HostnameSubstitution() throws Exception {
String hostnameKey = Platforms.WINDOWS ? "COMPUTERNAME" : "HOSTNAME";
sh.getEnv().put(hostnameKey, "mytesthost");
withCustomConfig(confPath -> {
@@ -37,10 +38,18 @@ public void test60HostnameSubstitution() throws Exception {
if (distribution.isPackage()) {
append(installation.envFile, "HOSTNAME=mytesthost");
}
+ // security auto-config requires that the archive owner and the node process user be the same
+ Platforms.onWindows(() -> sh.chown(confPath, installation.getOwner()));
assertWhileRunning(() -> {
- final String nameResponse = makeRequest(Request.Get("http://localhost:9200/_cat/nodes?h=name")).strip();
+ final String nameResponse = ServerUtils.makeRequest(
+ Request.Get("https://localhost:9200/_cat/nodes?h=name"),
+ "test_superuser",
+ "test_superuser_password",
+ ServerUtils.getCaCert(confPath)
+ ).strip();
assertThat(nameResponse, equalTo("mytesthost"));
});
+ Platforms.onWindows(() -> sh.chown(confPath));
});
}
}
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
index f6d5a4a845430..7c0ab7f9a22e5 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java
@@ -42,6 +42,7 @@
import static org.elasticsearch.packaging.util.FileMatcher.p600;
import static org.elasticsearch.packaging.util.FileMatcher.p644;
import static org.elasticsearch.packaging.util.FileMatcher.p660;
+import static org.elasticsearch.packaging.util.FileMatcher.p750;
import static org.elasticsearch.packaging.util.FileMatcher.p755;
import static org.elasticsearch.packaging.util.FileMatcher.p775;
import static org.elasticsearch.packaging.util.FileUtils.append;
@@ -49,6 +50,7 @@
import static org.elasticsearch.packaging.util.docker.Docker.chownWithPrivilegeEscalation;
import static org.elasticsearch.packaging.util.docker.Docker.copyFromContainer;
import static org.elasticsearch.packaging.util.docker.Docker.existsInContainer;
+import static org.elasticsearch.packaging.util.docker.Docker.findInContainer;
import static org.elasticsearch.packaging.util.docker.Docker.getContainerLogs;
import static org.elasticsearch.packaging.util.docker.Docker.getImageHealthcheck;
import static org.elasticsearch.packaging.util.docker.Docker.getImageLabels;
@@ -91,7 +93,6 @@
*/
public class DockerTests extends PackagingTestCase {
private Path tempDir;
- private static final String USERNAME = "elastic";
private static final String PASSWORD = "nothunter2";
@BeforeClass
@@ -114,16 +115,17 @@ public void teardownTest() {
/**
* Checks that the Docker image can be run, and that it passes various checks.
*/
- public void test010Install() {
+ public void test010Install() throws Exception {
verifyContainerInstallation(installation);
+ verifySecurityAutoConfigured(installation);
}
/**
* Check that security is enabled
*/
public void test011SecurityEnabledStatus() throws Exception {
- waitForElasticsearch(installation, USERNAME, PASSWORD);
- final int statusCode = ServerUtils.makeRequestAndGetStatus(Request.Get("http://localhost:9200"), USERNAME, "wrong_password", null);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
+ final int statusCode = makeRequestAsElastic("wrong_password");
assertThat(statusCode, equalTo(401));
}
@@ -218,7 +220,7 @@ public void test041AmazonCaCertsAreInTheKeystore() {
* Check that when the keystore is created on startup, it is created with the correct permissions.
*/
public void test042KeystorePermissionsAreCorrect() {
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
assertThat(installation.config("elasticsearch.keystore"), file(p660));
}
@@ -228,11 +230,11 @@ public void test042KeystorePermissionsAreCorrect() {
* is minimally functional.
*/
public void test050BasicApiTests() throws Exception {
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
assertTrue(existsInContainer(installation.logs.resolve("gc.log")));
- ServerUtils.runElasticsearchTests(USERNAME, PASSWORD);
+ runElasticsearchTestsAsElastic(PASSWORD);
}
/**
@@ -240,7 +242,11 @@ public void test050BasicApiTests() throws Exception {
*/
public void test070BindMountCustomPathConfAndJvmOptions() throws Exception {
copyFromContainer(installation.config("elasticsearch.yml"), tempDir.resolve("elasticsearch.yml"));
+ copyFromContainer(installation.config("elasticsearch.keystore"), tempDir.resolve("elasticsearch.keystore"));
copyFromContainer(installation.config("log4j2.properties"), tempDir.resolve("log4j2.properties"));
+ final Path autoConfigurationDir = findInContainer(installation.config, "d", "\"tls_auto_config_initial_node_*\"");
+ final String autoConfigurationDirName = autoConfigurationDir.getFileName().toString();
+ copyFromContainer(autoConfigurationDir, tempDir.resolve(autoConfigurationDirName));
// we have to disable Log4j from using JMX lest it will hit a security
// manager exception before we have configured logging; this will fail
@@ -252,7 +258,9 @@ public void test070BindMountCustomPathConfAndJvmOptions() throws Exception {
Files.setPosixFilePermissions(tempDir, fromString("rwxrwxrwx"));
// These permissions are necessary to run the tests under Vagrant
Files.setPosixFilePermissions(tempDir.resolve("elasticsearch.yml"), p644);
+ Files.setPosixFilePermissions(tempDir.resolve("elasticsearch.keystore"), p644);
Files.setPosixFilePermissions(tempDir.resolve("log4j2.properties"), p644);
+ Files.setPosixFilePermissions(tempDir.resolve(autoConfigurationDirName), p750);
// Restart the container
runContainer(
@@ -262,9 +270,9 @@ public void test070BindMountCustomPathConfAndJvmOptions() throws Exception {
.envVar("ELASTIC_PASSWORD", PASSWORD)
);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
- final JsonNode nodes = getJson("/_nodes", USERNAME, PASSWORD).get("nodes");
+ final JsonNode nodes = getJson("/_nodes", "elastic", PASSWORD, ServerUtils.getCaCert(installation)).get("nodes");
final String nodeId = nodes.fieldNames().next();
final int heapSize = nodes.at("/" + nodeId + "/jvm/mem/heap_init_in_bytes").intValue();
@@ -290,10 +298,9 @@ public void test071BindMountCustomPathWithDifferentUID() throws Exception {
distribution(),
builder().volume(tempEsDataDir.toAbsolutePath(), installation.data).envVar("ELASTIC_PASSWORD", PASSWORD)
);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
-
- final JsonNode nodes = getJson("/_nodes", USERNAME, PASSWORD);
+ final JsonNode nodes = getJson("/_nodes", "elastic", PASSWORD, ServerUtils.getCaCert(installation));
assertThat(nodes.at("/_nodes/total").intValue(), equalTo(1));
assertThat(nodes.at("/_nodes/successful").intValue(), equalTo(1));
@@ -327,7 +334,11 @@ public void test072RunEsAsDifferentUserAndGroup() throws Exception {
copyFromContainer(installation.config("elasticsearch.yml"), tempEsConfigDir);
copyFromContainer(installation.config("jvm.options"), tempEsConfigDir);
+ copyFromContainer(installation.config("elasticsearch.keystore"), tempEsConfigDir);
copyFromContainer(installation.config("log4j2.properties"), tempEsConfigDir);
+ final Path autoConfigurationDir = findInContainer(installation.config, "d", "\"tls_auto_config_initial_node_*\"");
+ final String autoConfigurationDirName = autoConfigurationDir.getFileName().toString();
+ copyFromContainer(autoConfigurationDir, tempEsConfigDir.resolve(autoConfigurationDirName));
chownWithPrivilegeEscalation(tempEsConfigDir, "501:501");
chownWithPrivilegeEscalation(tempEsDataDir, "501:501");
@@ -343,7 +354,10 @@ public void test072RunEsAsDifferentUserAndGroup() throws Exception {
.volume(tempEsLogsDir.toAbsolutePath(), installation.logs)
);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
+ rmDirWithPrivilegeEscalation(tempEsConfigDir);
+ rmDirWithPrivilegeEscalation(tempEsDataDir);
+ rmDirWithPrivilegeEscalation(tempEsLogsDir);
}
/**
@@ -354,7 +368,7 @@ public void test073RunEsAsDifferentUserAndGroupWithoutBindMounting() {
// Restart the container
runContainer(distribution(), builder().extraArgs("--group-add 0").uid(501, 501).envVar("ELASTIC_PASSWORD", PASSWORD));
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
}
/**
@@ -381,7 +395,7 @@ public void test080ConfigurePasswordThroughEnvironmentVariableFile() throws Exce
// If we configured security correctly, then this call will only work if we specify the correct credentials.
try {
- waitForElasticsearch("green", null, installation, "elastic", "hunter2");
+ waitForElasticsearch(installation, "elastic", "hunter2");
} catch (Exception e) {
throw new AssertionError(
"Failed to check whether Elasticsearch had started. This could be because "
@@ -391,7 +405,12 @@ public void test080ConfigurePasswordThroughEnvironmentVariableFile() throws Exce
}
// Also check that an unauthenticated call fails
- final int statusCode = Request.Get("http://localhost:9200/_nodes").execute().returnResponse().getStatusLine().getStatusCode();
+ final int statusCode = ServerUtils.makeRequestAndGetStatus(
+ Request.Get("https://localhost:9200"),
+ null,
+ null,
+ ServerUtils.getCaCert(installation)
+ );
assertThat("Expected server to require authentication", statusCode, equalTo(401));
}
@@ -526,7 +545,7 @@ public void test085EnvironmentVariablesAreRespectedUnderDockerExec() throws Exce
installation = runContainer(distribution(), builder().envVar("ELASTIC_PASSWORD", "hunter2"));
// The tool below requires a keystore, so ensure that ES is fully initialised before proceeding.
- waitForElasticsearch("green", null, installation, "elastic", "hunter2");
+ waitForElasticsearch(installation, "elastic", "hunter2");
sh.getEnv().put("http.host", "this.is.not.valid");
@@ -753,7 +772,7 @@ public void test110OrgOpencontainersLabels() throws Exception {
* Check that the container logs contain the expected content for Elasticsearch itself.
*/
public void test120DockerLogsIncludeElasticsearchLogs() {
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
final Result containerLogs = getContainerLogs();
assertThat("Container logs should contain full class names", containerLogs.stdout, containsString("org.elasticsearch.node.Node"));
@@ -766,16 +785,12 @@ public void test120DockerLogsIncludeElasticsearchLogs() {
public void test121CanUseStackLoggingConfig() {
runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "file").envVar("ELASTIC_PASSWORD", PASSWORD));
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
final Result containerLogs = getContainerLogs();
final List<String> stdout = containerLogs.stdout.lines().collect(Collectors.toList());
-
- assertThat(
- "Container logs should be formatted using the stack config",
- stdout.get(stdout.size() - 1),
- matchesPattern("^\\[\\d\\d\\d\\d-.*")
- );
+ // We look at a line near the beginning so that we don't stumble upon the stdout printing of auto-configured credentials
+ assertThat("Container logs should be formatted using the stack config", stdout.get(10), matchesPattern("^\\[\\d\\d\\d\\d-.*"));
assertThat("[logs/docker-cluster.log] should exist but it doesn't", existsInContainer("logs/docker-cluster.log"), is(true));
}
@@ -785,12 +800,12 @@ public void test121CanUseStackLoggingConfig() {
public void test122CanUseDockerLoggingConfig() {
runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "console").envVar("ELASTIC_PASSWORD", PASSWORD));
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
final Result containerLogs = getContainerLogs();
final List<String> stdout = containerLogs.stdout.lines().collect(Collectors.toList());
-
- assertThat("Container logs should be formatted using the docker config", stdout.get(stdout.size() - 1), startsWith("{\""));
+ // We look at a line near the beginning so that we don't stumble upon the stdout printing of auto-configured credentials
+ assertThat("Container logs should be formatted using the docker config", stdout.get(10), startsWith("{\""));
assertThat("[logs/docker-cluster.log] shouldn't exist but it does", existsInContainer("logs/docker-cluster.log"), is(false));
}
@@ -809,12 +824,12 @@ public void test123CannotUseUnknownLoggingConfig() {
public void test124CanRestartContainerWithStackLoggingConfig() {
runContainer(distribution(), builder().envVar("ES_LOG_STYLE", "file").envVar("ELASTIC_PASSWORD", PASSWORD));
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
restartContainer();
// If something went wrong running Elasticsearch the second time, this will fail.
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
}
/**
@@ -850,9 +865,9 @@ public void test131InitProcessHasCorrectPID() {
* Check that Elasticsearch reports per-node cgroup information.
*/
public void test140CgroupOsStatsAreAvailable() throws Exception {
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", PASSWORD);
- final JsonNode nodes = getJson("/_nodes/stats/os", USERNAME, PASSWORD).get("nodes");
+ final JsonNode nodes = getJson("/_nodes/stats/os", "elastic", PASSWORD, ServerUtils.getCaCert(installation)).get("nodes");
final String nodeId = nodes.fieldNames().next();
@@ -868,24 +883,48 @@ public void test140CgroupOsStatsAreAvailable() throws Exception {
* logic sets the correct heap size, based on the container limits.
*/
public void test150MachineDependentHeap() throws Exception {
+ final List xArgs = machineDependentHeapTest("942m", List.of());
+
+ // This is roughly 0.4 * 942
+ assertThat(xArgs, hasItems("-Xms376m", "-Xmx376m"));
+ }
+
+ /**
+ * Check that when available system memory is constrained by a total memory override as well as Docker,
+ * the machine-dependent heap sizing logic sets the correct heap size, preferring the override to the
+ * container limits.
+ */
+ public void test151MachineDependentHeapWithSizeOverride() throws Exception {
+ final List xArgs = machineDependentHeapTest(
+ "942m",
+ // 799014912 = 762m
+ List.of("-Des.total_memory_bytes=799014912")
+ );
+
+ // This is roughly 0.4 * 762, in particular it's NOT 0.4 * 942
+ assertThat(xArgs, hasItems("-Xms304m", "-Xmx304m"));
+ }
+
+ private List machineDependentHeapTest(final String containerMemory, final List extraJvmOptions) throws Exception {
// Start by ensuring `jvm.options` doesn't define any heap options
final Path jvmOptionsPath = tempDir.resolve("jvm.options");
final Path containerJvmOptionsPath = installation.config("jvm.options");
copyFromContainer(containerJvmOptionsPath, jvmOptionsPath);
- final List jvmOptions = Files.readAllLines(jvmOptionsPath)
- .stream()
- .filter(line -> (line.startsWith("-Xms") || line.startsWith("-Xmx")) == false)
- .collect(Collectors.toList());
+ final List jvmOptions = Stream.concat(
+ Files.readAllLines(jvmOptionsPath).stream().filter(line -> (line.startsWith("-Xms") || line.startsWith("-Xmx")) == false),
+ extraJvmOptions.stream()
+ ).collect(Collectors.toList());
Files.writeString(jvmOptionsPath, String.join("\n", jvmOptions));
// Now run the container, being explicit about the available memory
runContainer(
distribution(),
- builder().memory("942m").volume(jvmOptionsPath, containerJvmOptionsPath).envVar("ELASTIC_PASSWORD", PASSWORD)
+ builder().memory(containerMemory).volume(jvmOptionsPath, containerJvmOptionsPath).envVar("ELASTIC_PASSWORD", PASSWORD)
);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
+
+ waitForElasticsearch(installation, "elastic", PASSWORD);
// Grab the container output and find the line where it print the JVM arguments. This will
// let us see what the automatic heap sizing calculated.
@@ -897,12 +936,9 @@ public void test150MachineDependentHeap() throws Exception {
final JsonNode jsonNode = new ObjectMapper().readTree(jvmArgumentsLine.get());
final String argsStr = jsonNode.get("message").textValue();
- final List xArgs = Arrays.stream(argsStr.substring(1, argsStr.length() - 1).split(",\\s*"))
+ return Arrays.stream(argsStr.substring(1, argsStr.length() - 1).split(",\\s*"))
.filter(arg -> arg.startsWith("-X"))
.collect(Collectors.toList());
-
- // This is roughly 0.4 * 942
- assertThat(xArgs, hasItems("-Xms376m", "-Xmx376m"));
}
/**
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
index e286720007488..bdac50050c5bd 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java
@@ -55,8 +55,10 @@ public class KeystoreManagementTests extends PackagingTestCase {
public static final String ERROR_CORRUPTED_KEYSTORE = "Keystore has been corrupted or tampered with";
public static final String ERROR_KEYSTORE_NOT_PASSWORD_PROTECTED = "ERROR: Keystore is not password-protected";
public static final String ERROR_KEYSTORE_NOT_FOUND = "ERROR: Elasticsearch keystore not found";
- private static final String USERNAME = "elastic";
- private static final String PASSWORD = "nothunter2";
+ private static final String ELASTIC_PASSWORD = "nothunter2";
+ private static final String FILE_REALM_SUPERUSER = "test-user";
+ private static final String FILE_REALM_SUPERUSER_PASSWORD = "test-user-password";
+ private static final String KEYSTORE_PASSWORD = "keystore-password";
/** Test initial archive state */
public void test10InstallArchiveDistribution() throws Exception {
@@ -64,6 +66,9 @@ public void test10InstallArchiveDistribution() throws Exception {
installation = installArchive(sh, distribution);
verifyArchiveInstallation(installation, distribution());
+ // Add a user for tests to use.
+ // TODO: Possibly capture autoconfigured password from running the node the first time
+ setFileSuperuser(FILE_REALM_SUPERUSER, FILE_REALM_SUPERUSER_PASSWORD);
final Installation.Executables bin = installation.executables();
Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool + " has-passwd");
@@ -79,6 +84,7 @@ public void test11InstallPackageDistribution() throws Exception {
installation = installPackage(sh, distribution);
assertInstalled(distribution);
verifyPackageInstallation(installation, distribution, sh);
+ setFileSuperuser(FILE_REALM_SUPERUSER, FILE_REALM_SUPERUSER_PASSWORD);
final Installation.Executables bin = installation.executables();
Shell.Result r = sh.runIgnoreExitCode(bin.keystoreTool + " has-passwd");
@@ -108,50 +114,20 @@ public void test12InstallDockerDistribution() throws Exception {
assertThat(r2.stdout, containsString("keystore.seed"));
}
- public void test20CreateKeystoreManually() throws Exception {
- rmKeystoreIfExists();
- createKeystore(null);
-
- final Installation.Executables bin = installation.executables();
- verifyKeystorePermissions();
-
- Shell.Result r = bin.keystoreTool.run("list");
- assertThat(r.stdout, containsString("keystore.seed"));
- }
-
- public void test30AutoCreateKeystore() throws Exception {
- assumeTrue("Packages and docker are installed with a keystore file", distribution.isArchive());
- rmKeystoreIfExists();
-
- startElasticsearch();
- stopElasticsearch();
-
- Platforms.onWindows(() -> sh.chown(installation.config("elasticsearch.keystore")));
-
- verifyKeystorePermissions();
-
- final Installation.Executables bin = installation.executables();
- Shell.Result r = bin.keystoreTool.run("list");
- assertThat(r.stdout, containsString("keystore.seed"));
- }
-
- public void test40KeystorePasswordOnStandardInput() throws Exception {
+ public void test20KeystorePasswordOnStandardInput() throws Exception {
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
- String password = "^|<>\\&exit"; // code insertion on Windows if special characters are not escaped
-
- rmKeystoreIfExists();
- createKeystore(password);
+ createKeystore(KEYSTORE_PASSWORD);
assertPasswordProtectedKeystore();
- awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, false));
- ServerUtils.runElasticsearchTests();
+ awaitElasticsearchStartup(runElasticsearchStartCommand(KEYSTORE_PASSWORD, true, false));
+ runElasticsearchTests();
stopElasticsearch();
}
- public void test41WrongKeystorePasswordOnStandardInput() throws Exception {
+ public void test21WrongKeystorePasswordOnStandardInput() throws Exception {
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
@@ -161,25 +137,20 @@ public void test41WrongKeystorePasswordOnStandardInput() throws Exception {
assertElasticsearchFailure(result, Arrays.asList(ERROR_INCORRECT_PASSWORD, ERROR_CORRUPTED_KEYSTORE), null);
}
- public void test42KeystorePasswordOnTty() throws Exception {
+ public void test22KeystorePasswordOnTty() throws Exception {
/* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
assumeThat(installation, is(notNullValue()));
- String password = "keystorepass";
-
- rmKeystoreIfExists();
- createKeystore(password);
-
assertPasswordProtectedKeystore();
- awaitElasticsearchStartup(runElasticsearchStartCommand(password, true, true));
- ServerUtils.runElasticsearchTests();
+ awaitElasticsearchStartup(runElasticsearchStartCommand(KEYSTORE_PASSWORD, true, true));
+ runElasticsearchTests();
stopElasticsearch();
}
- public void test43WrongKeystorePasswordOnTty() throws Exception {
+ public void test23WrongKeystorePasswordOnTty() throws Exception {
/* Windows issue awaits fix: https://github.com/elastic/elasticsearch/issues/49340 */
assumeTrue("expect command isn't on Windows", distribution.platform != Distribution.Platform.WINDOWS);
assumeTrue("packages will use systemd, which doesn't handle stdin", distribution.isArchive());
@@ -196,26 +167,19 @@ public void test43WrongKeystorePasswordOnTty() throws Exception {
* If we have an encrypted keystore, we shouldn't require a password to
* view help information.
*/
- public void test44EncryptedKeystoreAllowsHelpMessage() throws Exception {
+ public void test24EncryptedKeystoreAllowsHelpMessage() throws Exception {
assumeTrue("users call elasticsearch directly in archive case", distribution.isArchive());
- String password = "keystorepass";
-
- rmKeystoreIfExists();
- createKeystore(password);
-
assertPasswordProtectedKeystore();
Shell.Result r = installation.executables().elasticsearch.run("--help");
assertThat(r.stdout, startsWith("Starts Elasticsearch"));
}
- public void test50KeystorePasswordFromFile() throws Exception {
+ public void test30KeystorePasswordFromFile() throws Exception {
assumeTrue("only for systemd", Platforms.isSystemd() && distribution().isPackage());
- String password = "!@#$%^&*()|\\<>/?";
Path esKeystorePassphraseFile = installation.config.resolve("eks");
- rmKeystoreIfExists();
- createKeystore(password);
+ setKeystorePassword(KEYSTORE_PASSWORD);
assertPasswordProtectedKeystore();
@@ -223,17 +187,17 @@ public void test50KeystorePasswordFromFile() throws Exception {
sh.run("sudo systemctl set-environment ES_KEYSTORE_PASSPHRASE_FILE=" + esKeystorePassphraseFile);
Files.createFile(esKeystorePassphraseFile);
- Files.write(esKeystorePassphraseFile, List.of(password));
+ Files.write(esKeystorePassphraseFile, List.of(KEYSTORE_PASSWORD));
startElasticsearch();
- ServerUtils.runElasticsearchTests();
+ runElasticsearchTests();
stopElasticsearch();
} finally {
sh.run("sudo systemctl unset-environment ES_KEYSTORE_PASSPHRASE_FILE");
}
}
- public void test51WrongKeystorePasswordFromFile() throws Exception {
+ public void test31WrongKeystorePasswordFromFile() throws Exception {
assumeTrue("only for systemd", Platforms.isSystemd() && distribution().isPackage());
Path esKeystorePassphraseFile = installation.config.resolve("eks");
@@ -261,42 +225,40 @@ public void test51WrongKeystorePasswordFromFile() throws Exception {
* Check that we can mount a password-protected keystore to a docker image
* and provide a password via an environment variable.
*/
- @AwaitsFix(bugUrl = "Keystore fails to save with resource busy")
- public void test60DockerEnvironmentVariablePassword() throws Exception {
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/76124")
+ public void test40DockerEnvironmentVariablePassword() throws Exception {
assumeTrue(distribution().isDocker());
- String password = "keystore-password";
- Path localConfigDir = getMountedLocalConfDirWithKeystore(password, installation.config);
+ Path localConfigDir = getMountedLocalConfDirWithKeystore(KEYSTORE_PASSWORD, installation.config);
// restart ES with password and mounted config dir containing password protected keystore
runContainer(
distribution(),
builder().volume(localConfigDir.resolve("config"), installation.config)
- .envVar("KEYSTORE_PASSWORD", password)
- .envVar("ELASTIC_PASSWORD", PASSWORD)
+ .envVar("KEYSTORE_PASSWORD", KEYSTORE_PASSWORD)
+ .envVar("ELASTIC_PASSWORD", ELASTIC_PASSWORD)
);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
- ServerUtils.runElasticsearchTests(USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", ELASTIC_PASSWORD);
+ runElasticsearchTestsAsElastic(ELASTIC_PASSWORD);
}
/**
* Check that we can mount a password-protected keystore to a docker image
* and provide a password via a file, pointed at from an environment variable.
*/
- @AwaitsFix(bugUrl = "Keystore fails to save with resource busy")
- public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception {
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/76124")
+ public void test41DockerEnvironmentVariablePasswordFromFile() throws Exception {
assumeTrue(distribution().isDocker());
Path tempDir = null;
try {
tempDir = createTempDir(KeystoreManagementTests.class.getSimpleName());
- String password = "keystore-password";
String passwordFilename = "password.txt";
- Files.writeString(tempDir.resolve(passwordFilename), password + "\n");
+ Files.writeString(tempDir.resolve(passwordFilename), KEYSTORE_PASSWORD + "\n");
Files.setPosixFilePermissions(tempDir.resolve(passwordFilename), p600);
- Path localConfigDir = getMountedLocalConfDirWithKeystore(password, installation.config);
+ Path localConfigDir = getMountedLocalConfDirWithKeystore(KEYSTORE_PASSWORD, installation.config);
// restart ES with password and mounted config dir containing password protected keystore
runContainer(
@@ -304,11 +266,11 @@ public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception {
builder().volume(localConfigDir.resolve("config"), installation.config)
.volume(tempDir, "/run/secrets")
.envVar("KEYSTORE_PASSWORD_FILE", "/run/secrets/" + passwordFilename)
- .envVar("ELASTIC_PASSWORD", PASSWORD)
+ .envVar("ELASTIC_PASSWORD", ELASTIC_PASSWORD)
);
- waitForElasticsearch(installation, USERNAME, PASSWORD);
- ServerUtils.runElasticsearchTests(USERNAME, PASSWORD);
+ waitForElasticsearch(installation, "elastic", ELASTIC_PASSWORD);
+ runElasticsearchTestsAsElastic(ELASTIC_PASSWORD);
} finally {
if (tempDir != null) {
rm(tempDir);
@@ -320,12 +282,11 @@ public void test61DockerEnvironmentVariablePasswordFromFile() throws Exception {
* Check that if we provide the wrong password for a mounted and password-protected
* keystore, Elasticsearch doesn't start.
*/
- @AwaitsFix(bugUrl = "Keystore fails to save with resource busy")
- public void test62DockerEnvironmentVariableBadPassword() throws Exception {
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/76124")
+ public void test42DockerEnvironmentVariableBadPassword() throws Exception {
assumeTrue(distribution().isDocker());
- String password = "keystore-password";
- Path localConfigPath = getMountedLocalConfDirWithKeystore(password, installation.config);
+ Path localConfigPath = getMountedLocalConfDirWithKeystore(KEYSTORE_PASSWORD, installation.config);
// restart ES with password and mounted config dir containing password protected keystore
Shell.Result r = runContainerExpectingFailure(
@@ -335,6 +296,38 @@ public void test62DockerEnvironmentVariableBadPassword() throws Exception {
assertThat(r.stderr, containsString(ERROR_INCORRECT_PASSWORD));
}
+ public void test50CreateKeystoreManually() throws Exception {
+ // Run this test near the end so that removing the existing keystore doesn't make subsequent tests fail
+ rmKeystoreIfExists();
+ createKeystore(null);
+
+ final Installation.Executables bin = installation.executables();
+ verifyKeystorePermissions();
+
+ Shell.Result r = bin.keystoreTool.run("list");
+ assertThat(r.stdout, containsString("keystore.seed"));
+ }
+
+ public void test60AutoCreateKeystore() throws Exception {
+ // Run this test near the end so that removing the existing keystore doesn't make subsequent tests fail
+ assumeTrue("Packages and docker are installed with a keystore file", distribution.isArchive());
+ rmKeystoreIfExists();
+ // Elasticsearch was auto-configured for security. We need to remove that configuration as it depended on settings in the previous
+ // keystore
+ ServerUtils.disableSecurityFeatures(installation);
+
+ startElasticsearch();
+ stopElasticsearch();
+
+ Platforms.onWindows(() -> sh.chown(installation.config("elasticsearch.keystore")));
+
+ verifyKeystorePermissions();
+
+ final Installation.Executables bin = installation.executables();
+ Shell.Result r = bin.keystoreTool.run("list");
+ assertThat(r.stdout, containsString("keystore.seed"));
+ }
+
/**
* In the Docker context, it's a little bit tricky to get a password-protected
* keystore. All of the utilities we'd want to use are on the Docker image.
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java
index 998e3bf6d53c5..4047277f0b1ca 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java
@@ -8,7 +8,6 @@
package org.elasticsearch.packaging.test;
-import org.apache.http.client.fluent.Request;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Packages;
import org.elasticsearch.packaging.util.Shell.Result;
@@ -40,8 +39,6 @@
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
import static org.elasticsearch.packaging.util.Platforms.getOsRelease;
import static org.elasticsearch.packaging.util.Platforms.isSystemd;
-import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
-import static org.elasticsearch.packaging.util.ServerUtils.runElasticsearchTests;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.containsString;
@@ -62,6 +59,7 @@ public void test10InstallPackage() throws Exception {
installation = installPackage(sh, distribution());
assertInstalled(distribution());
verifyPackageInstallation(installation, distribution(), sh);
+ setFileSuperuser("test_superuser", "test_superuser_password");
}
public void test20PluginsCommandWhenNoPlugins() {
@@ -126,7 +124,7 @@ public void test34CustomJvmOptionsDirectoryFile() throws Exception {
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
stopElasticsearch();
@@ -211,18 +209,24 @@ public void test50Remove() throws Exception {
}
public void test60Reinstall() throws Exception {
- install();
- assertInstalled(distribution());
- verifyPackageInstallation(installation, distribution(), sh);
+ try {
+ install();
+ assertInstalled(distribution());
+ verifyPackageInstallation(installation, distribution(), sh);
- remove(distribution());
- assertRemoved(distribution());
+ remove(distribution());
+ assertRemoved(distribution());
+ } finally {
+ cleanup();
+ }
}
public void test70RestartServer() throws Exception {
try {
install();
assertInstalled(distribution());
+ // Recreate file realm users that have been deleted in earlier tests
+ setFileSuperuser("test_superuser", "test_superuser_password");
startElasticsearch();
restartElasticsearch(sh, installation);
@@ -233,6 +237,34 @@ public void test70RestartServer() throws Exception {
}
}
+ public void test71JvmOptionsTotalMemoryOverride() throws Exception {
+ try {
+ install();
+ assertPathsExist(installation.envFile);
+ setHeap(null);
+
+ // Recreate file realm users that have been deleted in earlier tests
+ setFileSuperuser("test_superuser", "test_superuser_password");
+
+ withCustomConfig(tempConf -> {
+ // Work as though total system memory is 850MB
+ append(installation.envFile, "ES_JAVA_OPTS=\"-Des.total_memory_bytes=891289600\"");
+
+ startElasticsearch();
+
+ final String nodesStatsResponse = makeRequest("https://localhost:9200/_nodes/stats");
+ assertThat(nodesStatsResponse, containsString("\"adjusted_total_in_bytes\":891289600"));
+
+ // 40% of 850MB
+ assertThat(sh.run("ps auwwx").stdout, containsString("-Xms340m -Xmx340m"));
+
+ stopElasticsearch();
+ });
+ } finally {
+ cleanup();
+ }
+ }
+
public void test72TestRuntimeDirectory() throws Exception {
try {
install();
@@ -282,13 +314,15 @@ public void test81CustomPathConfAndJvmOptions() throws Exception {
assertPathsExist(installation.envFile);
stopElasticsearch();
+ // Recreate file realm users that have been deleted in earlier tests
+ setFileSuperuser("test_superuser", "test_superuser_password");
withCustomConfig(tempConf -> {
append(installation.envFile, "ES_JAVA_OPTS=\"-Xmx512m -Xms512m -XX:-UseCompressedOops\"");
startElasticsearch();
- final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes"));
+ final String nodesResponse = makeRequest("https://localhost:9200/_nodes");
assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912"));
assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\""));
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java
index 94f86aedcea79..d7f03fb2a0298 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java
@@ -12,13 +12,13 @@
import org.apache.http.entity.ContentType;
import org.elasticsearch.packaging.util.Distribution;
import org.elasticsearch.packaging.util.Packages;
+import org.elasticsearch.packaging.util.ServerUtils;
import java.nio.file.Paths;
import static org.elasticsearch.packaging.util.Packages.assertInstalled;
import static org.elasticsearch.packaging.util.Packages.installPackage;
import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
-import static org.elasticsearch.packaging.util.ServerUtils.makeRequest;
import static org.hamcrest.Matchers.containsString;
public class PackageUpgradeTests extends PackagingTestCase {
@@ -32,6 +32,9 @@ public class PackageUpgradeTests extends PackagingTestCase {
public void test10InstallBwcVersion() throws Exception {
installation = installPackage(sh, bwcDistribution);
assertInstalled(bwcDistribution);
+ // TODO: Add more tests here to assert behavior when updating from < v8 to > v8 with implicit/explicit behavior,
+ // maybe as part of https://github.com/elastic/elasticsearch/pull/76879
+ ServerUtils.disableSecurityFeatures(installation);
}
public void test11ModifyKeystore() throws Exception {
@@ -44,25 +47,25 @@ public void test12SetupBwcVersion() throws Exception {
startElasticsearch();
// create indexes explicitly with 0 replicas so when restarting we can reach green state
- makeRequest(
+ ServerUtils.makeRequest(
Request.Put("http://localhost:9200/library")
.bodyString("{\"settings\":{\"index\":{\"number_of_replicas\":0}}}", ContentType.APPLICATION_JSON)
);
- makeRequest(
+ ServerUtils.makeRequest(
Request.Put("http://localhost:9200/library2")
.bodyString("{\"settings\":{\"index\":{\"number_of_replicas\":0}}}", ContentType.APPLICATION_JSON)
);
// add some docs
- makeRequest(
+ ServerUtils.makeRequest(
Request.Post("http://localhost:9200/library/_doc/1?refresh=true&pretty")
.bodyString("{ \"title\": \"Elasticsearch - The Definitive Guide\"}", ContentType.APPLICATION_JSON)
);
- makeRequest(
+ ServerUtils.makeRequest(
Request.Post("http://localhost:9200/library/_doc/2?refresh=true&pretty")
.bodyString("{ \"title\": \"Brave New World\"}", ContentType.APPLICATION_JSON)
);
- makeRequest(
+ ServerUtils.makeRequest(
Request.Post("http://localhost:9200/library2/_doc/1?refresh=true&pretty")
.bodyString("{ \"title\": \"The Left Hand of Darkness\"}", ContentType.APPLICATION_JSON)
);
@@ -78,9 +81,12 @@ public void test20InstallUpgradedVersion() throws Exception {
installation = Packages.forceUpgradePackage(sh, distribution);
} else {
installation = Packages.upgradePackage(sh, distribution);
+ verifySecurityNotAutoConfigured(installation);
}
assertInstalled(distribution);
verifyPackageInstallation(installation, distribution, sh);
+ // Upgrade overwrites the configuration file because we run with --force-confnew so we need to disable security again
+ ServerUtils.disableSecurityFeatures(installation);
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/76283")
@@ -89,13 +95,11 @@ public void test21CheckUpgradedVersion() throws Exception {
}
private void assertDocsExist() throws Exception {
- // We can properly handle this as part of https://github.com/elastic/elasticsearch/issues/75940
- // For now we can use elastic with "keystore.seed" as we set it explicitly in PackageUpgradeTests#test11ModifyKeystore
- String response1 = makeRequest(Request.Get("http://localhost:9200/library/_doc/1?pretty"), "elastic", "keystore_seed", null);
+ String response1 = ServerUtils.makeRequest(Request.Get("http://localhost:9200/library/_doc/1?pretty"));
assertThat(response1, containsString("Elasticsearch"));
- String response2 = makeRequest(Request.Get("http://localhost:9200/library/_doc/2?pretty"), "elastic", "keystore_seed", null);
+ String response2 = ServerUtils.makeRequest(Request.Get("http://localhost:9200/library/_doc/2?pretty"));
assertThat(response2, containsString("World"));
- String response3 = makeRequest(Request.Get("http://localhost:9200/library2/_doc/1?pretty"), "elastic", "keystore_seed", null);
+ String response3 = ServerUtils.makeRequest(Request.Get("http://localhost:9200/library2/_doc/1?pretty"));
assertThat(response3, containsString("Darkness"));
}
}
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java
new file mode 100644
index 0000000000000..1affcd7646f96
--- /dev/null
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.packaging.test;
+
+import org.elasticsearch.packaging.util.Installation;
+import org.elasticsearch.packaging.util.Packages;
+import org.junit.BeforeClass;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Predicate;
+
+import static org.elasticsearch.packaging.util.FileUtils.append;
+import static org.elasticsearch.packaging.util.Packages.assertInstalled;
+import static org.elasticsearch.packaging.util.Packages.assertRemoved;
+import static org.elasticsearch.packaging.util.Packages.installPackage;
+import static org.elasticsearch.packaging.util.Packages.verifyPackageInstallation;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assume.assumeTrue;
+
+public class PackagesSecurityAutoConfigurationTests extends PackagingTestCase {
+
+ @BeforeClass
+ public static void filterDistros() {
+ assumeTrue("rpm or deb", distribution.isPackage());
+ }
+
+ public void test10SecurityAutoConfiguredOnPackageInstall() throws Exception {
+ assertRemoved(distribution());
+ installation = installPackage(sh, distribution(), successfulAutoConfiguration());
+ assertInstalled(distribution());
+ verifyPackageInstallation(installation, distribution(), sh);
+ verifySecurityAutoConfigured(installation);
+ assertNotNull(installation.getElasticPassword());
+ }
+
+ public void test20SecurityNotAutoConfiguredOnReInstallation() throws Exception {
+ // we are testing force upgrading in the current version
+ // In such a case, security remains configured from the initial installation, we don't run it again.
+ Optional autoConfigDirName = getAutoConfigDirName(installation);
+ installation = Packages.forceUpgradePackage(sh, distribution);
+ assertInstalled(distribution);
+ verifyPackageInstallation(installation, distribution, sh);
+ verifySecurityAutoConfigured(installation);
+ // Since we did not auto-configure the second time, the directory name should be the same
+ assertThat(autoConfigDirName.isPresent(), is(true));
+ assertThat(getAutoConfigDirName(installation).isPresent(), is(true));
+ assertThat(getAutoConfigDirName(installation).get(), equalTo(autoConfigDirName.get()));
+ }
+
+ public void test30SecurityNotAutoConfiguredWhenExistingDataDir() throws Exception {
+ // This is a contrived example for packages where in a new installation, there is an
+ // existing data directory but the rest of the package tracked config files were removed
+ final Path dataPath = installation.data;
+ cleanup();
+ Files.createDirectory(dataPath);
+ append(dataPath.resolve("foo"), "some data");
+ installation = installPackage(sh, distribution(), existingSecurityConfiguration());
+ verifySecurityNotAutoConfigured(installation);
+ }
+
+ public void test40SecurityNotAutoConfiguredWhenExistingKeystoreUnknownPassword() throws Exception {
+ // This is a contrived example for packages where in a new installation, there is an
+ // existing elasticsearch.keystore file within $ES_PATH_CONF and it's password-protected
+ final Installation.Executables bin = installation.executables();
+ bin.keystoreTool.run("passwd", "some_password\nsome_password\n");
+ final Path tempDir = createTempDir("existing-keystore-config");
+ final Path confPath = installation.config;
+ Files.copy(
+ confPath.resolve("elasticsearch.keystore"),
+ tempDir.resolve("elasticsearch.keystore"),
+ StandardCopyOption.COPY_ATTRIBUTES
+ );
+ cleanup();
+ Files.createDirectory(confPath);
+ Files.copy(
+ tempDir.resolve("elasticsearch.keystore"),
+ confPath.resolve("elasticsearch.keystore"),
+ StandardCopyOption.COPY_ATTRIBUTES
+ );
+ installation = installPackage(sh, distribution(), errorOutput());
+ List configLines = Files.readAllLines(installation.config("elasticsearch.yml"));
+ assertThat(configLines, not(hasItem("# have been automatically generated in order to configure Security. #")));
+ }
+
+ private Predicate successfulAutoConfiguration() {
+ Predicate p1 = output -> output.contains("Authentication and Authorization are enabled.");
+ Predicate p2 = output -> output.contains("TLS for the transport and the http layers is enabled and configured.");
+ Predicate p3 = output -> output.contains("The password of the elastic superuser will be set to:");
+ return p1.and(p2).and(p3);
+ }
+
+ private Predicate existingSecurityConfiguration() {
+ return output -> output.contains("Security features appear to be already configured.");
+ }
+
+ private Predicate errorOutput() {
+ Predicate p1 = output -> output.contains("Failed to auto-configure security features.");
+ Predicate p2 = output -> output.contains("Authentication and Authorization are enabled.");
+ Predicate p3 = output -> output.contains("You can use elasticsearch-reset-elastic-password to set a password");
+ Predicate p4 = output -> output.contains("for the elastic user.");
+ return p1.and(p2).and(p3).and(p4);
+ }
+
+}
diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
index 3f92a739b2e25..c3c228ae7ec2b 100644
--- a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
+++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java
@@ -15,23 +15,29 @@
import com.carrotsearch.randomizedtesting.annotations.TestMethodProviders;
import com.carrotsearch.randomizedtesting.annotations.Timeout;
+import org.apache.http.client.fluent.Request;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Version;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.core.CheckedRunnable;
+import org.elasticsearch.core.Tuple;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.packaging.util.Archives;
import org.elasticsearch.packaging.util.Distribution;
+import org.elasticsearch.packaging.util.FileMatcher;
import org.elasticsearch.packaging.util.FileUtils;
import org.elasticsearch.packaging.util.Installation;
import org.elasticsearch.packaging.util.Packages;
import org.elasticsearch.packaging.util.Platforms;
+import org.elasticsearch.packaging.util.ServerUtils;
import org.elasticsearch.packaging.util.Shell;
import org.elasticsearch.packaging.util.docker.Docker;
+import org.elasticsearch.packaging.util.docker.DockerFileMatcher;
import org.elasticsearch.packaging.util.docker.DockerShell;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
+import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -56,19 +62,35 @@
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
+import java.util.Optional;
import java.util.concurrent.TimeUnit;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
import static org.elasticsearch.packaging.util.Cleanup.cleanEverything;
import static org.elasticsearch.packaging.util.FileExistenceMatchers.fileExists;
+import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory;
+import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File;
+import static org.elasticsearch.packaging.util.FileMatcher.p660;
+import static org.elasticsearch.packaging.util.FileMatcher.p750;
import static org.elasticsearch.packaging.util.FileUtils.append;
+import static org.elasticsearch.packaging.util.FileUtils.rm;
+import static org.elasticsearch.packaging.util.docker.Docker.copyFromContainer;
import static org.elasticsearch.packaging.util.docker.Docker.ensureImageIsLoaded;
import static org.elasticsearch.packaging.util.docker.Docker.removeContainer;
import static org.hamcrest.CoreMatchers.anyOf;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.contains;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
@@ -115,6 +137,7 @@ public abstract class PackagingTestCase extends Assert {
// the current installation of the distribution being tested
protected static Installation installation;
+ protected static Tuple fileSuperuserForInstallation;
private static boolean failed;
@@ -239,6 +262,7 @@ protected static void install() throws Exception {
protected static void cleanup() throws Exception {
installation = null;
+ // the cached file-realm superuser belongs to the installation being removed, so drop it too
+ fileSuperuserForInstallation = null;
cleanEverything();
}
@@ -387,7 +411,19 @@ public Shell.Result awaitElasticsearchStartupWithResult(Shell.Result result, int
* @throws Exception if Elasticsearch can't start
*/
public void startElasticsearch() throws Exception {
- awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false));
+ try {
+ awaitElasticsearchStartup(runElasticsearchStartCommand(null, true, false));
+ } catch (Exception e) {
+ // On startup failure, best-effort diagnostics: if the pid file exists and the process
+ // is still alive (visible in jps), dump its stack before rethrowing the original failure.
+ if (Files.exists(installation.home.resolve("elasticsearch.pid"))) {
+ String pid = FileUtils.slurp(installation.home.resolve("elasticsearch.pid")).trim();
+ logger.info("elasticsearch process ({}) failed to start", pid);
+ if (sh.run("jps").stdout.contains(pid)) {
+ logger.info("Dumping jstack of elasticsearch process ({}) ", pid);
+ // exit code ignored on purpose: jstack failing must not mask the startup failure
+ sh.runIgnoreExitCode("jstack " + pid);
+ }
+ }
+ throw e;
+ }
}
public void assertElasticsearchFailure(Shell.Result result, String expectedMessage, Packages.JournaldWrapper journaldWrapper) {
@@ -421,8 +457,8 @@ public void assertElasticsearchFailure(Shell.Result result, List expecte
sh.runIgnoreExitCode("Wait-Process -Timeout " + Archives.ES_STARTUP_SLEEP_TIME_SECONDS + " -Id " + wrapperPid);
sh.runIgnoreExitCode(
"Get-EventSubscriber | "
- + "where {($_.EventName -eq 'OutputDataReceived' -Or $_.EventName -eq 'ErrorDataReceived' |"
- + "Unregister-EventSubscriber -Force"
+ + "Where-Object {($_.EventName -eq 'OutputDataReceived') -or ($_.EventName -eq 'ErrorDataReceived')} | "
+ + "Unregister-Event -Force"
);
assertThat(FileUtils.slurp(Archives.getPowershellErrorPath(installation)), anyOf(stringMatchers));
@@ -433,6 +469,50 @@ public void assertElasticsearchFailure(Shell.Result result, List expecte
}
}
+ /**
+  * Creates a file-realm user with the superuser role via the users CLI tool and caches
+  * its credentials for subsequent authenticated requests. Requires an installation and
+  * must not be called twice for the same installation.
+  */
+ public void setFileSuperuser(String username, String password) {
+ assertThat(installation, Matchers.not(Matchers.nullValue()));
+ assertThat(fileSuperuserForInstallation, Matchers.nullValue());
+ String useraddCommand = installation.executables().usersTool + " useradd " + username + " -p " + password + " -r " + "superuser";
+ Shell.Result useraddResult = sh.run(useraddCommand);
+ assertThat(useraddResult.isSuccess(), is(true));
+ fileSuperuserForInstallation = new Tuple<>(username, password);
+ }
+
+ /** Runs the packaging smoke tests against the node, authenticating as the built-in elastic user. */
+ public void runElasticsearchTestsAsElastic(String elasticPassword) throws Exception {
+ ServerUtils.runElasticsearchTests("elastic", elasticPassword, ServerUtils.getCaCert(installation));
+ }
+
+ public void runElasticsearchTests() throws Exception {
+ ServerUtils.runElasticsearchTests(
+ fileSuperuserForInstallation != null ? fileSuperuserForInstallation.v1() : null,
+ fileSuperuserForInstallation != null ? fileSuperuserForInstallation.v2() : null,
+ ServerUtils.getCaCert(installation)
+ );
+ }
+
+ public String makeRequest(String request) throws Exception {
+ return ServerUtils.makeRequest(
+ Request.Get(request),
+ fileSuperuserForInstallation != null ? fileSuperuserForInstallation.v1() : null,
+ fileSuperuserForInstallation != null ? fileSuperuserForInstallation.v2() : null,
+ ServerUtils.getCaCert(installation)
+ );
+ }
+
+ /** Performs a GET request against the given URL as the built-in elastic user and returns the response body. */
+ public String makeRequestAsElastic(String request, String elasticPassword) throws Exception {
+ return ServerUtils.makeRequest(Request.Get(request), "elastic", elasticPassword, ServerUtils.getCaCert(installation));
+ }
+
+ /** GETs https://localhost:9200 as the built-in elastic user and returns only the HTTP status code. */
+ public int makeRequestAsElastic(String elasticPassword) throws Exception {
+ return ServerUtils.makeRequestAndGetStatus(
+ Request.Get("https://localhost:9200"),
+ "elastic",
+ elasticPassword,
+ ServerUtils.getCaCert(installation)
+ );
+ }
+
public static Path getRootTempDir() {
if (distribution().isPackage()) {
// The custom config directory is not under /tmp or /var/tmp because
@@ -470,7 +550,8 @@ public void withCustomConfig(CheckedConsumer action) throws Exc
Path tempConf = tempDir.resolve("elasticsearch");
FileUtils.copyDirectory(installation.config, tempConf);
- Platforms.onLinux(() -> sh.run("chown -R elasticsearch:elasticsearch " + tempDir));
+ // this is what install does
+ sh.chown(tempDir);
if (distribution.isPackage()) {
Files.copy(installation.envFile, tempDir.resolve("elasticsearch.bk"), StandardCopyOption.COPY_ATTRIBUTES);// backup
@@ -479,6 +560,19 @@ public void withCustomConfig(CheckedConsumer action) throws Exc
sh.getEnv().put("ES_PATH_CONF", tempConf.toString());
}
+ // Auto-configuration file paths are absolute so we need to replace them in the config now that we copied them to tempConf
+ // if auto-configuration has happened. Otherwise, the rewrite below is a no-op.
+ Path yml = tempConf.resolve("elasticsearch.yml");
+ // Files.readAllLines reads the file eagerly, so the stream over the resulting List holds no
+ // OS resource and needs no try-with-resources; String#replace is already a no-op when the
+ // line contains no match, so the extra contains() guard was redundant. Raw List/Stream
+ // parameterized as List<String> (type arguments appear stripped in the original).
+ List<String> lines = Files.readAllLines(yml)
+ .stream()
+ .map(l -> l.replace(installation.config.toString(), tempConf.toString()))
+ .collect(Collectors.toList());
+ Files.write(yml, lines, TRUNCATE_EXISTING);
action.accept(tempConf);
if (distribution.isPackage()) {
IOUtils.rm(installation.envFile);
@@ -489,6 +583,17 @@ public void withCustomConfig(CheckedConsumer action) throws Exc
IOUtils.rm(tempDir);
}
+ /**
+  * Temporarily changes the owner of the config directory to {@code tempOwner} while
+  * {@code action} runs, when {@code predicate} matches the distribution's platform;
+  * otherwise the action runs with ownership untouched.
+  *
+  * Fix: ownership restoration is now in a finally block — previously, an action that threw
+  * left the config directory owned by {@code tempOwner}, poisoning subsequent tests.
+  * Raw Predicate/CheckedRunnable parameterized (type arguments appear stripped in the
+  * original; Predicate is applied to installation.distribution.platform — confirm the
+  * Distribution.Platform type argument against the Distribution class).
+  */
+ public void withCustomConfigOwner(String tempOwner, Predicate<Distribution.Platform> predicate, CheckedRunnable<Exception> action)
+ throws Exception {
+ if (predicate.test(installation.distribution.platform)) {
+ sh.chown(installation.config, tempOwner);
+ try {
+ action.run();
+ } finally {
+ // restore default ownership even when the action fails
+ sh.chown(installation.config);
+ }
+ } else {
+ action.run();
+ }
+ }
+
/**
* Manually set the heap size with a jvm.options.d file. This will be reset before each test.
*/
@@ -549,4 +654,109 @@ public static void assertBusy(CheckedRunnable