From f32d4158d136cae5dbbd94de600dbed96fce88ad Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 19 Feb 2020 16:47:29 -0800 Subject: [PATCH 1/6] Consolidate docker availability build logic Signed-off-by: Mark Vieira --- build.gradle | 1 + .../elasticsearch/gradle/BuildPlugin.groovy | 48 --- .../gradle/DistributionDownloadPlugin.java | 12 +- .../gradle/ElasticsearchDistribution.java | 21 + .../gradle/docker/DockerSupportPlugin.java | 61 +++ .../gradle/docker/DockerSupportService.java | 375 ++++++++++++++++++ .../gradle/test/DistroTestPlugin.java | 140 ++----- .../testclusters/TestClustersPlugin.java | 4 +- .../testfixtures/TestFixturesPlugin.java | 172 ++++---- .../gradle/tool/Boilerplate.java | 4 + .../gradle/tool/DockerUtils.java | 234 ----------- .../elasticsearch.docker-support.properties | 1 + .../DockerSupportServiceTests.java} | 10 +- distribution/docker/build.gradle | 24 +- plugins/repository-hdfs/build.gradle | 2 +- qa/remote-clusters/build.gradle | 38 +- 16 files changed, 629 insertions(+), 518 deletions(-) create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java delete mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java create mode 100644 buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties rename buildSrc/src/test/java/org/elasticsearch/gradle/{test/DistroTestPluginTests.java => docker/DockerSupportServiceTests.java} (89%) diff --git a/build.gradle b/build.gradle index bae423c06ac68..1a10c8946925a 100644 --- a/build.gradle +++ b/build.gradle @@ -34,6 +34,7 @@ import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure plugins { id 'lifecycle-base' + id 'elasticsearch.docker-support' id 'elasticsearch.global-build-info' id "com.diffplug.gradle.spotless" version "3.24.2" apply false } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index ac168bd6fd4e6..8b8c088c86244 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -32,7 +32,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.ErrorReportingTestListener import org.elasticsearch.gradle.testclusters.ElasticsearchCluster import org.elasticsearch.gradle.testclusters.TestClustersPlugin -import org.elasticsearch.gradle.testclusters.TestDistribution import org.elasticsearch.gradle.tool.Boilerplate import org.gradle.api.Action import org.gradle.api.GradleException @@ -83,8 +82,6 @@ import java.nio.charset.StandardCharsets import java.nio.file.Files import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure -import static org.elasticsearch.gradle.tool.DockerUtils.assertDockerIsAvailable -import static org.elasticsearch.gradle.tool.DockerUtils.getDockerPath /** * Encapsulates build configuration for elasticsearch projects. @@ -186,51 +183,6 @@ class BuildPlugin implements Plugin { } } - static void requireDocker(final Task task) { - final Project rootProject = task.project.rootProject - ExtraPropertiesExtension ext = rootProject.extensions.getByType(ExtraPropertiesExtension) - - if (rootProject.hasProperty('requiresDocker') == false) { - /* - * This is our first time encountering a task that requires Docker. 
We will add an extension that will let us track the tasks - * that register as requiring Docker. We will add a delayed execution that when the task graph is ready if any such tasks are - * in the task graph, then we check two things: - * - the Docker binary is available - * - we can execute a Docker command that requires privileges - * - * If either of these fail, we fail the build. - */ - - // check if the Docker binary exists and record its path - final String dockerBinary = getDockerPath().orElse(null) - - final boolean buildDocker - final String buildDockerProperty = System.getProperty("build.docker") - if (buildDockerProperty == null) { - buildDocker = dockerBinary != null - } else if (buildDockerProperty == "true") { - buildDocker = true - } else if (buildDockerProperty == "false") { - buildDocker = false - } else { - throw new IllegalArgumentException( - "expected build.docker to be unset or one of \"true\" or \"false\" but was [" + buildDockerProperty + "]") - } - - ext.set('buildDocker', buildDocker) - ext.set('requiresDocker', []) - rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> - final List tasks = taskGraph.allTasks.intersect(ext.get('requiresDocker') as List).collect { " ${it.path}".toString()} - - if (tasks.isEmpty() == false) { - assertDockerIsAvailable(task.project, tasks) - } - } - } - - (ext.get('requiresDocker') as List).add(task) - } - /** Add a check before gradle execution phase which ensures java home for the given java version is set. */ static void requireJavaHome(Task task, int version) { // use root project for global accounting diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 4916e7847508f..63aedd6566d89 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -22,8 +22,11 @@ import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor; import org.elasticsearch.gradle.ElasticsearchDistribution.Platform; import org.elasticsearch.gradle.ElasticsearchDistribution.Type; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.elasticsearch.gradle.info.BuildParams; import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin; +import org.elasticsearch.gradle.tool.Boilerplate; import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; @@ -37,6 +40,7 @@ import org.gradle.api.credentials.HttpHeaderCredentials; import org.gradle.api.file.FileTree; import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.provider.Provider; import org.gradle.api.tasks.Sync; import org.gradle.api.tasks.TaskProvider; import org.gradle.authentication.http.HttpHeaderAuthentication; @@ -71,11 +75,17 @@ public class DistributionDownloadPlugin implements Plugin { public void apply(Project project) { // this is needed for isInternal project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + + Provider dockerSupport = Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); distributionsContainer = project.container(ElasticsearchDistribution.class, name -> { Configuration fileConfiguration = 
project.getConfigurations().create("es_distro_file_" + name); Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name); - return new ElasticsearchDistribution(name, project.getObjects(), fileConfiguration, extractedConfiguration); + return new ElasticsearchDistribution(name, project.getObjects(), dockerSupport, fileConfiguration, extractedConfiguration); }); project.getExtensions().add(CONTAINER_NAME, distributionsContainer); diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java index ff6b53fa29447..83eaa87850114 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java @@ -19,13 +19,16 @@ package org.elasticsearch.gradle; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.gradle.api.Buildable; import org.gradle.api.artifacts.Configuration; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; +import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskDependency; import java.io.File; +import java.util.Collections; import java.util.Iterator; import java.util.Locale; @@ -110,6 +113,7 @@ public String toString() { } private final String name; + private final Provider dockerSupport; // pkg private so plugin can configure final Configuration configuration; private final Extracted extracted; @@ -119,14 +123,17 @@ public String toString() { private final Property platform; private final Property flavor; private final Property bundledJdk; + private final Property required; ElasticsearchDistribution( String name, ObjectFactory objectFactory, + Provider dockerSupport, Configuration fileConfiguration, Configuration extractedConfiguration ) { this.name = name; + this.dockerSupport = dockerSupport; this.configuration = fileConfiguration; this.version = objectFactory.property(String.class).convention(VersionProperties.getElasticsearch()); this.type = objectFactory.property(Type.class); @@ -134,6 +141,7 @@ public String toString() { this.platform = objectFactory.property(Platform.class); this.flavor = objectFactory.property(Flavor.class); this.bundledJdk = objectFactory.property(Boolean.class); + this.required = objectFactory.property(Boolean.class).convention(true); this.extracted = new Extracted(extractedConfiguration); } @@ -182,6 +190,14 @@ public void setBundledJdk(Boolean bundledJdk) { this.bundledJdk.set(bundledJdk); } + public boolean isRequired() { + return this.required.get(); + } + + public void setRequired(boolean required) { + this.required.set(required); + } + @Override public String toString() { return configuration.getSingleFile().toString(); @@ -203,6 +219,11 @@ public Extracted getExtracted() { @Override public TaskDependency getBuildDependencies() { + // For non-required Docker distributions, skip building the distribution is Docker is unavailable + if (getType() == Type.DOCKER && isRequired() == false && dockerSupport.get().getDockerAvailability().isAvailable == false) { + return task -> Collections.emptySet(); + } + return configuration.getBuildDependencies(); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java new file mode 100644 index 0000000000000..cb974aebee3ca --- /dev/null +++ 
b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java @@ -0,0 +1,61 @@ +package org.elasticsearch.gradle.docker; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.provider.Provider; + +import java.io.File; +import java.util.List; +import java.util.stream.Collectors; + +/** + *

+ * Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based
+ * Elasticsearch build tasks.
+ * <p>
+ * Additionally registers a task graph listener used to assert that a compatible Docker installation exists when tasks requiring Docker
+ * are scheduled for execution. Tasks may declare a Docker requirement via an extra property. If a compatible Docker installation is not
+ * available on the build system, an exception will be thrown prior to task execution.
+ *
+ * <pre>
+ *     task myDockerTask {
+ *         ext.requiresDocker = true
+ *     }
+ * </pre>
+ */ +public class DockerSupportPlugin implements Plugin { + public static final String DOCKER_SUPPORT_SERVICE_NAME = "dockerSupportService"; + public static final String DOCKER_ON_LINUX_EXCLUSIONS_FILE = ".ci/dockerOnLinuxExclusions"; + public static final String REQUIRES_DOCKER_ATTRIBUTE = "requiresDocker"; + + @Override + public void apply(Project project) { + if (project != project.getRootProject()) { + throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); + } + + Provider dockerSupportServiceProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent( + DOCKER_SUPPORT_SERVICE_NAME, + DockerSupportService.class, + spec -> spec.parameters( + params -> { params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE)); } + ) + ); + + // Ensure that if any tasks declare they require docker, we assert an available Docker installation exists + project.getGradle().getTaskGraph().whenReady(graph -> { + List dockerTasks = graph.getAllTasks().stream().filter(task -> { + ExtraPropertiesExtension ext = task.getExtensions().getExtraProperties(); + return ext.has(REQUIRES_DOCKER_ATTRIBUTE) && ext.get(REQUIRES_DOCKER_ATTRIBUTE).equals("true"); + }).map(Task::getPath).collect(Collectors.toList()); + + if (dockerTasks.isEmpty() == false) { + dockerSupportServiceProvider.get().assertDockerIsAvailable(dockerTasks); + } + }); + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java new file mode 100644 index 0000000000000..5360b43c4afd2 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java @@ -0,0 +1,375 @@ +package org.elasticsearch.gradle.docker; + +import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.info.BuildParams; +import org.gradle.api.GradleException; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.services.BuildService; +import org.gradle.api.services.BuildServiceParameters; +import org.gradle.process.ExecOperations; +import org.gradle.process.ExecResult; + +import javax.inject.Inject; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Build service for detecting available Docker installation and checking for compatibility with Elasticsearch Docker image build + * requirements. This includes a minimum version requirement, as well as the ability to run privileged commands. + */ +public abstract class DockerSupportService implements BuildService { + + private static Logger LOGGER = Logging.getLogger(DockerSupportService.class); + // Defines the possible locations of the Docker CLI. These will be searched in order. 
+ private static String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" }; + private static String[] DOCKER_COMPOSE_BINARIES = { "/usr/local/bin/docker-compose", "/usr/bin/docker-compose" }; + private static final Version MINIMUM_DOCKER_VERSION = Version.fromString("17.05.0"); + + private final ExecOperations execOperations; + private DockerAvailability dockerAvailability; + + @Inject + public DockerSupportService(ExecOperations execOperations) { + this.execOperations = execOperations; + } + + /** + * Searches for a functional Docker installation, and returns information about the search. + * + * @return the results of the search. + */ + public DockerAvailability getDockerAvailability() { + if (this.dockerAvailability == null) { + String dockerPath = null; + Result lastResult = null; + Version version = null; + boolean isVersionHighEnough = false; + boolean isComposeAvailable = false; + + // Check if the Docker binary exists + final Optional dockerBinary = getDockerPath(); + if (isBlacklistedOs() == false && dockerBinary.isPresent()) { + dockerPath = dockerBinary.get(); + + // Since we use a multi-stage Docker build, check the Docker version meets minimum requirement + lastResult = runCommand(dockerPath, "version", "--format", "{{.Server.Version}}"); + + if (lastResult.isSuccess()) { + version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED); + + isVersionHighEnough = version.onOrAfter(MINIMUM_DOCKER_VERSION); + + if (isVersionHighEnough) { + // Check that we can execute a privileged command + lastResult = runCommand(dockerPath, "images"); + + // If docker all checks out, see if docker-compose is available and working + Optional composePath = getDockerComposePath(); + if (lastResult.isSuccess() && composePath.isPresent()) { + isComposeAvailable = runCommand(composePath.get(), "version").isSuccess(); + } + } + } + } + + boolean isAvailable = isVersionHighEnough && lastResult != null && lastResult.isSuccess(); + + this.dockerAvailability = new DockerAvailability( + isAvailable, + isComposeAvailable, + isVersionHighEnough, + dockerPath, + version, + lastResult + ); + } + + return this.dockerAvailability; + } + + /** + * Given a list of tasks that requires Docker, check whether Docker is available, otherwise throw an exception. + * + * @throws GradleException if Docker is not available. The exception message gives the reason. + */ + void assertDockerIsAvailable(List tasks) { + DockerAvailability availability = getDockerAvailability(); + + // Docker installation is available and compatible + if (availability.isAvailable) { + return; + } + + // No Docker binary was located + if (availability.path == null) { + final String message = String.format( + Locale.ROOT, + "Docker (checked [%s]) is required to run the following task%s: \n%s", + String.join(", ", DOCKER_BINARIES), + tasks.size() > 1 ? 
"s" : "", + String.join("\n", tasks) + ); + throwDockerRequiredException(message); + } + + // Docker binaries were located, but did not meet the minimum version requirement + if (availability.lastCommand.isSuccess() && availability.isVersionHighEnough == false) { + final String message = String.format( + Locale.ROOT, + "building Docker images requires minimum Docker version of %s due to use of multi-stage builds yet was [%s]", + MINIMUM_DOCKER_VERSION, + availability.version + ); + throwDockerRequiredException(message); + } + + // Some other problem, print the error + final String message = String.format( + Locale.ROOT, + "a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s: \n%s\n" + + "the problem is that Docker exited with exit code [%d] with standard error output:\n%s", + availability.path, + availability.version == null ? "" : " v" + availability.version, + tasks.size() > 1 ? "s" : "", + String.join("\n", tasks), + availability.lastCommand.exitCode, + availability.lastCommand.stderr.trim() + ); + throwDockerRequiredException(message); + } + + private boolean isBlacklistedOs() { + // We don't attempt to check the current flavor and version of Linux unless we're + // running in CI, because we don't want to stop people running the Docker tests in + // their own environments if they really want to. + if (BuildParams.isCi() == false) { + return false; + } + + // Only some hosts in CI are configured with Docker. We attempt to work out the OS + // and version, so that we know whether to expect to find Docker. We don't attempt + // to probe for whether Docker is available, because that doesn't tell us whether + // Docker is unavailable when it should be. + final Path osRelease = Paths.get("/etc/os-release"); + + if (Files.exists(osRelease)) { + Map values; + + try { + final List osReleaseLines = Files.readAllLines(osRelease); + values = parseOsRelease(osReleaseLines); + } catch (IOException e) { + throw new GradleException("Failed to read /etc/os-release", e); + } + + final String id = deriveId(values); + final boolean blacklisted = getLinuxExclusionList().contains(id); + + if (blacklisted) { + LOGGER.warn("Linux OS id [{}] is present in the Docker exclude list. 
Tasks requiring Docker will be disabled.", id); + } + + return blacklisted; + } + + return false; + } + + private List getLinuxExclusionList() { + File exclusionsFile = getParameters().getExclusionsFile(); + + if (exclusionsFile.exists()) { + try { + return Files.readAllLines(exclusionsFile.toPath()) + .stream() + .map(String::trim) + .filter(line -> (line.isEmpty() || line.startsWith("#")) == false) + .collect(Collectors.toList()); + } catch (IOException e) { + throw new GradleException("Failed to read " + exclusionsFile.getAbsolutePath(), e); + } + } else { + return Collections.emptyList(); + } + } + + // visible for testing + static String deriveId(Map values) { + return values.get("ID") + "-" + values.get("VERSION_ID"); + } + + // visible for testing + static Map parseOsRelease(final List osReleaseLines) { + final Map values = new HashMap<>(); + + osReleaseLines.stream().map(String::trim).filter(line -> (line.isEmpty() || line.startsWith("#")) == false).forEach(line -> { + final String[] parts = line.split("=", 2); + final String key = parts[0]; + // remove optional leading and trailing quotes and whitespace + final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", ""); + + values.put(key, value); + }); + + return values; + } + + /** + * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does + * not check whether the Docker installation appears usable, see {@link #getDockerAvailability()} + * instead. + * + * @return the path to a CLI, if available. + */ + private Optional getDockerPath() { + // Check if the Docker binary exists + return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + } + + /** + * Searches the entries in {@link #DOCKER_COMPOSE_BINARIES} for the Docker Compose CLI. This method does + * not check whether the installation appears usable, see {@link #getDockerAvailability()} instead. + * + * @return the path to a CLI, if available. + */ + private Optional getDockerComposePath() { + // Check if the Docker binary exists + return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + } + + private void throwDockerRequiredException(final String message) { + throwDockerRequiredException(message, null); + } + + private void throwDockerRequiredException(final String message, Exception e) { + throw new GradleException( + message + "\nyou can address this by attending to the reported issue, " + "removing the offending tasks from being executed.", + e + ); + } + + /** + * Runs a command and captures the exit code, standard output and standard error. + * + * @param args the command and any arguments to execute + * @return a object that captures the result of running the command. If an exception occurring + * while running the command, or the process was killed after reaching the 10s timeout, + * then the exit code will be -1. + */ + private Result runCommand(String... args) { + if (args.length == 0) { + throw new IllegalArgumentException("Cannot execute with no command"); + } + + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + + final ExecResult execResult = execOperations.exec(spec -> { + // The redundant cast is to silence a compiler warning. 
+ spec.setCommandLine((Object[]) args); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); + } + + /** + * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. + */ + public static class DockerAvailability { + /** + * Indicates whether Docker is available and meets the required criteria. + * True if, and only if, Docker is: + *
+     * <ul>
+     *     <li>Installed</li>
+     *     <li>Executable</li>
+     *     <li>Is at least the minimum compatible version</li>
+     *     <li>Can execute a command that requires privileges</li>
+     * </ul>
+ */ + public final boolean isAvailable; + + /** + * True if docker-compose is available. + */ + public final boolean isComposeAvailable; + + /** + * True if the installed Docker version is >= 17.05 + */ + public final boolean isVersionHighEnough; + + /** + * The path to the Docker CLI, or null + */ + public final String path; + + /** + * The installed Docker version, or null + */ + public final Version version; + + /** + * Information about the last command executes while probing Docker, or null. + */ + final Result lastCommand; + + DockerAvailability( + boolean isAvailable, + boolean isComposeAvailable, + boolean isVersionHighEnough, + String path, + Version version, + Result lastCommand + ) { + this.isAvailable = isAvailable; + this.isComposeAvailable = isComposeAvailable; + this.isVersionHighEnough = isVersionHighEnough; + this.path = path; + this.version = version; + this.lastCommand = lastCommand; + } + } + + /** + * This class models the result of running a command. It captures the exit code, standard output and standard error. + */ + private static class Result { + final int exitCode; + final String stdout; + final String stderr; + + Result(int exitCode, String stdout, String stderr) { + this.exitCode = exitCode; + this.stdout = stdout; + this.stderr = stderr; + } + + boolean isSuccess() { + return exitCode == 0; + } + + public String toString() { + return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]"; + } + } + + interface Parameters extends BuildServiceParameters { + File getExclusionsFile(); + + void setExclusionsFile(File exclusionsFile); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java index 79ac452307da2..8e4aa01275c91 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java @@ -27,22 +27,21 @@ import org.elasticsearch.gradle.ElasticsearchDistribution.Type; import org.elasticsearch.gradle.Jdk; import org.elasticsearch.gradle.JdkDownloadPlugin; -import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.elasticsearch.gradle.info.BuildParams; +import org.elasticsearch.gradle.tool.Boilerplate; import org.elasticsearch.gradle.vagrant.BatsProgressLogger; import org.elasticsearch.gradle.vagrant.VagrantBasePlugin; import org.elasticsearch.gradle.vagrant.VagrantExtension; -import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.Directory; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.provider.Provider; @@ -56,7 +55,6 @@ import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -71,8 +69,6 @@ import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; public class DistroTestPlugin 
implements Plugin { - private static final Logger logger = Logging.getLogger(DistroTestPlugin.class); - private static final String SYSTEM_JDK_VERSION = "11.0.2+9"; private static final String SYSTEM_JDK_VENDOR = "openjdk"; private static final String GRADLE_JDK_VERSION = "13.0.1+9@cec27d702aa74d5a8630c65ae61e4305"; @@ -90,11 +86,15 @@ public class DistroTestPlugin implements Plugin { @Override public void apply(Project project) { - final boolean runDockerTests = shouldRunDockerTests(project); - + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); project.getPluginManager().apply(DistributionDownloadPlugin.class); project.getPluginManager().apply("elasticsearch.build"); + Provider dockerSupport = Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); + // TODO: it would be useful to also have the SYSTEM_JAVA_HOME setup in the root project, so that running from GCP only needs // a java for gradle to run, and the tests are self sufficient and consistent with the java they use @@ -103,17 +103,15 @@ public void apply(Project project) { Provider upgradeDir = project.getLayout().getBuildDirectory().dir("packaging/upgrade"); Provider pluginsDir = project.getLayout().getBuildDirectory().dir("packaging/plugins"); - List distributions = configureDistributions(project, upgradeVersion, runDockerTests); + List distributions = configureDistributions(project, upgradeVersion); TaskProvider copyDistributionsTask = configureCopyDistributionsTask(project, distributionsDir); TaskProvider copyUpgradeTask = configureCopyUpgradeTask(project, upgradeVersion, upgradeDir); TaskProvider copyPluginsTask = configureCopyPluginsTask(project, pluginsDir); TaskProvider destructiveDistroTest = project.getTasks().register("destructiveDistroTest"); for (ElasticsearchDistribution distribution : distributions) { - if (distribution.getType() != Type.DOCKER || runDockerTests) { - TaskProvider destructiveTask = configureDistroTest(project, distribution); - destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask)); - } + TaskProvider destructiveTask = configureDistroTest(project, distribution, dockerSupport); + destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask)); } Map> batsTests = new HashMap<>(); configureBatsTest(project, "plugins", distributionsDir, copyDistributionsTask, copyPluginsTask).configure( @@ -324,8 +322,14 @@ private static TaskProvider configureVMWrapperTask( }); } - private static TaskProvider configureDistroTest(Project project, ElasticsearchDistribution distribution) { + private static TaskProvider configureDistroTest( + Project project, + ElasticsearchDistribution distribution, + Provider dockerSupport + ) { return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class, t -> { + // Disable Docker distribution tests unless a Docker installation is available + t.onlyIf(t2 -> distribution.getType() != Type.DOCKER || dockerSupport.get().getDockerAvailability().isAvailable); t.getOutputs().doNotCacheIf("Build cache is disabled for packaging tests", Specs.satisfyAll()); t.setMaxParallelForks(1); t.setWorkingDir(project.getProjectDir()); @@ -354,7 +358,7 @@ private static TaskProvider configureBatsTest( }); } - private List configureDistributions(Project project, Version upgradeVersion, boolean runDockerTests) { + private List configureDistributions(Project project, Version upgradeVersion) { NamedDomainObjectContainer distributions = 
DistributionDownloadPlugin.getContainer(project); List currentDistros = new ArrayList<>(); List upgradeDistros = new ArrayList<>(); @@ -363,7 +367,7 @@ private List configureDistributions(Project project, for (Flavor flavor : Flavor.values()) { for (boolean bundledJdk : Arrays.asList(true, false)) { // All our Docker images include a bundled JDK so it doesn't make sense to test without one - boolean skip = type == Type.DOCKER && (runDockerTests == false || bundledJdk == false); + boolean skip = type == Type.DOCKER && bundledJdk == false; if (skip == false) { addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch(), currentDistros); @@ -430,7 +434,6 @@ private static void addDistro( String version, List container ) { - String name = distroId(type, platform, flavor, bundledJdk) + "-" + version; if (distributions.findByName(name) != null) { return; @@ -444,6 +447,13 @@ private static void addDistro( d.setBundledJdk(bundledJdk); d.setVersion(version); }); + + // Allow us to gracefully omit building Docker distributions if Docker is not available on the system. + // In such a case as we can't build the Docker images we'll simply skip the corresponding tests. + if (type == Type.DOCKER) { + distro.setRequired(false); + } + container.add(distro); } @@ -460,98 +470,4 @@ private static String destructiveDistroTestTaskName(ElasticsearchDistribution di Type type = distro.getType(); return "destructiveDistroTest." + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk()); } - - static Map parseOsRelease(final List osReleaseLines) { - final Map values = new HashMap<>(); - - osReleaseLines.stream().map(String::trim).filter(line -> (line.isEmpty() || line.startsWith("#")) == false).forEach(line -> { - final String[] parts = line.split("=", 2); - final String key = parts[0]; - // remove optional leading and trailing quotes and whitespace - final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", ""); - - values.put(key, value); - }); - - return values; - } - - static String deriveId(final Map osRelease) { - return osRelease.get("ID") + "-" + osRelease.get("VERSION_ID"); - } - - private static List getLinuxExclusionList(Project project) { - final String exclusionsFilename = "dockerOnLinuxExclusions"; - final Path exclusionsPath = project.getRootDir().toPath().resolve(Path.of(".ci", exclusionsFilename)); - - try { - return Files.readAllLines(exclusionsPath) - .stream() - .map(String::trim) - .filter(line -> (line.isEmpty() || line.startsWith("#")) == false) - .collect(Collectors.toList()); - } catch (IOException e) { - throw new GradleException("Failed to read .ci/" + exclusionsFilename, e); - } - } - - /** - * The {@link DistroTestPlugin} generates a number of test tasks, some - * of which are Docker packaging tests. When running on the host OS or in CI - * i.e. not in a Vagrant VM, only certain operating systems are supported. This - * method determines whether the Docker tests should be run on the host - * OS. Essentially, unless an OS and version is specifically excluded, we expect - * to be able to run Docker and test the Docker images. - */ - private static boolean shouldRunDockerTests(Project project) { - switch (OS.current()) { - case WINDOWS: - // Not yet supported. - return false; - - case MAC: - // Assume that Docker for Mac is installed, since Docker is part of the dev workflow. 
- return true; - - case LINUX: - // We don't attempt to check the current flavor and version of Linux unless we're - // running in CI, because we don't want to stop people running the Docker tests in - // their own environments if they really want to. - if (BuildParams.isCi() == false) { - return true; - } - - // Only some hosts in CI are configured with Docker. We attempt to work out the OS - // and version, so that we know whether to expect to find Docker. We don't attempt - // to probe for whether Docker is available, because that doesn't tell us whether - // Docker is unavailable when it should be. - final Path osRelease = Paths.get("/etc/os-release"); - - if (Files.exists(osRelease)) { - Map values; - - try { - final List osReleaseLines = Files.readAllLines(osRelease); - values = parseOsRelease(osReleaseLines); - } catch (IOException e) { - throw new GradleException("Failed to read /etc/os-release", e); - } - - final String id = deriveId(values); - - final boolean shouldExclude = getLinuxExclusionList(project).contains(id); - - logger.warn("Linux OS id [" + id + "] is " + (shouldExclude ? "" : "not ") + "present in the Docker exclude list"); - - return shouldExclude == false; - } - - logger.warn("/etc/os-release does not exist!"); - return false; - - default: - logger.warn("Unknown OS [" + OS.current() + "], answering false to shouldRunDockerTests()"); - return false; - } - } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 4e0655f4e2877..ebdf24d0a318f 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -36,6 +36,8 @@ import java.io.File; +import static org.elasticsearch.gradle.tool.Boilerplate.noop; + public class TestClustersPlugin implements Plugin { public static final String EXTENSION_NAME = "testClusters"; @@ -59,7 +61,7 @@ public void apply(Project project) { createListClustersTask(project, container); // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, spec -> {}); + project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); // register throttle so we only run at most max-workers/2 nodes concurrently project.getGradle() diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 52486844c5deb..faa1f6aca623f 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -21,20 +21,27 @@ import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; import com.avast.gradle.dockercompose.ServiceInfo; +import com.avast.gradle.dockercompose.tasks.ComposeDown; +import com.avast.gradle.dockercompose.tasks.ComposePull; import com.avast.gradle.dockercompose.tasks.ComposeUp; -import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import 
org.elasticsearch.gradle.info.BuildParams; import org.elasticsearch.gradle.precommit.TestingConventionsTasks; +import org.elasticsearch.gradle.tool.Boilerplate; import org.gradle.api.Action; import org.gradle.api.DefaultTask; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.testing.Test; import java.io.File; @@ -46,75 +53,76 @@ public class TestFixturesPlugin implements Plugin { + private static final Logger LOGGER = Logging.getLogger(TestFixturesPlugin.class); private static final String DOCKER_COMPOSE_THROTTLE = "dockerComposeThrottle"; static final String DOCKER_COMPOSE_YML = "docker-compose.yml"; @Override public void apply(Project project) { - TaskContainer tasks = project.getTasks(); + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + TaskContainer tasks = project.getTasks(); TestFixtureExtension extension = project.getExtensions().create("testFixtures", TestFixtureExtension.class, project); Provider dockerComposeThrottle = project.getGradle() .getSharedServices() .registerIfAbsent(DOCKER_COMPOSE_THROTTLE, DockerComposeThrottle.class, spec -> spec.getMaxParallelUsages().set(1)); + Provider dockerSupport = Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); + ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class); File testfixturesDir = project.file("testfixtures_shared"); ext.set("testFixturesDir", testfixturesDir); if (project.file(DOCKER_COMPOSE_YML).exists()) { - Task buildFixture = project.getTasks().create("buildFixture"); - Task pullFixture = project.getTasks().create("pullFixture"); - Task preProcessFixture = project.getTasks().create("preProcessFixture"); - preProcessFixture.doFirst((task) -> { - try { - Files.createDirectories(testfixturesDir.toPath()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); - preProcessFixture.getOutputs().dir(testfixturesDir); - buildFixture.dependsOn(preProcessFixture); - pullFixture.dependsOn(preProcessFixture); - Task postProcessFixture = project.getTasks().create("postProcessFixture"); - postProcessFixture.dependsOn(buildFixture); - preProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); - postProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); - - if (dockerComposeSupported() == false) { - preProcessFixture.setEnabled(false); - postProcessFixture.setEnabled(false); - buildFixture.setEnabled(false); - pullFixture.setEnabled(false); - } else { - project.getPluginManager().apply(BasePlugin.class); - project.getPluginManager().apply(DockerComposePlugin.class); - ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class); - composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML)); - composeExtension.setRemoveContainers(true); - composeExtension.setExecutable( - project.file("/usr/local/bin/docker-compose").exists() ? 
"/usr/local/bin/docker-compose" : "/usr/bin/docker-compose" - ); - - buildFixture.dependsOn(tasks.named("composeUp")); - pullFixture.dependsOn(tasks.named("composePull")); - tasks.named("composeUp").configure(t -> { - // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions - if (BuildParams.isCi()) { - t.usesService(dockerComposeThrottle); + project.getPluginManager().apply(BasePlugin.class); + project.getPluginManager().apply(DockerComposePlugin.class); + + TaskProvider preProcessFixture = project.getTasks().register("preProcessFixture", t -> { + t.getOutputs().dir(testfixturesDir); + t.doFirst(t2 -> { + try { + Files.createDirectories(testfixturesDir.toPath()); + } catch (IOException e) { + throw new UncheckedIOException(e); } - t.mustRunAfter(preProcessFixture); }); - tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); - tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir))); + }); + TaskProvider buildFixture = project.getTasks() + .register("buildFixture", t -> t.dependsOn(preProcessFixture, tasks.named("composeUp"))); + TaskProvider postProcessFixture = project.getTasks().register("postProcessFixture", task -> { + task.dependsOn(buildFixture); configureServiceInfoForTask( - postProcessFixture, + task, project, false, - (name, port) -> postProcessFixture.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) + (name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) ); - } + }); + + maybeSkipTask(dockerSupport, preProcessFixture); + maybeSkipTask(dockerSupport, postProcessFixture); + maybeSkipTask(dockerSupport, buildFixture); + + ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class); + composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML)); + composeExtension.setRemoveContainers(true); + composeExtension.setExecutable( + project.file("/usr/local/bin/docker-compose").exists() ? 
"/usr/local/bin/docker-compose" : "/usr/bin/docker-compose" + ); + + tasks.named("composeUp").configure(t -> { + // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions + if (BuildParams.isCi()) { + t.usesService(dockerComposeThrottle); + } + t.mustRunAfter(preProcessFixture); + }); + tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); + tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir))); } else { project.afterEvaluate(spec -> { if (extension.fixtures.isEmpty()) { @@ -129,43 +137,43 @@ public void apply(Project project) { extension.fixtures.matching(fixtureProject -> fixtureProject.equals(project) == false) .all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath())); - conditionTaskByType(tasks, extension, Test.class); - conditionTaskByType(tasks, extension, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); - conditionTaskByType(tasks, extension, TestingConventionsTasks.class); - conditionTaskByType(tasks, extension, ComposeUp.class); - - if (dockerComposeSupported() == false) { - project.getLogger() - .info( - "Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " - + "but none could be found so these will be skipped", - project.getPath() - ); - return; - } - - tasks.withType(Test.class, task -> extension.fixtures.all(fixtureProject -> { - fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(task::dependsOn); - fixtureProject.getTasks().matching(it -> it.getName().equals("composeDown")).all(task::finalizedBy); + // Skip docker compose tasks if it is unavailable + maybeSkipTasks(tasks, dockerSupport, Test.class); + maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); + maybeSkipTasks(tasks, dockerSupport, TestingConventionsTasks.class); + maybeSkipTasks(tasks, dockerSupport, ComposeUp.class); + maybeSkipTasks(tasks, dockerSupport, ComposePull.class); + maybeSkipTasks(tasks, dockerSupport, ComposeDown.class); + + tasks.withType(Test.class).configureEach(task -> extension.fixtures.all(fixtureProject -> { + task.dependsOn(fixtureProject.getTasks().named("postProcessFixture")); + task.finalizedBy(fixtureProject.getTasks().named("composeDown")); configureServiceInfoForTask( task, fixtureProject, true, (name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host) ); - task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture")); })); } - private void conditionTaskByType(TaskContainer tasks, TestFixtureExtension extension, Class taskClass) { - tasks.withType(taskClass) - .configureEach( - task -> task.onlyIf( - spec -> extension.fixtures.stream() - .anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false - ) - ); + private void maybeSkipTasks(TaskContainer tasks, Provider dockerSupport, Class taskClass) { + tasks.withType(taskClass).configureEach(t -> maybeSkipTask(dockerSupport, t)); + } + + private void maybeSkipTask(Provider dockerSupport, TaskProvider task) { + task.configure(t -> maybeSkipTask(dockerSupport, t)); + } + + private void maybeSkipTask(Provider dockerSupport, Task task) { + task.onlyIf(spec -> { + boolean isComposeAvailable = dockerSupport.get().getDockerAvailability().isComposeAvailable; + if (isComposeAvailable == false) { + LOGGER.info("Task {} requires docker-compose but it is 
unavailable. Task will be skipped.", task.getPath()); + } + return isComposeAvailable; + }); } private void configureServiceInfoForTask( @@ -176,10 +184,11 @@ private void configureServiceInfoForTask( ) { // Configure ports for the tests as system properties. // We only know these at execution time so we need to do it in doFirst - TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class); task.doFirst(new Action() { @Override public void execute(Task theTask) { + TestFixtureExtension extension = theTask.getProject().getExtensions().getByType(TestFixtureExtension.class); + fixtureProject.getExtensions() .getByType(ComposeExtension.class) .getServicesInfos() @@ -204,19 +213,6 @@ public void execute(Task theTask) { }); } - public static boolean dockerComposeSupported() { - if (OS.current().equals(OS.WINDOWS)) { - return false; - } - final boolean hasDockerCompose = (new File("/usr/local/bin/docker-compose")).exists() - || (new File("/usr/bin/docker-compose").exists()); - return hasDockerCompose && Boolean.parseBoolean(System.getProperty("tests.fixture.enabled", "true")); - } - - private void disableTaskByType(TaskContainer tasks, Class type) { - tasks.withType(type, task -> task.setEnabled(false)); - } - @SuppressWarnings("unchecked") private Class getTaskClass(String type) { Class aClass; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java index 3c1c44cc1eadb..85002b9c49821 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java @@ -38,6 +38,10 @@ public abstract class Boilerplate { + public static Action noop() { + return t -> {}; + } + public static SourceSetContainer getJavaSourceSets(Project project) { return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java deleted file mode 100644 index af2001996513a..0000000000000 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java +++ /dev/null @@ -1,234 +0,0 @@ -package org.elasticsearch.gradle.tool; - -import org.elasticsearch.gradle.Version; -import org.gradle.api.GradleException; -import org.gradle.api.Project; -import org.gradle.process.ExecResult; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.util.List; -import java.util.Locale; -import java.util.Optional; - -/** - * Contains utilities for checking whether Docker is installed, is executable, - * has a recent enough version, and appears to be functional. The Elasticsearch build - * requires Docker >= 17.05 as it uses a multi-stage build. - */ -public class DockerUtils { - /** - * Defines the possible locations of the Docker CLI. These will be searched in order. - */ - private static String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" }; - - /** - * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does - * not check whether the Docker installation appears usable, see {@link #getDockerAvailability(Project)} - * instead. - * - * @return the path to a CLI, if available. 
- */ - public static Optional getDockerPath() { - // Check if the Docker binary exists - return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); - } - - /** - * Searches for a functional Docker installation, and returns information about the search. - * @return the results of the search. - */ - private static DockerAvailability getDockerAvailability(Project project) { - String dockerPath = null; - Result lastResult = null; - Version version = null; - boolean isVersionHighEnough = false; - - // Check if the Docker binary exists - final Optional dockerBinary = getDockerPath(); - - if (dockerBinary.isPresent()) { - dockerPath = dockerBinary.get(); - - // Since we use a multi-stage Docker build, check the Docker version since 17.05 - lastResult = runCommand(project, dockerPath, "version", "--format", "{{.Server.Version}}"); - - if (lastResult.isSuccess()) { - version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED); - - isVersionHighEnough = version.onOrAfter("17.05.0"); - - if (isVersionHighEnough) { - // Check that we can execute a privileged command - lastResult = runCommand(project, dockerPath, "images"); - } - } - } - - boolean isAvailable = isVersionHighEnough && lastResult.isSuccess(); - - return new DockerAvailability(isAvailable, isVersionHighEnough, dockerPath, version, lastResult); - } - - /** - * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability(Project)}}. - */ - private static class DockerAvailability { - /** - * Indicates whether Docker is available and meets the required criteria. - * True if, and only if, Docker is: - *
-     * <ul>
-     *     <li>Installed</li>
-     *     <li>Executable</li>
-     *     <li>Is at least version 17.05</li>
-     *     <li>Can execute a command that requires privileges</li>
-     * </ul>
- */ - final boolean isAvailable; - - /** - * True if the installed Docker version is >= 17.05 - */ - final boolean isVersionHighEnough; - - /** - * The path to the Docker CLI, or null - */ - public final String path; - - /** - * The installed Docker version, or null - */ - public final Version version; - - /** - * Information about the last command executes while probing Docker, or null. - */ - final Result lastCommand; - - DockerAvailability(boolean isAvailable, boolean isVersionHighEnough, String path, Version version, Result lastCommand) { - this.isAvailable = isAvailable; - this.isVersionHighEnough = isVersionHighEnough; - this.path = path; - this.version = version; - this.lastCommand = lastCommand; - } - } - - /** - * Given a list of tasks that requires Docker, check whether Docker is available, otherwise - * throw an exception. - * @param project a Gradle project - * @param tasks the tasks that require Docker - * @throws GradleException if Docker is not available. The exception message gives the reason. - */ - public static void assertDockerIsAvailable(Project project, List tasks) { - DockerAvailability availability = getDockerAvailability(project); - - if (availability.isAvailable) { - return; - } - - /* - * There are tasks in the task graph that require Docker. - * Now we are failing because either the Docker binary does - * not exist or because execution of a privileged Docker - * command failed. - */ - if (availability.path == null) { - final String message = String.format( - Locale.ROOT, - "Docker (checked [%s]) is required to run the following task%s: \n%s", - String.join(", ", DOCKER_BINARIES), - tasks.size() > 1 ? "s" : "", - String.join("\n", tasks) - ); - throwDockerRequiredException(message); - } - - if (availability.lastCommand.isSuccess() && availability.isVersionHighEnough == false) { - final String message = String.format( - Locale.ROOT, - "building Docker images requires Docker version 17.05+ due to use of multi-stage builds yet was [%s]", - availability.version - ); - throwDockerRequiredException(message); - } - - // Some other problem, print the error - final String message = String.format( - Locale.ROOT, - "a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s: \n%s\n" - + "the problem is that Docker exited with exit code [%d] with standard error output:\n%s", - availability.path, - availability.version == null ? "" : " v" + availability.version, - tasks.size() > 1 ? "s" : "", - String.join("\n", tasks), - availability.lastCommand.exitCode, - availability.lastCommand.stderr.trim() - ); - throwDockerRequiredException(message); - } - - private static void throwDockerRequiredException(final String message) { - throwDockerRequiredException(message, null); - } - - private static void throwDockerRequiredException(final String message, Exception e) { - throw new GradleException( - message - + "\nyou can address this by attending to the reported issue, " - + "removing the offending tasks from being executed, " - + "or by passing -Dbuild.docker=false", - e - ); - } - - /** - * Runs a command and captures the exit code, standard output and standard error. - * @param args the command and any arguments to execute - * @return a object that captures the result of running the command. If an exception occurring - * while running the command, or the process was killed after reaching the 10s timeout, - * then the exit code will be -1. - */ - private static Result runCommand(Project project, String... 
args) { - if (args.length == 0) { - throw new IllegalArgumentException("Cannot execute with no command"); - } - - ByteArrayOutputStream stdout = new ByteArrayOutputStream(); - ByteArrayOutputStream stderr = new ByteArrayOutputStream(); - - final ExecResult execResult = project.exec(spec -> { - // The redundant cast is to silence a compiler warning. - spec.setCommandLine((Object[]) args); - spec.setStandardOutput(stdout); - spec.setErrorOutput(stderr); - spec.setIgnoreExitValue(true); - }); - return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); - } - - /** - * This class models the result of running a command. It captures the exit code, standard output and standard error. - */ - private static class Result { - final int exitCode; - final String stdout; - final String stderr; - - Result(int exitCode, String stdout, String stderr) { - this.exitCode = exitCode; - this.stdout = stdout; - this.stderr = stderr; - } - - boolean isSuccess() { - return exitCode == 0; - } - - public String toString() { - return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]"; - } - } -} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties new file mode 100644 index 0000000000000..fec4e97bf67a5 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.docker.DockerSupportPlugin diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java similarity index 89% rename from buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java rename to buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java index 96b6208be7205..cba7590701154 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java @@ -1,14 +1,16 @@ -package org.elasticsearch.gradle.test; +package org.elasticsearch.gradle.docker; + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.gradle.test.DistroTestPlugin.deriveId; -import static org.elasticsearch.gradle.test.DistroTestPlugin.parseOsRelease; +import static org.elasticsearch.gradle.docker.DockerSupportService.deriveId; +import static org.elasticsearch.gradle.docker.DockerSupportService.parseOsRelease; import static org.hamcrest.CoreMatchers.equalTo; -public class DistroTestPluginTests extends GradleIntegrationTestCase { +public class DockerSupportServiceTests extends GradleIntegrationTestCase { public void testParseOsReleaseOnOracle() { final List lines = List.of( diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index f9d12ce242da8..efaee7aa05071 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -1,4 +1,4 @@ -import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.info.BuildParams @@ -6,6 +6,7 @@ import 
org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.distribution-download' testFixtures.useFixture() @@ -105,10 +106,19 @@ task copyKeystore(type: Sync) { } } -preProcessFixture { - if (TestFixturesPlugin.dockerComposeSupported()) { - dependsOn assemble +elasticsearch_distributions { + Flavor.values().each { distroFlavor -> + "docker_$distroFlavor" { + flavor = distroFlavor + type = 'docker' + version = VersionProperties.getElasticsearch() + required = false // This ensures we skip this testing if Docker is unavailable + } } +} + +preProcessFixture { + dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss dependsOn copyKeystore doLast { // tests expect to have an empty repo @@ -140,16 +150,13 @@ task integTest(type: Test) { outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true } maxParallelForks = '1' include '**/*IT.class' - // don't add the tasks to build the docker images if we have no way of testing them - if (TestFixturesPlugin.dockerComposeSupported()) { - dependsOn assemble - } } check.dependsOn integTest void addBuildDockerImage(final boolean oss) { final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) { + ext.requiresDocker = true // mark this task as requiring docker to execute inputs.files(tasks.named(taskName("copy", oss, "DockerContext"))) List tags if (oss) { @@ -179,7 +186,6 @@ void addBuildDockerImage(final boolean oss) { } } assemble.dependsOn(buildDockerImageTask) - BuildPlugin.requireDocker(buildDockerImageTask) } for (final boolean oss : [false, true]) { diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 672b84f5912c2..3e32a92366804 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -97,7 +97,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', executable = "${BuildParams.runtimeJavaHome}/bin/java" env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" maxWaitInSeconds 60 - onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false } + onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.state.executed && BuildParams.inFipsJvm == false } waitCondition = { fixture, ant -> // the hdfs.MiniHDFS fixture writes the ports file when // it's ready, so we can just wait for the file to exist diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index f3027a0d5b91b..04021f337882b 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -16,10 +16,12 @@ * specific language governing permissions and limitations * under the License. 
*/ +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.distribution-download' testFixtures.useFixture() @@ -42,15 +44,17 @@ task copyKeystore(type: Sync) { } } -preProcessFixture { - if (TestFixturesPlugin.dockerComposeSupported()) { - if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { - dependsOn ":distribution:docker:buildDockerImage" - } else { - dependsOn ":distribution:docker:buildOssDockerImage" - } +elasticsearch_distributions { + docker { + type = 'docker' + flavor = System.getProperty('tests.distribution', 'default') + version = VersionProperties.getElasticsearch() + required = false // This ensures we skip this testing if Docker is unavailable } - dependsOn copyKeystore +} + +preProcessFixture { + dependsOn copyKeystore, elasticsearch_distributions.docker doLast { // tests expect to have an empty repo project.delete( @@ -68,14 +72,12 @@ preProcessFixture { } } -if (TestFixturesPlugin.dockerComposeSupported()) { - dockerCompose { - tcpPortsToIgnoreWhenWaiting = [9600, 9601] - if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { - useComposeFiles = ['docker-compose.yml'] - } else { - useComposeFiles = ['docker-compose-oss.yml'] - } +dockerCompose { + tcpPortsToIgnoreWhenWaiting = [9600, 9601] + if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { + useComposeFiles = ['docker-compose.yml'] + } else { + useComposeFiles = ['docker-compose-oss.yml'] } } @@ -100,10 +102,6 @@ task integTest(type: Test) { outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true } maxParallelForks = '1' include '**/*IT.class' - // don't add the tasks to build the docker images if we have no way of testing them - if (TestFixturesPlugin.dockerComposeSupported()) { - dependsOn ":distribution:docker:buildDockerImage" - } } check.dependsOn integTest From 08c44c7b6afb8268cfabfa9e128494ab83bc4cab Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 20 Feb 2020 10:03:10 -0800 Subject: [PATCH 2/6] Address feedback Signed-off-by: Mark Vieira --- .../gradle/docker/DockerSupportService.java | 10 +++++----- distribution/docker/build.gradle | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java index 5360b43c4afd2..a83431797f683 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java @@ -60,7 +60,7 @@ public DockerAvailability getDockerAvailability() { // Check if the Docker binary exists final Optional dockerBinary = getDockerPath(); - if (isBlacklistedOs() == false && dockerBinary.isPresent()) { + if (isExcludedOs() == false && dockerBinary.isPresent()) { dockerPath = dockerBinary.get(); // Since we use a multi-stage Docker build, check the Docker version meets minimum requirement @@ -150,7 +150,7 @@ void assertDockerIsAvailable(List tasks) { throwDockerRequiredException(message); } - private boolean isBlacklistedOs() { + private boolean isExcludedOs() { // We don't attempt to check the current flavor and version of Linux unless we're // running in CI, because we don't want to stop people running the Docker tests in // 
their own environments if they really want to. @@ -175,13 +175,13 @@ private boolean isBlacklistedOs() { } final String id = deriveId(values); - final boolean blacklisted = getLinuxExclusionList().contains(id); + final boolean excluded = getLinuxExclusionList().contains(id); - if (blacklisted) { + if (excluded) { LOGGER.warn("Linux OS id [{}] is present in the Docker exclude list. Tasks requiring Docker will be disabled.", id); } - return blacklisted; + return excluded; } return false; diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index efaee7aa05071..2d263bf361e1f 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -112,7 +112,7 @@ elasticsearch_distributions { flavor = distroFlavor type = 'docker' version = VersionProperties.getElasticsearch() - required = false // This ensures we skip this testing if Docker is unavailable + required = false // This ensures we don't attempt to build images if docker is unavailable } } } From 4a3cf2d451fa1c485393616db11de37b787afe66 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 20 Feb 2020 10:54:06 -0800 Subject: [PATCH 3/6] Address feedback Signed-off-by: Mark Vieira --- .../gradle/ElasticsearchDistribution.java | 25 +++++++++++++------ .../gradle/docker/DockerSupportPlugin.java | 14 +++++------ .../gradle/docker/DockerSupportService.java | 4 +-- .../gradle/test/DistroTestPlugin.java | 6 +++-- distribution/docker/build.gradle | 2 +- qa/remote-clusters/build.gradle | 2 +- 6 files changed, 33 insertions(+), 20 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java index 83eaa87850114..0b05ec79c6b93 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java @@ -123,7 +123,7 @@ public String toString() { private final Property platform; private final Property flavor; private final Property bundledJdk; - private final Property required; + private final Property failIfUnavailable; ElasticsearchDistribution( String name, @@ -141,7 +141,7 @@ public String toString() { this.platform = objectFactory.property(Platform.class); this.flavor = objectFactory.property(Flavor.class); this.bundledJdk = objectFactory.property(Boolean.class); - this.required = objectFactory.property(Boolean.class).convention(true); + this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true); this.extracted = new Extracted(extractedConfiguration); } @@ -190,12 +190,12 @@ public void setBundledJdk(Boolean bundledJdk) { this.bundledJdk.set(bundledJdk); } - public boolean isRequired() { - return this.required.get(); + public boolean getFailIfUnavailable() { + return this.failIfUnavailable.get(); } - public void setRequired(boolean required) { - this.required.set(required); + public void setFailIfUnavailable(boolean failIfUnavailable) { + this.failIfUnavailable.set(failIfUnavailable); } @Override @@ -220,7 +220,7 @@ public Extracted getExtracted() { @Override public TaskDependency getBuildDependencies() { // For non-required Docker distributions, skip building the distribution is Docker is unavailable - if (getType() == Type.DOCKER && isRequired() == false && dockerSupport.get().getDockerAvailability().isAvailable == false) { + if (getType() == Type.DOCKER && getFailIfUnavailable() == false && dockerSupport.get().getDockerAvailability().isAvailable == 
false) { return task -> Collections.emptySet(); } @@ -259,6 +259,12 @@ void finalizeValues() { return; } + if (getType() != Type.DOCKER && failIfUnavailable.get() == false) { + throw new IllegalArgumentException( + "failIfUnavailable not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]" + ); + } + if (getType() == Type.ARCHIVE) { // defaults for archive, set here instead of via convention so integ-test-zip can verify they are not set if (platform.isPresent() == false) { @@ -270,6 +276,11 @@ void finalizeValues() { "platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]" ); } + if (getType() == Type.DOCKER && bundledJdk.isPresent()) { + throw new IllegalArgumentException( + "bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [docker]" + ); + } } if (flavor.isPresent() == false) { diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java index cb974aebee3ca..5c105df6d1635 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java @@ -11,12 +11,12 @@ import java.util.stream.Collectors; /** - *

Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based - * Elasticsearch build tasks.

- * - *

Additionally registers a task graph listener used to assert a compatible Docker installation exists when task requiring Docker are + * Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based + * Elasticsearch build tasks. + *

+ * Additionally registers a task graph listener used to assert a compatible Docker installation exists when task requiring Docker are * scheduled for execution. Tasks may declare a Docker requirement via an extra property. If a compatible Docker installation is not - * available on the build system an exception will be thrown prior to task execution.

+ * available on the build system an exception will be thrown prior to task execution. * *
  *     task myDockerTask {
@@ -49,11 +49,11 @@ public void apply(Project project) {
         project.getGradle().getTaskGraph().whenReady(graph -> {
             List<String> dockerTasks = graph.getAllTasks().stream().filter(task -> {
                 ExtraPropertiesExtension ext = task.getExtensions().getExtraProperties();
-                return ext.has(REQUIRES_DOCKER_ATTRIBUTE) && ext.get(REQUIRES_DOCKER_ATTRIBUTE).equals("true");
+                return ext.has(REQUIRES_DOCKER_ATTRIBUTE) && (boolean) ext.get(REQUIRES_DOCKER_ATTRIBUTE);
             }).map(Task::getPath).collect(Collectors.toList());
 
             if (dockerTasks.isEmpty() == false) {
-                dockerSupportServiceProvider.get().assertDockerIsAvailable(dockerTasks);
+                dockerSupportServiceProvider.get().failIfDockerUnavailable(dockerTasks);
             }
         });
     }
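
For context, a minimal sketch of how a consuming build script opts a task into this check, assuming the `elasticsearch.docker-support` plugin is applied to the root project; the task name and command here are hypothetical:

    // Hypothetical example task; the extra property must be a boolean (not the string 'true'),
    // matching the (boolean) cast in the task graph listener above.
    task buildMyImage(type: Exec) {
      ext.requiresDocker = true
      commandLine 'docker', 'build', '.'
    }
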
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java
index a83431797f683..7ffa83c0ff4c9 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java
@@ -104,7 +104,7 @@ public DockerAvailability getDockerAvailability() {
      *
      * @throws GradleException if Docker is not available. The exception message gives the reason.
      */
-    void assertDockerIsAvailable(List<String> tasks) {
+    void failIfDockerUnavailable(List<String> tasks) {
         DockerAvailability availability = getDockerAvailability();
 
         // Docker installation is available and compatible
@@ -255,7 +255,7 @@ private void throwDockerRequiredException(final String message) {
 
     private void throwDockerRequiredException(final String message, Exception e) {
         throw new GradleException(
-            message + "\nyou can address this by attending to the reported issue, " + "removing the offending tasks from being executed.",
+            message + "\nyou can address this by attending to the reported issue, or removing the offending tasks from being executed.",
             e
         );
     }
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java
index 8e4aa01275c91..8b62e81e626e8 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java
@@ -444,14 +444,16 @@ private static void addDistro(
             if (type == Type.ARCHIVE) {
                 d.setPlatform(platform);
             }
-            d.setBundledJdk(bundledJdk);
+            if (type != Type.DOCKER) {
+                d.setBundledJdk(bundledJdk);
+            }
             d.setVersion(version);
         });
 
         // Allow us to gracefully omit building Docker distributions if Docker is not available on the system.
         // In such a case as we can't build the Docker images we'll simply skip the corresponding tests.
         if (type == Type.DOCKER) {
-            distro.setRequired(false);
+            distro.setFailIfUnavailable(false);
         }
 
         container.add(distro);
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index 2d263bf361e1f..e0573155b7af8 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -112,7 +112,7 @@ elasticsearch_distributions {
       flavor = distroFlavor
       type = 'docker'
       version = VersionProperties.getElasticsearch()
-      required = false // This ensures we don't attempt to build images if docker is unavailable
+      failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable
     }
   }
 }
diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle
index 04021f337882b..2d0acca20a50d 100644
--- a/qa/remote-clusters/build.gradle
+++ b/qa/remote-clusters/build.gradle
@@ -49,7 +49,7 @@ elasticsearch_distributions {
     type = 'docker'
     flavor = System.getProperty('tests.distribution', 'default')
     version = VersionProperties.getElasticsearch()
-    required = false // This ensures we skip this testing if Docker is unavailable
+    failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable
   }
 }
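
Taken together with the distribution/docker changes above, a fixture build script ends up with a declaration along these lines (a sketch that simply restates the qa/remote-clusters configuration above in its final form; `tests.distribution` defaults to 'default'):

    elasticsearch_distributions {
      docker {
        type = 'docker'
        flavor = System.getProperty('tests.distribution', 'default')
        version = VersionProperties.getElasticsearch()
        failIfUnavailable = false // skip Docker-dependent tasks instead of failing the build
      }
    }

    preProcessFixture {
      dependsOn copyKeystore, elasticsearch_distributions.docker
    }
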
 

From f5fc6b7cf012a84fe8ffa00318eb2198c73e154c Mon Sep 17 00:00:00 2001
From: Mark Vieira 
Date: Thu, 20 Feb 2020 11:01:51 -0800
Subject: [PATCH 4/6] Fix code style violations

Signed-off-by: Mark Vieira 
---
 .../elasticsearch/gradle/ElasticsearchDistribution.java   | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
index 0b05ec79c6b93..a1a089cd79c5d 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
@@ -220,7 +220,9 @@ public Extracted getExtracted() {
     @Override
     public TaskDependency getBuildDependencies() {
         // For non-required Docker distributions, skip building the distribution if Docker is unavailable
-        if (getType() == Type.DOCKER && getFailIfUnavailable() == false && dockerSupport.get().getDockerAvailability().isAvailable == false) {
+        if (getType() == Type.DOCKER
+            && getFailIfUnavailable() == false
+            && dockerSupport.get().getDockerAvailability().isAvailable == false) {
             return task -> Collections.emptySet();
         }
 
@@ -277,9 +279,7 @@ void finalizeValues() {
                 );
             }
             if (getType() == Type.DOCKER && bundledJdk.isPresent()) {
-                throw new IllegalArgumentException(
-                    "bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [docker]"
-                );
+                throw new IllegalArgumentException("bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [docker]");
             }
         }
 

From 5f24188537132bb992e48770c22a4b93163a7b08 Mon Sep 17 00:00:00 2001
From: Mark Vieira 
Date: Thu, 20 Feb 2020 12:57:42 -0800
Subject: [PATCH 5/6] Reword validation error messages

Signed-off-by: Mark Vieira 
---
 .../gradle/ElasticsearchDistribution.java            | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
index a1a089cd79c5d..c91ebc28c7887 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java
@@ -245,7 +245,7 @@ void finalizeValues() {
         if (getType() == Type.INTEG_TEST_ZIP) {
             if (platform.getOrNull() != null) {
                 throw new IllegalArgumentException(
-                    "platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
+                    "platform cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
                 );
             }
             if (flavor.getOrNull() != null) {
@@ -255,7 +255,7 @@ void finalizeValues() {
             }
             if (bundledJdk.getOrNull() != null) {
                 throw new IllegalArgumentException(
-                    "bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]"
+                    "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]"
                 );
             }
             return;
@@ -263,7 +263,7 @@ void finalizeValues() {
 
         if (getType() != Type.DOCKER && failIfUnavailable.get() == false) {
             throw new IllegalArgumentException(
-                "failIfUnavailable not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]"
+                "failIfUnavailable cannot be 'false' on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
             );
         }
 
@@ -275,11 +275,13 @@ void finalizeValues() {
         } else { // rpm, deb or docker
             if (platform.isPresent()) {
                 throw new IllegalArgumentException(
-                    "platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]"
+                    "platform cannot be set on elasticsearch distribution [" + name + "] of type [" + getType() + "]"
                 );
             }
             if (getType() == Type.DOCKER && bundledJdk.isPresent()) {
-                throw new IllegalArgumentException("bundledJdk not allowed for elasticsearch distribution [" + name + "] of type [docker]");
+                throw new IllegalArgumentException(
+                    "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [docker]"
+                );
             }
         }
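
As a quick illustration of when these messages now appear (a hypothetical scenario; the distribution name follows the unit tests below), a declaration like this fails with the reworded failIfUnavailable message as soon as the distribution's values are finalized:

    elasticsearch_distributions {
      testdistro {
        type = 'archive'
        version = VersionProperties.getElasticsearch()
        failIfUnavailable = false // rejected: only 'docker' distributions may opt out of failing when unavailable
      }
    }
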
 

From d1b260d5c21ad50ca6fff1b3fe1383ab43ffb0c0 Mon Sep 17 00:00:00 2001
From: Mark Vieira 
Date: Thu, 20 Feb 2020 13:39:20 -0800
Subject: [PATCH 6/6] Fix unit test

Signed-off-by: Mark Vieira 
---
 .../elasticsearch/gradle/DistributionDownloadPluginTests.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
index a2e7413c5d717..6ab3bb775392a 100644
--- a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java
@@ -123,7 +123,7 @@ public void testPlatformForIntegTest() {
             Platform.LINUX,
             null,
             null,
-            "platform not allowed for elasticsearch distribution [testdistro]"
+            "platform cannot be set on elasticsearch distribution [testdistro]"
         );
     }
 
@@ -175,7 +175,7 @@ public void testBundledJdkForIntegTest() {
             null,
             null,
             true,
-            "bundledJdk not allowed for elasticsearch distribution [testdistro]"
+            "bundledJdk cannot be set on elasticsearch distribution [testdistro]"
         );
     }