diff --git a/build.gradle b/build.gradle index eb2b2206f266b..a7b8ca36a2c59 100644 --- a/build.gradle +++ b/build.gradle @@ -34,6 +34,7 @@ import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure plugins { id 'lifecycle-base' + id 'elasticsearch.docker-support' id 'elasticsearch.global-build-info' id "com.diffplug.gradle.spotless" version "3.24.2" apply false } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index c7604a48dfcfd..916a2c6047aed 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -84,8 +84,6 @@ import java.nio.charset.StandardCharsets import java.nio.file.Files import static org.elasticsearch.gradle.tool.Boilerplate.maybeConfigure -import static org.elasticsearch.gradle.tool.DockerUtils.assertDockerIsAvailable -import static org.elasticsearch.gradle.tool.DockerUtils.getDockerPath /** * Encapsulates build configuration for elasticsearch projects. @@ -208,51 +206,6 @@ class BuildPlugin implements Plugin { } } - static void requireDocker(final Task task) { - final Project rootProject = task.project.rootProject - ExtraPropertiesExtension ext = rootProject.extensions.getByType(ExtraPropertiesExtension) - - if (rootProject.hasProperty('requiresDocker') == false) { - /* - * This is our first time encountering a task that requires Docker. We will add an extension that will let us track the tasks - * that register as requiring Docker. We will add a delayed execution that when the task graph is ready if any such tasks are - * in the task graph, then we check two things: - * - the Docker binary is available - * - we can execute a Docker command that requires privileges - * - * If either of these fail, we fail the build. 
- */ - - // check if the Docker binary exists and record its path - final String dockerBinary = getDockerPath().orElse(null) - - final boolean buildDocker - final String buildDockerProperty = System.getProperty("build.docker") - if (buildDockerProperty == null) { - buildDocker = dockerBinary != null - } else if (buildDockerProperty == "true") { - buildDocker = true - } else if (buildDockerProperty == "false") { - buildDocker = false - } else { - throw new IllegalArgumentException( - "expected build.docker to be unset or one of \"true\" or \"false\" but was [" + buildDockerProperty + "]") - } - - ext.set('buildDocker', buildDocker) - ext.set('requiresDocker', []) - rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> - final List tasks = taskGraph.allTasks.intersect(ext.get('requiresDocker') as List).collect { " ${it.path}".toString()} - - if (tasks.isEmpty() == false) { - assertDockerIsAvailable(task.project, tasks) - } - } - } - - (ext.get('requiresDocker') as List).add(task) - } - /** Add a check before gradle execution phase which ensures java home for the given java version is set. 
*/ static void requireJavaHome(Task task, int version) { // use root project for global accounting diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 8d146592c3e2f..83c37e43778c3 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -22,8 +22,11 @@ import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor; import org.elasticsearch.gradle.ElasticsearchDistribution.Platform; import org.elasticsearch.gradle.ElasticsearchDistribution.Type; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.elasticsearch.gradle.info.BuildParams; import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin; +import org.elasticsearch.gradle.tool.Boilerplate; import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; @@ -38,6 +41,7 @@ import org.gradle.api.file.FileTree; import org.gradle.api.file.RelativePath; import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.provider.Provider; import org.gradle.api.tasks.Sync; import org.gradle.api.tasks.TaskProvider; import org.gradle.authentication.http.HttpHeaderAuthentication; @@ -72,11 +76,17 @@ public class DistributionDownloadPlugin implements Plugin { public void apply(Project project) { // this is needed for isInternal project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + + Provider dockerSupport = Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); distributionsContainer = project.container(ElasticsearchDistribution.class, name -> { 
Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name); Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name); - return new ElasticsearchDistribution(name, project.getObjects(), fileConfiguration, extractedConfiguration); + return new ElasticsearchDistribution(name, project.getObjects(), dockerSupport, fileConfiguration, extractedConfiguration); }); project.getExtensions().add(CONTAINER_NAME, distributionsContainer); diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java index ff6b53fa29447..c91ebc28c7887 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ElasticsearchDistribution.java @@ -19,13 +19,16 @@ package org.elasticsearch.gradle; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.gradle.api.Buildable; import org.gradle.api.artifacts.Configuration; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; +import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskDependency; import java.io.File; +import java.util.Collections; import java.util.Iterator; import java.util.Locale; @@ -110,6 +113,7 @@ public String toString() { } private final String name; + private final Provider dockerSupport; // pkg private so plugin can configure final Configuration configuration; private final Extracted extracted; @@ -119,14 +123,17 @@ public String toString() { private final Property platform; private final Property flavor; private final Property bundledJdk; + private final Property failIfUnavailable; ElasticsearchDistribution( String name, ObjectFactory objectFactory, + Provider dockerSupport, Configuration fileConfiguration, Configuration extractedConfiguration ) { this.name = name; + this.dockerSupport = 
dockerSupport; this.configuration = fileConfiguration; this.version = objectFactory.property(String.class).convention(VersionProperties.getElasticsearch()); this.type = objectFactory.property(Type.class); @@ -134,6 +141,7 @@ public String toString() { this.platform = objectFactory.property(Platform.class); this.flavor = objectFactory.property(Flavor.class); this.bundledJdk = objectFactory.property(Boolean.class); + this.failIfUnavailable = objectFactory.property(Boolean.class).convention(true); this.extracted = new Extracted(extractedConfiguration); } @@ -182,6 +190,14 @@ public void setBundledJdk(Boolean bundledJdk) { this.bundledJdk.set(bundledJdk); } + public boolean getFailIfUnavailable() { + return this.failIfUnavailable.get(); + } + + public void setFailIfUnavailable(boolean failIfUnavailable) { + this.failIfUnavailable.set(failIfUnavailable); + } + @Override public String toString() { return configuration.getSingleFile().toString(); @@ -203,6 +219,13 @@ public Extracted getExtracted() { @Override public TaskDependency getBuildDependencies() { + // For non-required Docker distributions, skip building the distribution is Docker is unavailable + if (getType() == Type.DOCKER + && getFailIfUnavailable() == false + && dockerSupport.get().getDockerAvailability().isAvailable == false) { + return task -> Collections.emptySet(); + } + return configuration.getBuildDependencies(); } @@ -222,7 +245,7 @@ void finalizeValues() { if (getType() == Type.INTEG_TEST_ZIP) { if (platform.getOrNull() != null) { throw new IllegalArgumentException( - "platform not allowed for elasticsearch distribution [" + name + "] of type [integ_test_zip]" + "platform cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]" ); } if (flavor.getOrNull() != null) { @@ -232,12 +255,18 @@ void finalizeValues() { } if (bundledJdk.getOrNull() != null) { throw new IllegalArgumentException( - "bundledJdk not allowed for elasticsearch distribution [" + name + "] of type 
[integ_test_zip]" + "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [integ_test_zip]" ); } return; } + if (getType() != Type.DOCKER && failIfUnavailable.get() == false) { + throw new IllegalArgumentException( + "failIfUnavailable cannot be 'false' on elasticsearch distribution [" + name + "] of type [" + getType() + "]" + ); + } + if (getType() == Type.ARCHIVE) { // defaults for archive, set here instead of via convention so integ-test-zip can verify they are not set if (platform.isPresent() == false) { @@ -246,7 +275,12 @@ void finalizeValues() { } else { // rpm, deb or docker if (platform.isPresent()) { throw new IllegalArgumentException( - "platform not allowed for elasticsearch distribution [" + name + "] of type [" + getType() + "]" + "platform cannot be set on elasticsearch distribution [" + name + "] of type [" + getType() + "]" + ); + } + if (getType() == Type.DOCKER && bundledJdk.isPresent()) { + throw new IllegalArgumentException( + "bundledJdk cannot be set on elasticsearch distribution [" + name + "] of type [docker]" ); } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java new file mode 100644 index 0000000000000..5c105df6d1635 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportPlugin.java @@ -0,0 +1,61 @@ +package org.elasticsearch.gradle.docker; + +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.plugins.ExtraPropertiesExtension; +import org.gradle.api.provider.Provider; + +import java.io.File; +import java.util.List; +import java.util.stream.Collectors; + +/** + * Plugin providing {@link DockerSupportService} for detecting Docker installations and determining requirements for Docker-based + * Elasticsearch build tasks. + *

+ * Additionally registers a task graph listener used to assert a compatible Docker installation exists when tasks requiring Docker are + * scheduled for execution. Tasks may declare a Docker requirement via an extra property. If a compatible Docker installation is not + * available on the build system an exception will be thrown prior to task execution. + * + *

+ *     task myDockerTask {
+ *         ext.requiresDocker = true
+ *     }
+ * 
+ */ +public class DockerSupportPlugin implements Plugin { + public static final String DOCKER_SUPPORT_SERVICE_NAME = "dockerSupportService"; + public static final String DOCKER_ON_LINUX_EXCLUSIONS_FILE = ".ci/dockerOnLinuxExclusions"; + public static final String REQUIRES_DOCKER_ATTRIBUTE = "requiresDocker"; + + @Override + public void apply(Project project) { + if (project != project.getRootProject()) { + throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); + } + + Provider dockerSupportServiceProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent( + DOCKER_SUPPORT_SERVICE_NAME, + DockerSupportService.class, + spec -> spec.parameters( + params -> { params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE)); } + ) + ); + + // Ensure that if any tasks declare they require docker, we assert an available Docker installation exists + project.getGradle().getTaskGraph().whenReady(graph -> { + List dockerTasks = graph.getAllTasks().stream().filter(task -> { + ExtraPropertiesExtension ext = task.getExtensions().getExtraProperties(); + return ext.has(REQUIRES_DOCKER_ATTRIBUTE) && (boolean) ext.get(REQUIRES_DOCKER_ATTRIBUTE); + }).map(Task::getPath).collect(Collectors.toList()); + + if (dockerTasks.isEmpty() == false) { + dockerSupportServiceProvider.get().failIfDockerUnavailable(dockerTasks); + } + }); + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java new file mode 100644 index 0000000000000..7ffa83c0ff4c9 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/docker/DockerSupportService.java @@ -0,0 +1,375 @@ +package org.elasticsearch.gradle.docker; + +import org.elasticsearch.gradle.Version; +import org.elasticsearch.gradle.info.BuildParams; +import org.gradle.api.GradleException; +import org.gradle.api.logging.Logger; 
+import org.gradle.api.logging.Logging; +import org.gradle.api.services.BuildService; +import org.gradle.api.services.BuildServiceParameters; +import org.gradle.process.ExecOperations; +import org.gradle.process.ExecResult; + +import javax.inject.Inject; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +/** + * Build service for detecting available Docker installation and checking for compatibility with Elasticsearch Docker image build + * requirements. This includes a minimum version requirement, as well as the ability to run privileged commands. + */ +public abstract class DockerSupportService implements BuildService { + + private static Logger LOGGER = Logging.getLogger(DockerSupportService.class); + // Defines the possible locations of the Docker CLI. These will be searched in order. + private static String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" }; + private static String[] DOCKER_COMPOSE_BINARIES = { "/usr/local/bin/docker-compose", "/usr/bin/docker-compose" }; + private static final Version MINIMUM_DOCKER_VERSION = Version.fromString("17.05.0"); + + private final ExecOperations execOperations; + private DockerAvailability dockerAvailability; + + @Inject + public DockerSupportService(ExecOperations execOperations) { + this.execOperations = execOperations; + } + + /** + * Searches for a functional Docker installation, and returns information about the search. + * + * @return the results of the search. 
+ */ + public DockerAvailability getDockerAvailability() { + if (this.dockerAvailability == null) { + String dockerPath = null; + Result lastResult = null; + Version version = null; + boolean isVersionHighEnough = false; + boolean isComposeAvailable = false; + + // Check if the Docker binary exists + final Optional dockerBinary = getDockerPath(); + if (isExcludedOs() == false && dockerBinary.isPresent()) { + dockerPath = dockerBinary.get(); + + // Since we use a multi-stage Docker build, check the Docker version meets minimum requirement + lastResult = runCommand(dockerPath, "version", "--format", "{{.Server.Version}}"); + + if (lastResult.isSuccess()) { + version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED); + + isVersionHighEnough = version.onOrAfter(MINIMUM_DOCKER_VERSION); + + if (isVersionHighEnough) { + // Check that we can execute a privileged command + lastResult = runCommand(dockerPath, "images"); + + // If docker all checks out, see if docker-compose is available and working + Optional composePath = getDockerComposePath(); + if (lastResult.isSuccess() && composePath.isPresent()) { + isComposeAvailable = runCommand(composePath.get(), "version").isSuccess(); + } + } + } + } + + boolean isAvailable = isVersionHighEnough && lastResult != null && lastResult.isSuccess(); + + this.dockerAvailability = new DockerAvailability( + isAvailable, + isComposeAvailable, + isVersionHighEnough, + dockerPath, + version, + lastResult + ); + } + + return this.dockerAvailability; + } + + /** + * Given a list of tasks that requires Docker, check whether Docker is available, otherwise throw an exception. + * + * @throws GradleException if Docker is not available. The exception message gives the reason. 
+ */ + void failIfDockerUnavailable(List tasks) { + DockerAvailability availability = getDockerAvailability(); + + // Docker installation is available and compatible + if (availability.isAvailable) { + return; + } + + // No Docker binary was located + if (availability.path == null) { + final String message = String.format( + Locale.ROOT, + "Docker (checked [%s]) is required to run the following task%s: \n%s", + String.join(", ", DOCKER_BINARIES), + tasks.size() > 1 ? "s" : "", + String.join("\n", tasks) + ); + throwDockerRequiredException(message); + } + + // Docker binaries were located, but did not meet the minimum version requirement + if (availability.lastCommand.isSuccess() && availability.isVersionHighEnough == false) { + final String message = String.format( + Locale.ROOT, + "building Docker images requires minimum Docker version of %s due to use of multi-stage builds yet was [%s]", + MINIMUM_DOCKER_VERSION, + availability.version + ); + throwDockerRequiredException(message); + } + + // Some other problem, print the error + final String message = String.format( + Locale.ROOT, + "a problem occurred while using Docker from [%s]%s yet it is required to run the following task%s: \n%s\n" + + "the problem is that Docker exited with exit code [%d] with standard error output:\n%s", + availability.path, + availability.version == null ? "" : " v" + availability.version, + tasks.size() > 1 ? "s" : "", + String.join("\n", tasks), + availability.lastCommand.exitCode, + availability.lastCommand.stderr.trim() + ); + throwDockerRequiredException(message); + } + + private boolean isExcludedOs() { + // We don't attempt to check the current flavor and version of Linux unless we're + // running in CI, because we don't want to stop people running the Docker tests in + // their own environments if they really want to. + if (BuildParams.isCi() == false) { + return false; + } + + // Only some hosts in CI are configured with Docker. 
We attempt to work out the OS + // and version, so that we know whether to expect to find Docker. We don't attempt + // to probe for whether Docker is available, because that doesn't tell us whether + // Docker is unavailable when it should be. + final Path osRelease = Paths.get("/etc/os-release"); + + if (Files.exists(osRelease)) { + Map values; + + try { + final List osReleaseLines = Files.readAllLines(osRelease); + values = parseOsRelease(osReleaseLines); + } catch (IOException e) { + throw new GradleException("Failed to read /etc/os-release", e); + } + + final String id = deriveId(values); + final boolean excluded = getLinuxExclusionList().contains(id); + + if (excluded) { + LOGGER.warn("Linux OS id [{}] is present in the Docker exclude list. Tasks requiring Docker will be disabled.", id); + } + + return excluded; + } + + return false; + } + + private List getLinuxExclusionList() { + File exclusionsFile = getParameters().getExclusionsFile(); + + if (exclusionsFile.exists()) { + try { + return Files.readAllLines(exclusionsFile.toPath()) + .stream() + .map(String::trim) + .filter(line -> (line.isEmpty() || line.startsWith("#")) == false) + .collect(Collectors.toList()); + } catch (IOException e) { + throw new GradleException("Failed to read " + exclusionsFile.getAbsolutePath(), e); + } + } else { + return Collections.emptyList(); + } + } + + // visible for testing + static String deriveId(Map values) { + return values.get("ID") + "-" + values.get("VERSION_ID"); + } + + // visible for testing + static Map parseOsRelease(final List osReleaseLines) { + final Map values = new HashMap<>(); + + osReleaseLines.stream().map(String::trim).filter(line -> (line.isEmpty() || line.startsWith("#")) == false).forEach(line -> { + final String[] parts = line.split("=", 2); + final String key = parts[0]; + // remove optional leading and trailing quotes and whitespace + final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", ""); + + values.put(key, 
value); + }); + + return values; + } + + /** + * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does + * not check whether the Docker installation appears usable, see {@link #getDockerAvailability()} + * instead. + * + * @return the path to a CLI, if available. + */ + private Optional getDockerPath() { + // Check if the Docker binary exists + return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + } + + /** + * Searches the entries in {@link #DOCKER_COMPOSE_BINARIES} for the Docker Compose CLI. This method does + * not check whether the installation appears usable, see {@link #getDockerAvailability()} instead. + * + * @return the path to a CLI, if available. + */ + private Optional getDockerComposePath() { + // Check if the Docker binary exists + return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); + } + + private void throwDockerRequiredException(final String message) { + throwDockerRequiredException(message, null); + } + + private void throwDockerRequiredException(final String message, Exception e) { + throw new GradleException( + message + "\nyou can address this by attending to the reported issue, or removing the offending tasks from being executed.", + e + ); + } + + /** + * Runs a command and captures the exit code, standard output and standard error. + * + * @param args the command and any arguments to execute + * @return a object that captures the result of running the command. If an exception occurring + * while running the command, or the process was killed after reaching the 10s timeout, + * then the exit code will be -1. + */ + private Result runCommand(String... 
args) { + if (args.length == 0) { + throw new IllegalArgumentException("Cannot execute with no command"); + } + + ByteArrayOutputStream stdout = new ByteArrayOutputStream(); + ByteArrayOutputStream stderr = new ByteArrayOutputStream(); + + final ExecResult execResult = execOperations.exec(spec -> { + // The redundant cast is to silence a compiler warning. + spec.setCommandLine((Object[]) args); + spec.setStandardOutput(stdout); + spec.setErrorOutput(stderr); + spec.setIgnoreExitValue(true); + }); + return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); + } + + /** + * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability()}}. + */ + public static class DockerAvailability { + /** + * Indicates whether Docker is available and meets the required criteria. + * True if, and only if, Docker is: + *
    + *
  • Installed
  • + *
  • Executable
  • + *
  • Is at least version compatible with minimum version
  • + *
  • Can execute a command that requires privileges
  • + *
+ */ + public final boolean isAvailable; + + /** + * True if docker-compose is available. + */ + public final boolean isComposeAvailable; + + /** + * True if the installed Docker version is >= 17.05 + */ + public final boolean isVersionHighEnough; + + /** + * The path to the Docker CLI, or null + */ + public final String path; + + /** + * The installed Docker version, or null + */ + public final Version version; + + /** + * Information about the last command executes while probing Docker, or null. + */ + final Result lastCommand; + + DockerAvailability( + boolean isAvailable, + boolean isComposeAvailable, + boolean isVersionHighEnough, + String path, + Version version, + Result lastCommand + ) { + this.isAvailable = isAvailable; + this.isComposeAvailable = isComposeAvailable; + this.isVersionHighEnough = isVersionHighEnough; + this.path = path; + this.version = version; + this.lastCommand = lastCommand; + } + } + + /** + * This class models the result of running a command. It captures the exit code, standard output and standard error. 
+ */ + private static class Result { + final int exitCode; + final String stdout; + final String stderr; + + Result(int exitCode, String stdout, String stderr) { + this.exitCode = exitCode; + this.stdout = stdout; + this.stderr = stderr; + } + + boolean isSuccess() { + return exitCode == 0; + } + + public String toString() { + return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]"; + } + } + + interface Parameters extends BuildServiceParameters { + File getExclusionsFile(); + + void setExclusionsFile(File exclusionsFile); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java index e6c1b5300696a..a9561adda015c 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/test/DistroTestPlugin.java @@ -19,24 +19,6 @@ package org.elasticsearch.gradle.test; -import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath; -import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Random; -import java.util.stream.Collectors; -import java.util.stream.Stream; - import org.elasticsearch.gradle.BwcVersions; import org.elasticsearch.gradle.DistributionDownloadPlugin; import org.elasticsearch.gradle.ElasticsearchDistribution; @@ -45,22 +27,21 @@ import org.elasticsearch.gradle.ElasticsearchDistribution.Type; import org.elasticsearch.gradle.Jdk; import org.elasticsearch.gradle.JdkDownloadPlugin; -import org.elasticsearch.gradle.OS; import 
org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.elasticsearch.gradle.info.BuildParams; +import org.elasticsearch.gradle.tool.Boilerplate; import org.elasticsearch.gradle.vagrant.BatsProgressLogger; import org.elasticsearch.gradle.vagrant.VagrantBasePlugin; import org.elasticsearch.gradle.vagrant.VagrantExtension; -import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.Directory; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.provider.Provider; @@ -70,9 +51,24 @@ import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.testing.Test; -public class DistroTestPlugin implements Plugin { - private static final Logger logger = Logging.getLogger(DistroTestPlugin.class); +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Random; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertLinuxPath; +import static org.elasticsearch.gradle.vagrant.VagrantMachine.convertWindowsPath; + +public class DistroTestPlugin implements Plugin { private static final String GRADLE_JDK_VERSION = "13.0.1+9@cec27d702aa74d5a8630c65ae61e4305"; private static final String GRADLE_JDK_VENDOR = "openjdk"; @@ -88,11 +84,15 @@ public class 
DistroTestPlugin implements Plugin { @Override public void apply(Project project) { - final boolean runDockerTests = shouldRunDockerTests(project); - + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); project.getPluginManager().apply(DistributionDownloadPlugin.class); project.getPluginManager().apply("elasticsearch.build"); + Provider dockerSupport = Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); + // TODO: it would be useful to also have the SYSTEM_JAVA_HOME setup in the root project, so that running from GCP only needs // a java for gradle to run, and the tests are self sufficient and consistent with the java they use @@ -101,17 +101,15 @@ public void apply(Project project) { Provider upgradeDir = project.getLayout().getBuildDirectory().dir("packaging/upgrade"); Provider pluginsDir = project.getLayout().getBuildDirectory().dir("packaging/plugins"); - List distributions = configureDistributions(project, upgradeVersion, runDockerTests); + List distributions = configureDistributions(project, upgradeVersion); TaskProvider copyDistributionsTask = configureCopyDistributionsTask(project, distributionsDir); TaskProvider copyUpgradeTask = configureCopyUpgradeTask(project, upgradeVersion, upgradeDir); TaskProvider copyPluginsTask = configureCopyPluginsTask(project, pluginsDir); TaskProvider destructiveDistroTest = project.getTasks().register("destructiveDistroTest"); for (ElasticsearchDistribution distribution : distributions) { - if (distribution.getType() != Type.DOCKER || runDockerTests) { - TaskProvider destructiveTask = configureDistroTest(project, distribution); - destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask)); - } + TaskProvider destructiveTask = configureDistroTest(project, distribution, dockerSupport); + destructiveDistroTest.configure(t -> t.dependsOn(destructiveTask)); } Map> batsTests = new HashMap<>(); configureBatsTest(project, 
"plugins", distributionsDir, copyDistributionsTask, copyPluginsTask).configure( @@ -325,8 +323,14 @@ private static TaskProvider configureVMWrapperTask( }); } - private static TaskProvider configureDistroTest(Project project, ElasticsearchDistribution distribution) { + private static TaskProvider configureDistroTest( + Project project, + ElasticsearchDistribution distribution, + Provider dockerSupport + ) { return project.getTasks().register(destructiveDistroTestTaskName(distribution), Test.class, t -> { + // Disable Docker distribution tests unless a Docker installation is available + t.onlyIf(t2 -> distribution.getType() != Type.DOCKER || dockerSupport.get().getDockerAvailability().isAvailable); t.getOutputs().doNotCacheIf("Build cache is disabled for packaging tests", Specs.satisfyAll()); t.setMaxParallelForks(1); t.setWorkingDir(project.getProjectDir()); @@ -355,7 +359,7 @@ private static TaskProvider configureBatsTest( }); } - private List configureDistributions(Project project, Version upgradeVersion, boolean runDockerTests) { + private List configureDistributions(Project project, Version upgradeVersion) { NamedDomainObjectContainer distributions = DistributionDownloadPlugin.getContainer(project); List currentDistros = new ArrayList<>(); List upgradeDistros = new ArrayList<>(); @@ -364,7 +368,7 @@ private List configureDistributions(Project project, for (Flavor flavor : Flavor.values()) { for (boolean bundledJdk : Arrays.asList(true, false)) { // All our Docker images include a bundled JDK so it doesn't make sense to test without one - boolean skip = type == Type.DOCKER && (runDockerTests == false || bundledJdk == false); + boolean skip = type == Type.DOCKER && bundledJdk == false; if (skip == false) { addDistro(distributions, type, null, flavor, bundledJdk, VersionProperties.getElasticsearch(), currentDistros); @@ -431,7 +435,6 @@ private static void addDistro( String version, List container ) { - String name = distroId(type, platform, flavor, bundledJdk) + 
"-" + version; if (distributions.findByName(name) != null) { return; @@ -442,9 +445,18 @@ private static void addDistro( if (type == Type.ARCHIVE) { d.setPlatform(platform); } - d.setBundledJdk(bundledJdk); + if (type != Type.DOCKER) { + d.setBundledJdk(bundledJdk); + } d.setVersion(version); }); + + // Allow us to gracefully omit building Docker distributions if Docker is not available on the system. + // In such a case as we can't build the Docker images we'll simply skip the corresponding tests. + if (type == Type.DOCKER) { + distro.setFailIfUnavailable(false); + } + container.add(distro); } @@ -461,91 +473,4 @@ private static String destructiveDistroTestTaskName(ElasticsearchDistribution di Type type = distro.getType(); return "destructiveDistroTest." + distroId(type, distro.getPlatform(), distro.getFlavor(), distro.getBundledJdk()); } - - static Map parseOsRelease(final List osReleaseLines) { - final Map values = new HashMap<>(); - - osReleaseLines.stream().map(String::trim).filter(line -> (line.isEmpty() || line.startsWith("#")) == false).forEach(line -> { - final String[] parts = line.split("=", 2); - final String key = parts[0]; - // remove optional leading and trailing quotes and whitespace - final String value = parts[1].replaceAll("^['\"]?\\s*", "").replaceAll("\\s*['\"]?$", ""); - - values.put(key, value); - }); - - return values; - } - - static String deriveId(final Map osRelease) { - return osRelease.get("ID") + "-" + osRelease.get("VERSION_ID"); - } - - private static List getLinuxExclusionList(Project project) { - final String exclusionsFilename = "dockerOnLinuxExclusions"; - final Path exclusionsPath = project.getRootDir().toPath().resolve(".ci").resolve(exclusionsFilename); - - try { - return Files.readAllLines(exclusionsPath) - .stream() - .map(String::trim) - .filter(line -> (line.isEmpty() || line.startsWith("#")) == false) - .collect(Collectors.toList()); - } catch (IOException e) { - throw new GradleException("Failed to read .ci/" + 
exclusionsFilename, e); - } - } - - /** - * The {@link DistroTestPlugin} generates a number of test tasks, some - * of which are Docker packaging tests. When running on the host OS or in CI - * i.e. not in a Vagrant VM, only certain operating systems are supported. This - * method determines whether the Docker tests should be run on the host - * OS. Essentially, unless an OS and version is specifically excluded, we expect - * to be able to run Docker and test the Docker images. - */ - private static boolean shouldRunDockerTests(Project project) { - switch (OS.current()) { - case WINDOWS: - // Not yet supported. - return false; - - case MAC: - // Assume that Docker for Mac is installed, since Docker is part of the dev workflow. - return true; - - case LINUX: - // Only some hosts in CI are configured with Docker. We attempt to work out the OS - // and version, so that we know whether to expect to find Docker. We don't attempt - // to probe for whether Docker is available, because that doesn't tell us whether - // Docker is unavailable when it should be. - final Path osRelease = Paths.get("/etc/os-release"); - - if (Files.exists(osRelease)) { - Map values; - - try { - final List osReleaseLines = Files.readAllLines(osRelease); - values = parseOsRelease(osReleaseLines); - } catch (IOException e) { - throw new GradleException("Failed to read /etc/os-release", e); - } - - final String id = deriveId(values); - - final boolean shouldExclude = getLinuxExclusionList(project).contains(id); - - logger.warn("Linux OS id [" + id + "] is " + (shouldExclude ? 
"" : "not ") + "present in the Docker exclude list"); - - return shouldExclude == false; - } - - logger.warn("/etc/os-release does not exist!"); - return false; - - default: - logger.warn("Unknown OS [" + OS.current() + "], answering false to shouldRunDockerTests()"); - return false; - } - } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 78d93772e8114..e74058b04bb52 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -39,6 +39,8 @@ import java.io.File; +import static org.elasticsearch.gradle.tool.Boilerplate.noop; + public class TestClustersPlugin implements Plugin { public static final String EXTENSION_NAME = "testClusters"; @@ -72,7 +74,7 @@ public void apply(Project project) { createListClustersTask(project, container); // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, spec -> {}); + project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); // register throttle so we only run at most max-workers/2 nodes concurrently project.getGradle() diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java index 52486844c5deb..64ea96e9f8827 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/testfixtures/TestFixturesPlugin.java @@ -21,20 +21,27 @@ import com.avast.gradle.dockercompose.ComposeExtension; import com.avast.gradle.dockercompose.DockerComposePlugin; import 
com.avast.gradle.dockercompose.ServiceInfo; +import com.avast.gradle.dockercompose.tasks.ComposeDown; +import com.avast.gradle.dockercompose.tasks.ComposePull; import com.avast.gradle.dockercompose.tasks.ComposeUp; -import org.elasticsearch.gradle.OS; import org.elasticsearch.gradle.SystemPropertyCommandLineArgumentProvider; +import org.elasticsearch.gradle.docker.DockerSupportPlugin; +import org.elasticsearch.gradle.docker.DockerSupportService; import org.elasticsearch.gradle.info.BuildParams; import org.elasticsearch.gradle.precommit.TestingConventionsTasks; +import org.elasticsearch.gradle.tool.Boilerplate; import org.gradle.api.Action; import org.gradle.api.DefaultTask; import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.ExtraPropertiesExtension; import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.testing.Test; import java.io.File; @@ -46,75 +53,76 @@ public class TestFixturesPlugin implements Plugin { + private static final Logger LOGGER = Logging.getLogger(TestFixturesPlugin.class); private static final String DOCKER_COMPOSE_THROTTLE = "dockerComposeThrottle"; static final String DOCKER_COMPOSE_YML = "docker-compose.yml"; @Override public void apply(Project project) { - TaskContainer tasks = project.getTasks(); + project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); + TaskContainer tasks = project.getTasks(); TestFixtureExtension extension = project.getExtensions().create("testFixtures", TestFixtureExtension.class, project); Provider dockerComposeThrottle = project.getGradle() .getSharedServices() .registerIfAbsent(DOCKER_COMPOSE_THROTTLE, DockerComposeThrottle.class, spec -> spec.getMaxParallelUsages().set(1)); + Provider dockerSupport = 
Boilerplate.getBuildService( + project.getGradle().getSharedServices(), + DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME + ); + ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class); File testfixturesDir = project.file("testfixtures_shared"); ext.set("testFixturesDir", testfixturesDir); if (project.file(DOCKER_COMPOSE_YML).exists()) { - Task buildFixture = project.getTasks().create("buildFixture"); - Task pullFixture = project.getTasks().create("pullFixture"); - Task preProcessFixture = project.getTasks().create("preProcessFixture"); - preProcessFixture.doFirst((task) -> { - try { - Files.createDirectories(testfixturesDir.toPath()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }); - preProcessFixture.getOutputs().dir(testfixturesDir); - buildFixture.dependsOn(preProcessFixture); - pullFixture.dependsOn(preProcessFixture); - Task postProcessFixture = project.getTasks().create("postProcessFixture"); - postProcessFixture.dependsOn(buildFixture); - preProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); - postProcessFixture.onlyIf(spec -> buildFixture.getEnabled()); - - if (dockerComposeSupported() == false) { - preProcessFixture.setEnabled(false); - postProcessFixture.setEnabled(false); - buildFixture.setEnabled(false); - pullFixture.setEnabled(false); - } else { - project.getPluginManager().apply(BasePlugin.class); - project.getPluginManager().apply(DockerComposePlugin.class); - ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class); - composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML)); - composeExtension.setRemoveContainers(true); - composeExtension.setExecutable( - project.file("/usr/local/bin/docker-compose").exists() ? 
"/usr/local/bin/docker-compose" : "/usr/bin/docker-compose" - ); - - buildFixture.dependsOn(tasks.named("composeUp")); - pullFixture.dependsOn(tasks.named("composePull")); - tasks.named("composeUp").configure(t -> { - // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions - if (BuildParams.isCi()) { - t.usesService(dockerComposeThrottle); + project.getPluginManager().apply(BasePlugin.class); + project.getPluginManager().apply(DockerComposePlugin.class); + + TaskProvider preProcessFixture = project.getTasks().register("preProcessFixture", t -> { + t.getOutputs().dir(testfixturesDir); + t.doFirst(t2 -> { + try { + Files.createDirectories(testfixturesDir.toPath()); + } catch (IOException e) { + throw new UncheckedIOException(e); } - t.mustRunAfter(preProcessFixture); }); - tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); - tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir))); + }); + TaskProvider buildFixture = project.getTasks() + .register("buildFixture", t -> t.dependsOn(preProcessFixture, tasks.named("composeUp"))); + TaskProvider postProcessFixture = project.getTasks().register("postProcessFixture", task -> { + task.dependsOn(buildFixture); configureServiceInfoForTask( - postProcessFixture, + task, project, false, - (name, port) -> postProcessFixture.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) + (name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) ); - } + }); + + maybeSkipTask(dockerSupport, preProcessFixture); + maybeSkipTask(dockerSupport, postProcessFixture); + maybeSkipTask(dockerSupport, buildFixture); + + ComposeExtension composeExtension = project.getExtensions().getByType(ComposeExtension.class); + composeExtension.setUseComposeFiles(Collections.singletonList(DOCKER_COMPOSE_YML)); + composeExtension.setRemoveContainers(true); + composeExtension.setExecutable( + 
project.file("/usr/local/bin/docker-compose").exists() ? "/usr/local/bin/docker-compose" : "/usr/bin/docker-compose" + ); + + tasks.named("composeUp").configure(t -> { + // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions + if (BuildParams.isCi()) { + t.usesService(dockerComposeThrottle); + } + t.mustRunAfter(preProcessFixture); + }); + tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); + tasks.named("composeDown").configure(t -> t.doLast(t2 -> project.delete(testfixturesDir))); } else { project.afterEvaluate(spec -> { if (extension.fixtures.isEmpty()) { @@ -129,43 +137,44 @@ public void apply(Project project) { extension.fixtures.matching(fixtureProject -> fixtureProject.equals(project) == false) .all(fixtureProject -> project.evaluationDependsOn(fixtureProject.getPath())); - conditionTaskByType(tasks, extension, Test.class); - conditionTaskByType(tasks, extension, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); - conditionTaskByType(tasks, extension, TestingConventionsTasks.class); - conditionTaskByType(tasks, extension, ComposeUp.class); - - if (dockerComposeSupported() == false) { - project.getLogger() - .info( - "Tests for {} require docker-compose at /usr/local/bin/docker-compose or /usr/bin/docker-compose " - + "but none could be found so these will be skipped", - project.getPath() - ); - return; - } - - tasks.withType(Test.class, task -> extension.fixtures.all(fixtureProject -> { - fixtureProject.getTasks().matching(it -> it.getName().equals("buildFixture")).all(task::dependsOn); - fixtureProject.getTasks().matching(it -> it.getName().equals("composeDown")).all(task::finalizedBy); + // Skip docker compose tasks if it is unavailable + maybeSkipTasks(tasks, dockerSupport, Test.class); + maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.elasticsearch.gradle.test.RestIntegTestTask")); + maybeSkipTasks(tasks, dockerSupport, TestingConventionsTasks.class); + 
maybeSkipTasks(tasks, dockerSupport, getTaskClass("org.elasticsearch.gradle.test.AntFixture")); + maybeSkipTasks(tasks, dockerSupport, ComposeUp.class); + maybeSkipTasks(tasks, dockerSupport, ComposePull.class); + maybeSkipTasks(tasks, dockerSupport, ComposeDown.class); + + tasks.withType(Test.class).configureEach(task -> extension.fixtures.all(fixtureProject -> { + task.dependsOn(fixtureProject.getTasks().named("postProcessFixture")); + task.finalizedBy(fixtureProject.getTasks().named("composeDown")); configureServiceInfoForTask( task, fixtureProject, true, (name, host) -> task.getExtensions().getByType(SystemPropertyCommandLineArgumentProvider.class).systemProperty(name, host) ); - task.dependsOn(fixtureProject.getTasks().getByName("postProcessFixture")); })); } - private void conditionTaskByType(TaskContainer tasks, TestFixtureExtension extension, Class taskClass) { - tasks.withType(taskClass) - .configureEach( - task -> task.onlyIf( - spec -> extension.fixtures.stream() - .anyMatch(fixtureProject -> fixtureProject.getTasks().getByName("buildFixture").getEnabled() == false) == false - ) - ); + private void maybeSkipTasks(TaskContainer tasks, Provider dockerSupport, Class taskClass) { + tasks.withType(taskClass).configureEach(t -> maybeSkipTask(dockerSupport, t)); + } + + private void maybeSkipTask(Provider dockerSupport, TaskProvider task) { + task.configure(t -> maybeSkipTask(dockerSupport, t)); + } + + private void maybeSkipTask(Provider dockerSupport, Task task) { + task.onlyIf(spec -> { + boolean isComposeAvailable = dockerSupport.get().getDockerAvailability().isComposeAvailable; + if (isComposeAvailable == false) { + LOGGER.info("Task {} requires docker-compose but it is unavailable. Task will be skipped.", task.getPath()); + } + return isComposeAvailable; + }); } private void configureServiceInfoForTask( @@ -176,10 +185,11 @@ private void configureServiceInfoForTask( ) { // Configure ports for the tests as system properties. 
// We only know these at execution time so we need to do it in doFirst - TestFixtureExtension extension = task.getProject().getExtensions().getByType(TestFixtureExtension.class); task.doFirst(new Action() { @Override public void execute(Task theTask) { + TestFixtureExtension extension = theTask.getProject().getExtensions().getByType(TestFixtureExtension.class); + fixtureProject.getExtensions() .getByType(ComposeExtension.class) .getServicesInfos() @@ -204,19 +214,6 @@ public void execute(Task theTask) { }); } - public static boolean dockerComposeSupported() { - if (OS.current().equals(OS.WINDOWS)) { - return false; - } - final boolean hasDockerCompose = (new File("/usr/local/bin/docker-compose")).exists() - || (new File("/usr/bin/docker-compose").exists()); - return hasDockerCompose && Boolean.parseBoolean(System.getProperty("tests.fixture.enabled", "true")); - } - - private void disableTaskByType(TaskContainer tasks, Class type) { - tasks.withType(type, task -> task.setEnabled(false)); - } - @SuppressWarnings("unchecked") private Class getTaskClass(String type) { Class aClass; diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java index 3c1c44cc1eadb..85002b9c49821 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/Boilerplate.java @@ -38,6 +38,10 @@ public abstract class Boilerplate { + public static Action noop() { + return t -> {}; + } + public static SourceSetContainer getJavaSourceSets(Project project) { return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java deleted file mode 100644 index d67e28757a89a..0000000000000 --- 
a/buildSrc/src/main/java/org/elasticsearch/gradle/tool/DockerUtils.java +++ /dev/null @@ -1,243 +0,0 @@ -package org.elasticsearch.gradle.tool; - -import org.elasticsearch.gradle.Version; -import org.gradle.api.GradleException; -import org.gradle.api.Project; -import org.gradle.process.ExecResult; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.util.List; -import java.util.Locale; -import java.util.Optional; - -/** - * Contains utilities for checking whether Docker is installed, is executable, - * has a recent enough version, and appears to be functional. The Elasticsearch build - * requires Docker >= 17.05 as it uses a multi-stage build. - */ -public class DockerUtils { - /** - * Defines the possible locations of the Docker CLI. These will be searched in order. - */ - private static String[] DOCKER_BINARIES = { "/usr/bin/docker", "/usr/local/bin/docker" }; - - /** - * Searches the entries in {@link #DOCKER_BINARIES} for the Docker CLI. This method does - * not check whether the Docker installation appears usable, see {@link #getDockerAvailability(Project)} - * instead. - * - * @return the path to a CLI, if available. - */ - public static Optional getDockerPath() { - // Check if the Docker binary exists - return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst(); - } - - /** - * Searches for a functional Docker installation, and returns information about the search. - * @return the results of the search. 
- */ - private static DockerAvailability getDockerAvailability(Project project) { - String dockerPath = null; - Result lastResult = null; - Version version = null; - boolean isVersionHighEnough = false; - - // Check if the Docker binary exists - final Optional dockerBinary = getDockerPath(); - - if (dockerBinary.isPresent()) { - dockerPath = dockerBinary.get(); - - // Since we use a multi-stage Docker build, check the Docker version since 17.05 - lastResult = runCommand(project, dockerPath, "version", "--format", "{{.Server.Version}}"); - - if (lastResult.isSuccess()) { - version = Version.fromString(lastResult.stdout.trim(), Version.Mode.RELAXED); - - isVersionHighEnough = version.onOrAfter("17.05.0"); - - if (isVersionHighEnough) { - // Check that we can execute a privileged command - lastResult = runCommand(project, dockerPath, "images"); - } - } - } - - boolean isAvailable = isVersionHighEnough && lastResult.isSuccess(); - - return new DockerAvailability(isAvailable, isVersionHighEnough, dockerPath, version, lastResult); - } - - /** - * An immutable class that represents the results of a Docker search from {@link #getDockerAvailability(Project)}}. - */ - private static class DockerAvailability { - /** - * Indicates whether Docker is available and meets the required criteria. - * True if, and only if, Docker is: - *
    - *
  • Installed
  • - *
  • Executable
  • - *
  • Is at least version 17.05
  • - *
  • Can execute a command that requires privileges
  • - *
- */ - final boolean isAvailable; - - /** - * True if the installed Docker version is >= 17.05 - */ - final boolean isVersionHighEnough; - - /** - * The path to the Docker CLI, or null - */ - public final String path; - - /** - * The installed Docker version, or null - */ - public final Version version; - - /** - * Information about the last command executes while probing Docker, or null. - */ - final Result lastCommand; - - DockerAvailability(boolean isAvailable, boolean isVersionHighEnough, String path, Version version, Result lastCommand) { - this.isAvailable = isAvailable; - this.isVersionHighEnough = isVersionHighEnough; - this.path = path; - this.version = version; - this.lastCommand = lastCommand; - } - } - - /** - * Given a list of tasks that requires Docker, check whether Docker is available, otherwise - * throw an exception. - * @param project a Gradle project - * @param tasks the tasks that require Docker - * @throws GradleException if Docker is not available. The exception message gives the reason. - */ - public static void assertDockerIsAvailable(Project project, List tasks) { - DockerAvailability availability = getDockerAvailability(project); - - if (availability.isAvailable) { - return; - } - - /* - * There are tasks in the task graph that require Docker. - * Now we are failing because either the Docker binary does - * not exist or because execution of a privileged Docker - * command failed. - */ - if (availability.path == null) { - final String message = String.format( - Locale.ROOT, - "Docker (checked [%s]) is required to run the following task%s: \n%s", - String.join(", ", DOCKER_BINARIES), - tasks.size() > 1 ? "s" : "", - String.join("\n", tasks) - ); - throwDockerRequiredException(message); - } - - if (availability.version == null) { - final String message = String.format( - Locale.ROOT, - "Docker is required to run the following task%s, but it doesn't appear to be running: \n%s", - tasks.size() > 1 ? 
"s" : "", - String.join("\n", tasks) - ); - throwDockerRequiredException(message); - } - - if (availability.isVersionHighEnough == false) { - final String message = String.format( - Locale.ROOT, - "building Docker images requires Docker version 17.05+ due to use of multi-stage builds yet was [%s]", - availability.version - ); - throwDockerRequiredException(message); - } - - // Some other problem, print the error - final String message = String.format( - Locale.ROOT, - "a problem occurred running Docker from [%s] yet it is required to run the following task%s: \n%s\n" - + "the problem is that Docker exited with exit code [%d] with standard error output [%s]", - availability.path, - tasks.size() > 1 ? "s" : "", - String.join("\n", tasks), - availability.lastCommand.exitCode, - availability.lastCommand.stderr.trim() - ); - throwDockerRequiredException(message); - } - - private static void throwDockerRequiredException(final String message) { - throwDockerRequiredException(message, null); - } - - private static void throwDockerRequiredException(final String message, Exception e) { - throw new GradleException( - message - + "\nyou can address this by attending to the reported issue, " - + "removing the offending tasks from being executed, " - + "or by passing -Dbuild.docker=false", - e - ); - } - - /** - * Runs a command and captures the exit code, standard output and standard error. - * @param args the command and any arguments to execute - * @return a object that captures the result of running the command. If an exception occurring - * while running the command, or the process was killed after reaching the 10s timeout, - * then the exit code will be -1. - */ - private static Result runCommand(Project project, String... 
args) { - if (args.length == 0) { - throw new IllegalArgumentException("Cannot execute with no command"); - } - - ByteArrayOutputStream stdout = new ByteArrayOutputStream(); - ByteArrayOutputStream stderr = new ByteArrayOutputStream(); - - final ExecResult execResult = project.exec(spec -> { - // The redundant cast is to silence a compiler warning. - spec.setCommandLine((Object[]) args); - spec.setStandardOutput(stdout); - spec.setErrorOutput(stderr); - }); - - return new Result(execResult.getExitValue(), stdout.toString(), stderr.toString()); - } - - /** - * This class models the result of running a command. It captures the exit code, standard output and standard error. - */ - private static class Result { - final int exitCode; - final String stdout; - final String stderr; - - Result(int exitCode, String stdout, String stderr) { - this.exitCode = exitCode; - this.stdout = stdout; - this.stderr = stderr; - } - - boolean isSuccess() { - return exitCode == 0; - } - - public String toString() { - return "exitCode = [" + exitCode + "] " + "stdout = [" + stdout.trim() + "] " + "stderr = [" + stderr.trim() + "]"; - } - } -} diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties new file mode 100644 index 0000000000000..fec4e97bf67a5 --- /dev/null +++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.docker-support.properties @@ -0,0 +1 @@ +implementation-class=org.elasticsearch.gradle.docker.DockerSupportPlugin diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java index a2e7413c5d717..6ab3bb775392a 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/DistributionDownloadPluginTests.java 
@@ -123,7 +123,7 @@ public void testPlatformForIntegTest() { Platform.LINUX, null, null, - "platform not allowed for elasticsearch distribution [testdistro]" + "platform cannot be set on elasticsearch distribution [testdistro]" ); } @@ -175,7 +175,7 @@ public void testBundledJdkForIntegTest() { null, null, true, - "bundledJdk not allowed for elasticsearch distribution [testdistro]" + "bundledJdk cannot be set on elasticsearch distribution [testdistro]" ); } diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java similarity index 89% rename from buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java rename to buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java index 96b6208be7205..cba7590701154 100644 --- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/DistroTestPluginTests.java +++ b/buildSrc/src/test/java/org/elasticsearch/gradle/docker/DockerSupportServiceTests.java @@ -1,14 +1,16 @@ -package org.elasticsearch.gradle.test; +package org.elasticsearch.gradle.docker; + +import org.elasticsearch.gradle.test.GradleIntegrationTestCase; import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.elasticsearch.gradle.test.DistroTestPlugin.deriveId; -import static org.elasticsearch.gradle.test.DistroTestPlugin.parseOsRelease; +import static org.elasticsearch.gradle.docker.DockerSupportService.deriveId; +import static org.elasticsearch.gradle.docker.DockerSupportService.parseOsRelease; import static org.hamcrest.CoreMatchers.equalTo; -public class DistroTestPluginTests extends GradleIntegrationTestCase { +public class DockerSupportServiceTests extends GradleIntegrationTestCase { public void testParseOsReleaseOnOracle() { final List lines = List.of( diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 
84cfc6bf040b9..23682895b32d3 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -1,4 +1,4 @@ -import org.elasticsearch.gradle.BuildPlugin +import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.info.BuildParams @@ -6,6 +6,7 @@ import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.distribution-download' testFixtures.useFixture() @@ -105,10 +106,19 @@ task copyKeystore(type: Sync) { } } -preProcessFixture { - if (TestFixturesPlugin.dockerComposeSupported()) { - dependsOn assemble +elasticsearch_distributions { + Flavor.values().each { distroFlavor -> + "docker_$distroFlavor" { + flavor = distroFlavor + type = 'docker' + version = VersionProperties.getElasticsearch() + failIfUnavailable = false // This ensures we don't attempt to build images if docker is unavailable + } } +} + +preProcessFixture { + dependsOn elasticsearch_distributions.docker_default, elasticsearch_distributions.docker_oss dependsOn copyKeystore doLast { // tests expect to have an empty repo @@ -140,16 +150,13 @@ task integTest(type: Test) { outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true } maxParallelForks = '1' include '**/*IT.class' - // don't add the tasks to build the docker images if we have no way of testing them - if (TestFixturesPlugin.dockerComposeSupported()) { - dependsOn assemble - } } check.dependsOn integTest void addBuildDockerImage(final boolean oss) { final Task buildDockerImageTask = task(taskName("build", oss, "DockerImage"), type: LoggedExec) { + ext.requiresDocker = true // mark this task as requiring docker to execute inputs.files(tasks.named(taskName("copy", oss, "DockerContext"))) List tags if (oss) { @@ -179,7 +186,6 @@ void 
addBuildDockerImage(final boolean oss) { } } assemble.dependsOn(buildDockerImageTask) - BuildPlugin.requireDocker(buildDockerImageTask) } for (final boolean oss : [false, true]) { diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 135debae6e82a..eaf72ab57aa61 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -97,7 +97,7 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', executable = "${BuildParams.runtimeJavaHome}/bin/java" env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}" maxWaitInSeconds 60 - onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && BuildParams.inFipsJvm == false } + onlyIf { BuildParams.inFipsJvm == false } waitCondition = { fixture, ant -> // the hdfs.MiniHDFS fixture writes the ports file when // it's ready, so we can just wait for the file to exist diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index f3027a0d5b91b..2d0acca20a50d 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -16,10 +16,12 @@ * specific language governing permissions and limitations * under the License. 
*/ +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testfixtures.TestFixturesPlugin apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.distribution-download' testFixtures.useFixture() @@ -42,15 +44,17 @@ task copyKeystore(type: Sync) { } } -preProcessFixture { - if (TestFixturesPlugin.dockerComposeSupported()) { - if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { - dependsOn ":distribution:docker:buildDockerImage" - } else { - dependsOn ":distribution:docker:buildOssDockerImage" - } +elasticsearch_distributions { + docker { + type = 'docker' + flavor = System.getProperty('tests.distribution', 'default') + version = VersionProperties.getElasticsearch() + failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable } - dependsOn copyKeystore +} + +preProcessFixture { + dependsOn copyKeystore, elasticsearch_distributions.docker doLast { // tests expect to have an empty repo project.delete( @@ -68,14 +72,12 @@ preProcessFixture { } } -if (TestFixturesPlugin.dockerComposeSupported()) { - dockerCompose { - tcpPortsToIgnoreWhenWaiting = [9600, 9601] - if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { - useComposeFiles = ['docker-compose.yml'] - } else { - useComposeFiles = ['docker-compose-oss.yml'] - } +dockerCompose { + tcpPortsToIgnoreWhenWaiting = [9600, 9601] + if ('default'.equalsIgnoreCase(System.getProperty('tests.distribution', 'default'))) { + useComposeFiles = ['docker-compose.yml'] + } else { + useComposeFiles = ['docker-compose-oss.yml'] } } @@ -100,10 +102,6 @@ task integTest(type: Test) { outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true } maxParallelForks = '1' include '**/*IT.class' - // don't add the tasks to build the docker images if we have no way of testing them - if (TestFixturesPlugin.dockerComposeSupported()) { - 
dependsOn ":distribution:docker:buildDockerImage" - } } check.dependsOn integTest