From 301048ea734ebf48641be0cb63a15f4c203f5f61 Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Mon, 14 Oct 2019 12:42:01 -0700
Subject: [PATCH] Remove usages of ClusterFormationTasks and replace with test
 clusters (#1365)

---
 .../hadoop/gradle/BuildPlugin.groovy          |   5 +-
 .../fixture/ElasticsearchFixturePlugin.groovy | 187 +++--------
 .../gradle/fixture/hadoop/InstanceInfo.groovy |   2 +-
 .../hadoop/gradle/util/PlaceholderTask.groovy | 313 ------------------
 qa/kerberos/build.gradle                      | 147 ++++----
 5 files changed, 102 insertions(+), 552 deletions(-)
 delete mode 100644 buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/util/PlaceholderTask.groovy

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
index c7e51d0f3..82753f570 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
@@ -5,6 +5,7 @@ import org.elasticsearch.gradle.info.GenerateGlobalBuildInfoTask
 import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin
 import org.elasticsearch.gradle.info.JavaHome
 import org.elasticsearch.gradle.precommit.LicenseHeadersTask
+import org.elasticsearch.gradle.testclusters.RestTestRunnerTask
 import org.elasticsearch.hadoop.gradle.util.Resources
 import org.gradle.api.GradleException
 import org.gradle.api.JavaVersion
@@ -577,7 +578,7 @@ class BuildPlugin implements Plugin<Project> {
         hadoopTestingJar.from(project.sourceSets.main.output)
         hadoopTestingJar.from(project.sourceSets.itest.output)
 
-        Test integrationTest = project.tasks.create('integrationTest', Test.class)
+        Test integrationTest = project.tasks.create('integrationTest', RestTestRunnerTask.class)
         integrationTest.dependsOn(hadoopTestingJar)
 
         integrationTest.testClassesDirs = project.sourceSets.itest.output.classesDirs
@@ -713,7 +714,7 @@ class BuildPlugin implements Plugin<Project> {
     }
 
     private static void configurePrecommit(Project project) {
-        if (project != project.rootProject) {
+        if (project != project.rootProject && project.hasProperty('localRepo') == false) {
             LicenseHeadersTask licenseHeaders = project.tasks.create('licenseHeaders', LicenseHeadersTask.class)
             project.tasks.getByName('check').dependsOn(licenseHeaders)
         }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/ElasticsearchFixturePlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/ElasticsearchFixturePlugin.groovy
index f6c60cb3d..d55089ccc 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/ElasticsearchFixturePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/ElasticsearchFixturePlugin.groovy
@@ -1,20 +1,13 @@
 package org.elasticsearch.hadoop.gradle.fixture
 
-import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.test.ClusterConfiguration
-import org.elasticsearch.gradle.test.ClusterFormationTasks
-import org.elasticsearch.gradle.test.NodeInfo
-import org.elasticsearch.hadoop.gradle.util.PlaceholderTask
+import org.elasticsearch.gradle.testclusters.ElasticsearchCluster
+import org.elasticsearch.gradle.testclusters.RestTestRunnerTask
+import org.elasticsearch.gradle.testclusters.TestClustersPlugin
+import org.elasticsearch.gradle.testclusters.TestDistribution
+import org.gradle.api.NamedDomainObjectContainer
 import org.gradle.api.Plugin
 import org.gradle.api.Project
-import org.gradle.api.Task
-import org.gradle.api.execution.TaskExecutionAdapter
-import org.gradle.api.tasks.TaskState
-import org.gradle.util.ConfigureUtil
-
-import java.nio.charset.StandardCharsets
-import java.nio.file.Files
-import java.util.stream.Stream
+import org.gradle.process.CommandLineArgumentProvider
 
 /**
  * Plugin that adds the ability to stand up an Elasticsearch cluster for tests.
@@ -28,92 +21,60 @@ import java.util.stream.Stream
  */
 class ElasticsearchFixturePlugin implements Plugin<Project> {
 
-    static class ElasticsearchCluster {
-
-        Project project
-        ClusterConfiguration configuration
-        List<Task> tasks = []
-
-        ElasticsearchCluster(Project project) {
-            this.project = project
-            this.configuration = new ClusterConfiguration(project)
-        }
-
-        void clusterConf(Closure configClosure) {
-            ConfigureUtil.configure(configClosure, configuration)
-        }
-
-        void addTask(Task task) {
-            tasks.add(task)
-        }
-    }
-
     @Override
     void apply(Project project) {
-
+        project.pluginManager.apply(TestClustersPlugin)
         def version = project.hasProperty("es.version") ? project.getProperty("es.version") : project.elasticsearchVersion
 
         // Optionally allow user to disable the fixture
         def hasLocalRepo = project.hasProperty("localRepo")
-        def useFixture = !hasLocalRepo && Boolean.parseBoolean(project.hasProperty("tests.fixture.es.enable") ? project.getProperty("tests.fixture.es.enable") : "true")
+        def useFixture = !hasLocalRepo && Boolean.parseBoolean(project.findProperty("tests.fixture.es.enable") ?: "true")
+        def integrationTestTask = project.tasks.getByName("integrationTest") as RestTestRunnerTask
 
         if (useFixture) {
             // Depends on project already containing an "integrationTest"
             // task, as well as javaHome+runtimeJavaHome configured
-            createClusterFor(project.tasks.getByName("integrationTest"), project, version)
+            createClusterFor(integrationTestTask, project, version)
         } else {
-            project.tasks.getByName("integrationTest") {
-                systemProperty "test.disable.local.es", "true"
-            }
+            integrationTestTask.systemProperty("test.disable.local.es", "true")
         }
     }
 
-    private static def createClusterFor(Task integrationTest, Project project, String version) {
-        // Version settings
-        def majorVersion = version.tokenize(".").get(0).toInteger()
-
-        // Init task can be used to prepare cluster
-        Task clusterInit = project.tasks.create(name: "esCluster#init", dependsOn: project.testClasses)
-        integrationTest.dependsOn(clusterInit)
-
-        ElasticsearchCluster cluster = project.extensions.create("esCluster", ElasticsearchCluster.class, project)
-        cluster.tasks.add(integrationTest)
-        ClusterConfiguration clusterConfig = cluster.configuration
+    private static void createClusterFor(RestTestRunnerTask integrationTest, Project project, String version) {
+        def clustersContainer = project.extensions.getByName(TestClustersPlugin.EXTENSION_NAME) as NamedDomainObjectContainer<ElasticsearchCluster>
+        def integTestCluster = clustersContainer.create("integTest") { ElasticsearchCluster cluster ->
+            cluster.version = version
+            cluster.testDistribution = TestDistribution.DEFAULT
+        }
 
-        // default settings:
-        clusterConfig.clusterName = "elasticsearch-fixture"
-        clusterConfig.numNodes = 1
-        clusterConfig.httpPort = 9500
-        clusterConfig.transportPort = 9600
-        clusterConfig.distribution = "default" // Full Distribution
+        integrationTest.useCluster(integTestCluster)
+        // Add the cluster HTTP URI as a system property which isn't tracked as a task input
+        integrationTest.jvmArgumentProviders.add({ ["-Dtests.rest.cluster=${integTestCluster.httpSocketURI}"] } as CommandLineArgumentProvider)
 
-        // Set BWC if not current ES version:
-        if (version != project.elasticsearchVersion) {
-            clusterConfig.bwcVersion = Version.fromString(version)
-            clusterConfig.numBwcNodes = 1
-        }
+        // Version settings
+        def majorVersion = version.tokenize(".").get(0).toInteger()
 
         // Version specific configurations
         if (majorVersion <= 2) {
-            clusterConfig.setting("transport.type","local")
-            clusterConfig.setting("http.type","netty3")
-            clusterConfig.setting("script.inline", "true")
-            clusterConfig.setting("script.indexed", "true")
+            integTestCluster.setting("transport.type","local")
+            integTestCluster.setting("http.type","netty3")
+            integTestCluster.setting("script.inline", "true")
+            integTestCluster.setting("script.indexed", "true")
         } else if (majorVersion == 5) {
-            clusterConfig.setting("transport.type","netty4")
-            clusterConfig.setting("http.type","netty4")
-            clusterConfig.setting("script.inline", "true")
-            clusterConfig.setting("node.ingest", "true")
-            clusterConfig.setting("script.max_compilations_rate", null)
+            integTestCluster.setting("transport.type","netty4")
+            integTestCluster.setting("http.type","netty4")
+            integTestCluster.setting("script.inline", "true")
+            integTestCluster.setting("node.ingest", "true")
+            integTestCluster.setting("script.max_compilations_rate", null)
         } else if (majorVersion == 6) {
-            clusterConfig.setting("node.ingest", "true")
-            clusterConfig.setting("http.host", "localhost")
-            clusterConfig.systemProperty('es.http.cname_in_publish_address', 'true')
+            integTestCluster.setting("node.ingest", "true")
+            integTestCluster.setting("http.host", "localhost")
+            integTestCluster.systemProperty('es.http.cname_in_publish_address', 'true')
         } else if (majorVersion >= 7) {
-            clusterConfig.setting("node.ingest", "true")
-            clusterConfig.setting("http.host", "localhost")
+            integTestCluster.setting("node.ingest", "true")
+            integTestCluster.setting("http.host", "localhost")
             // TODO: Remove this when this is the default in 7
-            clusterConfig.systemProperty('es.http.cname_in_publish_address', 'true')
+            integTestCluster.systemProperty('es.http.cname_in_publish_address', 'true')
         }
 
         // Also write a script to a file for use in tests
@@ -128,79 +89,7 @@ class ElasticsearchFixturePlugin implements Plugin<Project> {
             script = new File(scriptsDir, "increment.painless").setText("ctx._source.counter = ctx._source.getOrDefault('counter', 0) + 1", 'UTF-8')
         }
         if (script != null) {
-            clusterConfig.extraConfigFile("script", script)
+            integTestCluster.extraConfigFile("script", script)
         }
-
-        project.gradle.projectsEvaluated {
-            Task clusterMain = new PlaceholderTask()
-            List<NodeInfo> nodes = ClusterFormationTasks.setup(project, "esCluster", clusterMain, clusterConfig)
-            project.tasks.getByPath("esCluster#wait").doLast {
-                integrationTest.systemProperty('tests.rest.cluster', "${nodes.collect{it.httpUri()}.join(",")}")
-            }
-
-            // dump errors and warnings from cluster log on failure
-            TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
-                @Override
-                void afterExecute(Task task, TaskState state) {
-                    if (state.failure != null) {
-                        for (NodeInfo nodeInfo : nodes) {
-                            printLogExcerpt(nodeInfo)
-                        }
-                    }
-                }
-            }
-            for (Task clusterTask : cluster.tasks) {
-                for (Object dependency : clusterMain.taskDeps) {
-                    clusterTask.dependsOn(dependency)
-                }
-                for (Object finalizer : clusterMain.taskFinalizers) {
-                    clusterTask.finalizedBy(finalizer)
-                }
-                clusterTask.doFirst {
-                    project.gradle.addListener(logDumpListener)
-                }
-                clusterTask.doLast {
-                    project.gradle.removeListener(logDumpListener)
-                }
-            }
-        }
     }
-
-    /** Print out an excerpt of the log from the given node. */
-    protected static void printLogExcerpt(NodeInfo nodeInfo) {
-        File logFile = new File(nodeInfo.homeDir, "logs/${nodeInfo.clusterName}.log")
-        println("\nCluster ${nodeInfo.clusterName} - node ${nodeInfo.nodeNum} log excerpt:")
-        println("(full log at ${logFile})")
-        println('-----------------------------------------')
-        Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
-        try {
-            boolean inStartup = true
-            boolean inExcerpt = false
-            int linesSkipped = 0
-            for (String line : stream) {
-                if (line.startsWith("[")) {
-                    inExcerpt = false // clear with the next log message
-                }
-                if (line =~ /(\[WARN *\])|(\[ERROR *\])/) {
-                    inExcerpt = true // show warnings and errors
-                }
-                if (inStartup || inExcerpt) {
-                    if (linesSkipped != 0) {
-                        println("... SKIPPED ${linesSkipped} LINES ...")
-                    }
-                    println(line)
-                    linesSkipped = 0
-                } else {
-                    ++linesSkipped
-                }
-                if (line =~ /recovered \[\d+\] indices into cluster_state/) {
-                    inStartup = false
-                }
-            }
-        } finally {
-            stream.close()
-        }
-        println('=========================================')
-    }
 }
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/hadoop/InstanceInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/hadoop/InstanceInfo.groovy
index e3befc345..a5e4df891 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/hadoop/InstanceInfo.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/fixture/hadoop/InstanceInfo.groovy
@@ -236,7 +236,7 @@ class InstanceInfo {
 
     /** Return the java home used by this node. */
     String getJavaHome() {
-        return javaVersion == null ? project.runtimeJavaHome : project.javaVersions.get(javaVersion)
+        return javaVersion == null ? project.runtimeJavaHome : project.javaVersions.find { it.version == javaVersion }.javaHome.absolutePath
     }
 
     /** Returns debug string for the command that started this node. */
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/util/PlaceholderTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/util/PlaceholderTask.groovy
deleted file mode 100644
index 54d52f0a6..000000000
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/util/PlaceholderTask.groovy
+++ /dev/null
@@ -1,313 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ - -package org.elasticsearch.hadoop.gradle.util - -import org.gradle.api.Action -import org.gradle.api.AntBuilder -import org.gradle.api.Project -import org.gradle.api.Task -import org.gradle.api.logging.Logger -import org.gradle.api.logging.LoggingManager -import org.gradle.api.plugins.Convention -import org.gradle.api.plugins.ExtensionContainer -import org.gradle.api.provider.Property -import org.gradle.api.specs.Spec -import org.gradle.api.tasks.TaskDependency -import org.gradle.api.tasks.TaskDestroyables -import org.gradle.api.tasks.TaskInputs -import org.gradle.api.tasks.TaskLocalState -import org.gradle.api.tasks.TaskOutputs -import org.gradle.api.tasks.TaskState - -import java.time.Duration - -/** - * A placeholder task only useful for accumulating the list of dependencies and finalize tasks for a - * task that is passed around to third party code. - */ -class PlaceholderTask implements Task { - - Set taskDeps = [] - Set taskFinalizers = [] - - @Override - String getName() { - return "placeholder" - } - - @Override - Set getDependsOn() { - return taskDeps - } - - @Override - void setDependsOn(Iterable dependsOnTasks) { - taskDeps = [] - taskDeps.addAll(dependsOnTasks) - } - - @Override - Task dependsOn(Object... paths) { - taskDeps.addAll(paths) - return this - } - - @Override - Task finalizedBy(Object... paths) { - taskFinalizers.addAll(paths) - return this - } - - @Override - void setFinalizedBy(Iterable finalizedBy) { - taskFinalizers = [] - taskFinalizers.addAll(finalizedBy) - } - - @Override - Project getProject() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - List> getActions() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setActions(List> actions) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDependency getTaskDependencies() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void onlyIf(Closure onlyIfClosure) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void onlyIf(Spec onlyIfSpec) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setOnlyIf(Closure onlyIfClosure) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setOnlyIf(Spec onlyIfSpec) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskState getState() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setDidWork(boolean didWork) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - boolean getDidWork() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - String getPath() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doFirst(Action action) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doFirst(Closure action) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doFirst(String actionName, Action action) { - throw new 
UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doLast(Action action) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doLast(String actionName, Action action) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task doLast(Closure action) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - boolean getEnabled() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setEnabled(boolean enabled) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task configure(Closure configureClosure) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - AntBuilder getAnt() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Logger getLogger() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - LoggingManager getLogging() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Object property(String propertyName) throws MissingPropertyException { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - boolean hasProperty(String propertyName) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Convention getConvention() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - String getDescription() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setDescription(String description) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - String getGroup() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setGroup(String group) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskInputs getInputs() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskOutputs getOutputs() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDestroyables getDestroyables() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskLocalState getLocalState() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - File getTemporaryDir() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Task mustRunAfter(Object... 
paths) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setMustRunAfter(Iterable mustRunAfter) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDependency getMustRunAfter() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDependency getFinalizedBy() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDependency shouldRunAfter(Object... paths) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - void setShouldRunAfter(Iterable shouldRunAfter) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - TaskDependency getShouldRunAfter() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - int compareTo(Task o) { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - ExtensionContainer getExtensions() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } - - @Override - Property getTimeout() { - throw new UnsupportedOperationException("Placeholder task cannot perform operations") - } -} diff --git a/qa/kerberos/build.gradle b/qa/kerberos/build.gradle index bf3be3399..87ecff402 100644 --- a/qa/kerberos/build.gradle +++ b/qa/kerberos/build.gradle @@ -19,6 +19,7 @@ import org.elasticsearch.gradle.test.AntFixture +import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask import org.elasticsearch.hadoop.gradle.fixture.hadoop.ServiceDescriptor import org.elasticsearch.hadoop.gradle.fixture.hadoop.conf.HadoopClusterConfiguration import org.elasticsearch.hadoop.gradle.fixture.hadoop.HadoopClusterFormationTasks @@ -53,11 +54,9 @@ dependencies { compile 'org.scala-lang:scala-library:2.11.8' compile project(":elasticsearch-spark-20") - if (!localRepo) { - // These try to pull in the elasticsearch jar, but when running with a local repo it is not available - compile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath - compile project(":elasticsearch-storm").sourceSets.itest.runtimeClasspath - } + // These try to pull in the elasticsearch jar, but when running with a local repo it is not available + compile project(":elasticsearch-hadoop-mr").sourceSets.itest.runtimeClasspath + compile project(":elasticsearch-storm").sourceSets.itest.runtimeClasspath kdcFixture project(':test:fixtures:minikdc') @@ -144,7 +143,7 @@ if (disableTests) { // Configure MiniKDC AntFixture kdcFixture = project.tasks.create('kdcFixture', AntFixture) { dependsOn project.configurations.kdcFixture - executable = new File(project.runtimeJavaHome, 'bin/java') + executable = new File(project.compilerJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.kdcFixture.asPath }" waitCondition = { fixture, ant -> // the kdc wrapper writes the ports file when @@ -178,55 +177,38 @@ if (disableTests) { // ============================================================================= // Configure ES with Kerberos Auth - esCluster { - clusterConf { - dependsOn(kdcFixture) - - // This may be needed if we ever run against java 9: - // --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED - - // Set kerberos conf on JVM - systemProperty("java.security.krb5.conf", krb5Conf.toString()) - 
//systemProperty("sun.security.krb5.debug", "true") - - // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the - // hostname when starting the cluster but do not know the exact address that is first in the http - // ports file - setting 'http.host', '127.0.0.1' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.ml.enabled', 'false' - // Enable Security - setting 'xpack.security.enabled', 'true' - setting 'xpack.security.audit.enabled', 'true' - // Configure File Realm - setting 'xpack.security.authc.realms.file.myfile.order', '0' - // Configure Native Realm - setting 'xpack.security.authc.realms.native.mynative.order', '1' - // Configure Kerberos Realm - setting 'xpack.security.authc.realms.kerberos.krb5.order', '2' - setting 'xpack.security.authc.realms.kerberos.krb5.keytab.path', 'es.keytab' - setting 'xpack.security.authc.realms.kerberos.krb5.krb.debug', 'true' - setting 'xpack.security.authc.realms.kerberos.krb5.remove_realm_name', 'false' - // Configure API Key Realm - setting 'xpack.security.authc.api_key.enabled', 'true' - - setupCommand 'setupTestAdmin', - 'bin/elasticsearch-users', 'useradd', "test_admin", '-p', 'x-pack-test-password', '-r', 'superuser' - - extraConfigFile('es.keytab', esKeytab.toAbsolutePath()) - - // Override the wait condition; Do the same wait logic, but pass in the test credentials for the API - waitCondition = { node, ant -> - File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", - dest: tmpFile.toString(), - username: 'test_admin', - password: 'x-pack-test-password', - ignoreerrors: true, - retries: 10) - return tmpFile.exists() - } - } + testClusters.integTest { + // This may be needed if we ever run against java 9: + // --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED + + // Set kerberos conf on JVM + systemProperty("java.security.krb5.conf", krb5Conf.toString()) + //systemProperty("sun.security.krb5.debug", "true") + + // force localhost IPv4 otherwise it is a chicken and egg problem where we need the keytab for the + // hostname when starting the cluster but do not know the exact address that is first in the http + // ports file + setting 'http.host', '127.0.0.1' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.ml.enabled', 'false' + // Enable Security + setting 'xpack.security.enabled', 'true' + setting 'xpack.security.audit.enabled', 'true' + // Configure File Realm + setting 'xpack.security.authc.realms.file.myfile.order', '0' + // Configure Native Realm + setting 'xpack.security.authc.realms.native.mynative.order', '1' + // Configure Kerberos Realm + setting 'xpack.security.authc.realms.kerberos.krb5.order', '2' + setting 'xpack.security.authc.realms.kerberos.krb5.keytab.path', 'es.keytab' + setting 'xpack.security.authc.realms.kerberos.krb5.krb.debug', 'true' + setting 'xpack.security.authc.realms.kerberos.krb5.remove_realm_name', 'false' + // Configure API Key Realm + setting 'xpack.security.authc.api_key.enabled', 'true' + + user username: 'test_admin', password: 'x-pack-test-password', role: 'superuser' + + extraConfigFile('es.keytab', esKeytab.toFile()) } // Configure Integration Test Task @@ -248,17 +230,21 @@ if (disableTests) { File resourceDir = project.sourceSets.main.resources.getSrcDirs().head() File mrItestResourceDir = project(":elasticsearch-hadoop-mr").sourceSets.itest.resources.getSrcDirs().head() - JavaExec setupUsers = 
project.tasks.create("setupUsers", JavaExec.class) { - main = 'org.elasticsearch.hadoop.qa.kerberos.setup.SetupKerberosUsers' - classpath = sourceSets.main.runtimeClasspath - systemProperty('es.nodes', 'localhost:9500') - systemProperty('es.net.http.auth.user', 'test_admin') - systemProperty('es.net.http.auth.pass', 'x-pack-test-password') - systemProperty('principals', "$clientPrincipal$realm") - systemProperty('users', "client") - systemProperty('proxiers', "$hivePrincipalName$realm") + Task setupUsers = project.tasks.create("setupUsers", DefaultTestClustersTask) { + useCluster(testClusters.integTest) + doLast { + project.javaexec { + main = 'org.elasticsearch.hadoop.qa.kerberos.setup.SetupKerberosUsers' + classpath = sourceSets.main.runtimeClasspath + systemProperty('es.nodes', 'localhost:9500') + systemProperty('es.net.http.auth.user', 'test_admin') + systemProperty('es.net.http.auth.pass', 'x-pack-test-password') + systemProperty('principals', "$clientPrincipal$realm") + systemProperty('users', "client") + systemProperty('proxiers', "$hivePrincipalName$realm") + } + } } - esCluster.addTask(setupUsers) integrationTest.dependsOn(setupUsers) // ============================================================================= @@ -361,17 +347,7 @@ if (disableTests) { } config.addDependency(jar) config.addDependency(testingJar) - - // Create a task to use to leave the cluster online until a user provides input to tear it down. - Task testCluster = project.tasks.create("testCluster") - testCluster.doLast { - project.logger.lifecycle("TEST FINISHED") - System.console().readLine("Tear Down? ") - project.logger.lifecycle("STOPPING CLUSTER") - } - esCluster.addTask(testCluster) - config.addClusterTask(testCluster) - + // We need to create a tmp directory in hadoop before history server does, because history server will set permissions // wrong. HadoopMRJob createTmp = config.service('hadoop').role('datanode').createClusterTask('createTmp', HadoopMRJob.class) { @@ -465,7 +441,7 @@ if (disableTests) { ]) args = ['/data/artists/artists.dat'] } - esCluster.addTask(mrLoadData) + integrationTest.dependsOn(mrLoadData) // Run the MR job to read data out of ES. Ensure Kerberos settings are available. HadoopMRJob mrReadData = config.createClusterTask('mrReadData', HadoopMRJob.class) { @@ -490,7 +466,7 @@ if (disableTests) { ]) args = ['/data/output/mr'] } - esCluster.addTask(mrReadData) + integrationTest.dependsOn(mrReadData) // ============================================================================= // Spark Jobs @@ -521,7 +497,7 @@ if (disableTests) { "-Dtest.krb5.keytab=${clientKeytab.toString()}") args = ['/data/artists/artists.dat'] } - esCluster.addTask(sparkLoadData) + integrationTest.dependsOn(sparkLoadData) // Run the Spark job to load data to ES. Ensure Kerberos settings are available. 
SparkApp sparkReadData = config.createClusterTask('sparkReadData', SparkApp.class) { @@ -547,7 +523,7 @@ if (disableTests) { "-Dtest.krb5.keytab=${clientKeytab.toString()}") args = ['/data/output/spark'] } - esCluster.addTask(sparkReadData) + integrationTest.dependsOn(sparkReadData) // ============================================================================= // Hive Jobs @@ -581,7 +557,7 @@ if (disableTests) { 'TEST_LIB': jar.archivePath.toString() ]) } - esCluster.addTask(hiveLoadData) + integrationTest.dependsOn(hiveLoadData) HiveBeeline hiveReadData = config.createClusterTask('hiveReadData', HiveBeeline.class) { clusterConfiguration = config @@ -597,7 +573,7 @@ if (disableTests) { 'TEST_LIB': jar.archivePath.toString() ]) } - esCluster.addTask(hiveReadData) + integrationTest.dependsOn(hiveReadData) // ============================================================================= // Pig Jobs @@ -612,7 +588,7 @@ if (disableTests) { 'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}" ] } - esCluster.addTask(pigLoadData) + integrationTest.dependsOn(pigLoadData) PigScript pigReadData = config.createClusterTask('pigReadData', PigScript.class) { clusterConfiguration = config @@ -623,7 +599,7 @@ if (disableTests) { 'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}" ] } - esCluster.addTask(pigReadData) + integrationTest.dependsOn(pigReadData) // ============================================================================= // Copy job outputs @@ -663,9 +639,6 @@ if (disableTests) { environmentVariables.put('HADOOP_ROOT_LOGGER','TRACE,console') args = ['-copyToLocal', "/data/output/$integrationName", outputDataDir] } - // When using testCluster, it should depend on the copy - // output tasks to ensure all jobs have run. - testCluster.dependsOn(copyOutputTask) // Integration test needs to depend on copy output tasks // to ensure all integrations have their output files on // disk