diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
index f512f8e40..d12018e6d 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
@@ -38,7 +38,6 @@ import org.gradle.api.tasks.bundling.Jar
 import org.gradle.api.tasks.compile.JavaCompile
 import org.gradle.api.tasks.javadoc.Javadoc
 import org.gradle.api.tasks.testing.Test
-import org.gradle.api.tasks.testing.TestReport
 import org.gradle.external.javadoc.JavadocOutputLevel
 import org.gradle.external.javadoc.MinimalJavadocOptions
 import org.gradle.internal.jvm.Jvm
@@ -66,7 +65,6 @@ class BuildPlugin implements Plugin {
         configureEclipse(project)
         configureMaven(project)
         configureIntegrationTestTask(project)
-        configureTestReports(project)
         configurePrecommit(project)
         configureDependenciesInfo(project)
     }
@@ -321,12 +319,6 @@ class BuildPlugin implements Plugin {
             testImplementation("org.locationtech.spatial4j:spatial4j:0.6")
             testImplementation("com.vividsolutions:jts:1.13")
 
-            // TODO: Remove when we merge ITests to test dirs
-            itestCompile("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
-                // For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
-                // that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
-                exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
-            }
             itestImplementation(project.sourceSets.main.output)
             itestImplementation(project.configurations.testImplementation)
             itestImplementation(project.configurations.provided)
@@ -593,49 +585,45 @@ class BuildPlugin implements Plugin {
      * @param project to be configured
      */
     private static void configureIntegrationTestTask(Project project) {
-        Jar hadoopTestingJar = project.rootProject.tasks.findByName('hadoopTestingJar') as Jar
-        if (hadoopTestingJar == null) {
-            // jar used for testing Hadoop remotely (es-hadoop + tests)
-            hadoopTestingJar = project.rootProject.tasks.create('hadoopTestingJar', Jar)
-            hadoopTestingJar.dependsOn(project.rootProject.tasks.getByName('jar'))
-            hadoopTestingJar.classifier = 'testing'
-            project.logger.info("Created Remote Testing Jar")
-        }
+        if (project != project.rootProject) {
+            TaskProvider itestJar = project.tasks.register('itestJar', Jar) { Jar itestJar ->
+                itestJar.dependsOn(project.tasks.getByName('jar'))
+                itestJar.getArchiveClassifier().set('testing')
+
+                // Add this project's classes to the testing uber-jar
+                itestJar.from(project.sourceSets.main.output)
+                itestJar.from(project.sourceSets.test.output)
+                itestJar.from(project.sourceSets.itest.output)
+            }
 
-        // Add this project's classes to the testing uber-jar
-        hadoopTestingJar.from(project.sourceSets.test.output)
-        hadoopTestingJar.from(project.sourceSets.main.output)
-        hadoopTestingJar.from(project.sourceSets.itest.output)
-
-        Test integrationTest = project.tasks.create('integrationTest', RestTestRunnerTask.class)
-        integrationTest.dependsOn(hadoopTestingJar)
-
-        integrationTest.testClassesDirs = project.sourceSets.itest.output.classesDirs
-        integrationTest.classpath = project.sourceSets.itest.runtimeClasspath
-        integrationTest.excludes = ["**/Abstract*.class"]
-
-        integrationTest.ignoreFailures = false
-
-        integrationTest.executable = "${project.ext.get('runtimeJavaHome')}/bin/java"
-        integrationTest.minHeapSize = "256m"
-        integrationTest.maxHeapSize = "2g"
-
-        integrationTest.testLogging {
-            displayGranularity 0
-            events "started", "failed" //, "standardOut", "standardError"
-            exceptionFormat "full"
-            showCauses true
-            showExceptions true
-            showStackTraces true
-            stackTraceFilters "groovy"
-            minGranularity 2
-            maxGranularity 2
-        }
+            Test integrationTest = project.tasks.create('integrationTest', RestTestRunnerTask.class)
+            integrationTest.dependsOn(itestJar)
+
+            integrationTest.testClassesDirs = project.sourceSets.itest.output.classesDirs
+            integrationTest.classpath = project.sourceSets.itest.runtimeClasspath
+            integrationTest.excludes = ["**/Abstract*.class"]
+
+            integrationTest.ignoreFailures = false
+
+            integrationTest.executable = "${project.ext.get('runtimeJavaHome')}/bin/java"
+            integrationTest.minHeapSize = "256m"
+            integrationTest.maxHeapSize = "2g"
+
+            integrationTest.testLogging {
+                displayGranularity 0
+                events "started", "failed" //, "standardOut", "standardError"
+                exceptionFormat "full"
+                showCauses true
+                showExceptions true
+                showStackTraces true
+                stackTraceFilters "groovy"
+                minGranularity 2
+                maxGranularity 2
+            }
 
-        integrationTest.reports.html.enabled = false
+            integrationTest.reports.html.enabled = false
 
-        // Only add cluster settings if it's not the root project
-        if (project != project.rootProject) {
+            // Only add cluster settings if it's not the root project
             project.logger.info "Configuring ${project.name} integrationTest task to use ES Fixture"
             // Create the cluster fixture around the integration test.
             // There's probably a more elegant way to do this in Gradle
@@ -643,22 +631,6 @@ class BuildPlugin implements Plugin {
         }
     }
 
-    /**
-     * Configure the root testReport task with the test tasks in this project to report on, creating the report task
-     * on root if it is not created yet.
-     * @param project to configure
-     */
-    private static void configureTestReports(Project project) {
-        TestReport testReport = project.rootProject.getTasks().findByName('testReport') as TestReport
-        if (testReport == null) {
-            // Create the task on root if it is not created yet.
-            testReport = project.rootProject.getTasks().create('testReport', TestReport.class)
-            testReport.setDestinationDir(project.rootProject.file("${project.rootProject.getBuildDir()}/reports/allTests"))
-        }
-        testReport.reportOn(project.getTasks().getByName('test'))
-        testReport.reportOn(project.getTasks().getByName('integrationTest'))
-    }
-
     /**
      * @param project that belongs to a git repo
      * @return the file containing the hash for the current branch
diff --git a/hive/build.gradle b/hive/build.gradle
index 2d6f43ebd..2c8544273 100644
--- a/hive/build.gradle
+++ b/hive/build.gradle
@@ -21,6 +21,14 @@ jar {
     }
 }
 
+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
diff --git a/mr/build.gradle b/mr/build.gradle
index 1efc459a6..49b88e243 100644
--- a/mr/build.gradle
+++ b/mr/build.gradle
@@ -20,6 +20,11 @@ dependencies {
     testImplementation(project.ext.hadoopClient)
     testImplementation("io.netty:netty-all:4.0.29.Final")
     testImplementation("org.elasticsearch:securemock:1.2")
+    itestImplementation("org.apache.hadoop:hadoop-minikdc:${project.ext.minikdcVersion}") {
+        // For some reason, the dependencies that are pulled in with MiniKDC have multiple resource files
+        // that cause issues when they are loaded. We exclude the ldap schema data jar to get around this.
+        exclude group: "org.apache.directory.api", module: "api-ldap-schema-data"
+    }
 }
 
 String generatedResources = "$buildDir/generated-resources/main"
diff --git a/mr/src/itest/java/org/elasticsearch/hadoop/Provisioner.java b/mr/src/itest/java/org/elasticsearch/hadoop/Provisioner.java
index 6b8506bc9..c05862de7 100644
--- a/mr/src/itest/java/org/elasticsearch/hadoop/Provisioner.java
+++ b/mr/src/itest/java/org/elasticsearch/hadoop/Provisioner.java
@@ -42,7 +42,7 @@ public abstract class Provisioner {
 
         // init ES-Hadoop JAR
        // expect the jar under build\libs
         try {
-            File folder = new File(".." + File.separator + "build" + File.separator + "libs" + File.separator).getCanonicalFile();
+            File folder = new File("build" + File.separator + "libs" + File.separator).getCanonicalFile();
             // find proper jar
             File[] files = folder.listFiles(new FileFilter() {
diff --git a/pig/build.gradle b/pig/build.gradle
index 53cdfb8bc..608606288 100644
--- a/pig/build.gradle
+++ b/pig/build.gradle
@@ -21,6 +21,14 @@ jar {
     }
 }
 
+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
diff --git a/qa/kerberos/build.gradle b/qa/kerberos/build.gradle
index b18c2d74a..55e2bba32 100644
--- a/qa/kerberos/build.gradle
+++ b/qa/kerberos/build.gradle
@@ -98,8 +98,8 @@ if (disableTests) {
 
     // Build uber storm jar for testing Storm remotely (es-hadoop + es-storm + qa tests)
     Jar qaKerberosStormJar = project.tasks.create('kerberosStormJar', Jar)
-    qaKerberosStormJar.dependsOn(project.rootProject.tasks.getByName('jar'))
-    qaKerberosStormJar.dependsOn(project.rootProject.tasks.getByName('hadoopTestingJar'))
+    qaKerberosStormJar.dependsOn(project(':elasticsearch-storm').tasks.getByName('jar'))
+    qaKerberosStormJar.dependsOn(project(':elasticsearch-storm').tasks.getByName('itestJar'))
     qaKerberosStormJar.classifier = 'storm-testing'
 
     // Add projects to the storm testing uber-jar
@@ -233,8 +233,14 @@ if (disableTests) {
 
     // Fixtures will be depending on the jar and test jar artifacts
     def jar = project.tasks.getByName('jar') as org.gradle.jvm.tasks.Jar
-    def testingJar = project.rootProject.tasks.findByName('hadoopTestingJar') as Jar
-
+    def kerberosItestJar = project.tasks.findByName('itestJar') as Jar
+    def mrJar = project(':elasticsearch-hadoop-mr').tasks.getByName('jar') as Jar
+    def mrItestJar = project(':elasticsearch-hadoop-mr').tasks.getByName('itestJar') as Jar
+    def hiveItestJar = project(':elasticsearch-hadoop-hive').tasks.getByName('itestJar') as Jar
+    def pigItestJar = project(':elasticsearch-hadoop-pig').tasks.getByName('itestJar') as Jar
+    def sparkItestJar = project(':elasticsearch-spark-20').tasks.getByName('itestJar') as Jar
+    def stormItestJar = project(':elasticsearch-storm').tasks.getByName('itestJar') as Jar
+
     // Need these for SSL items, test data, and scripts
     File resourceDir = project.sourceSets.main.resources.getSrcDirs().head()
     File mrItestResourceDir = project(":elasticsearch-hadoop-mr").sourceSets.itest.resources.getSrcDirs().head()
@@ -330,7 +336,7 @@ if (disableTests) {
         // Add the ES-Hadoop jar to the resource manager classpath so that it can load the token renewer implementation
         // for ES tokens. Otherwise, tokens may not be cancelled at the end of the job.
         s.role('resourcemanager') { RoleConfiguration r ->
-            r.addEnvironmentVariable('YARN_USER_CLASSPATH', testingJar.archivePath.toString())
+            r.addEnvironmentVariable('YARN_USER_CLASSPATH', mrJar.archivePath.toString())
             r.settingsFile('yarn-site.xml') { SettingsContainer.FileSettings f ->
                 // Add settings specifically for ES Node to allow for cancelling the tokens
                 f.addSetting('es.nodes', esAddress)
@@ -359,7 +365,13 @@ if (disableTests) {
             s.addSetting('es.nodes', esAddress)
         }
         config.addDependency(jar)
-        config.addDependency(testingJar)
+        config.addDependency(kerberosItestJar)
+        config.addDependency(mrJar)
+        config.addDependency(mrItestJar)
+        config.addDependency(hiveItestJar)
+        config.addDependency(pigItestJar)
+        config.addDependency(sparkItestJar)
+        config.addDependency(stormItestJar)
 
         // We need to create a tmp directory in hadoop before history server does, because history server will set permissions
         // wrong.
@@ -435,7 +447,7 @@ if (disableTests) {
         useCluster(testClusters.integTest)
         dependsOn(copyData, setupUsers)
         jobJar = jar.archivePath
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, mrItestJar.archivePath)
         jobClass = 'org.elasticsearch.hadoop.qa.kerberos.mr.LoadToES'
         jobSettings([
                 'es.resource': 'qa_kerberos_mr_data',
@@ -462,7 +474,7 @@ if (disableTests) {
         useCluster(testClusters.integTest)
         dependsOn(mrLoadData)
         jobJar = jar.archivePath
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, mrItestJar.archivePath)
         jobClass = 'org.elasticsearch.hadoop.qa.kerberos.mr.ReadFromES'
         jobSettings([
                 'es.resource': 'qa_kerberos_mr_data',
@@ -495,7 +507,7 @@ if (disableTests) {
         // principal = clientPrincipal + realm
         // keytab = clientKeytab.toString()
         jobJar = jar.archivePath
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, sparkItestJar.archivePath)
         jobClass = 'org.elasticsearch.hadoop.qa.kerberos.spark.LoadToES'
         jobSettings([
                 'spark.es.resource': 'qa_kerberos_spark_data',
@@ -523,7 +535,7 @@ if (disableTests) {
         // principal = clientPrincipal + realm
         // keytab = clientKeytab.toString()
         jobJar = jar.archivePath
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, sparkItestJar.archivePath)
         jobClass = 'org.elasticsearch.hadoop.qa.kerberos.spark.ReadFromES'
         jobSettings([
                 'spark.es.resource': 'qa_kerberos_spark_data',
@@ -565,7 +577,7 @@ if (disableTests) {
         dependsOn(jar, setupUsers, copyData, patchBeeline)
         hivePrincipal = hivePrincipalName + realm
         script = new File(resourceDir, 'hive/load_to_es.sql')
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, hiveItestJar.archivePath)
         environmentVariables.putAll([
                 'HADOOP_CLIENT_OPTS':
                         "-Djava.security.krb5.conf=${krb5Conf.toString()} " +
@@ -582,7 +594,7 @@ if (disableTests) {
         dependsOn(hiveLoadData)
         hivePrincipal = hivePrincipalName + realm
         script = new File(resourceDir, 'hive/read_from_es.sql')
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, hiveItestJar.archivePath)
         environmentVariables.putAll([
                 'HADOOP_CLIENT_OPTS':
                         "-Djava.security.krb5.conf=${krb5Conf.toString()} " +
@@ -602,7 +614,7 @@ if (disableTests) {
         useCluster(testClusters.integTest)
         dependsOn(jar, setupUsers, copyData)
         script = new File(resourceDir, 'pig/load_to_es.pig')
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, pigItestJar.archivePath)
         environmentVariables.putAll([
                 'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}"
         ])
@@ -614,7 +626,7 @@ if (disableTests) {
         useCluster(testClusters.integTest)
         dependsOn(pigLoadData)
         script = new File(resourceDir, 'pig/read_from_es.pig')
-        libJars(testingJar.archivePath)
+        libJars(kerberosItestJar.archivePath, pigItestJar.archivePath)
         environmentVariables.putAll([
                 'PIG_OPTS': "-Djava.security.krb5.conf=${krb5Conf.toString()}"
         ])
diff --git a/spark/core/itest/java/org/elasticsearch/spark/integration/SparkUtils.java b/spark/core/itest/java/org/elasticsearch/spark/integration/SparkUtils.java
index 9698a3ad5..b079f0936 100644
--- a/spark/core/itest/java/org/elasticsearch/spark/integration/SparkUtils.java
+++ b/spark/core/itest/java/org/elasticsearch/spark/integration/SparkUtils.java
@@ -38,7 +38,7 @@ public abstract class SparkUtils {
         // init ES-Hadoop JAR
         // expect the jar under build\libs
         try {
-            File folder = new File(".." + File.separator + ".." + File.separator + "build" + File.separator + "libs" + File.separator).getCanonicalFile();
+            File folder = new File("build" + File.separator + "libs" + File.separator).getCanonicalFile();
             System.out.println(folder.getAbsolutePath());
             // find proper jar
             File[] files = folder.listFiles(new FileFilter() {
diff --git a/spark/sql-13/build.gradle b/spark/sql-13/build.gradle
index e35a0e2c8..6d9a60384 100644
--- a/spark/sql-13/build.gradle
+++ b/spark/sql-13/build.gradle
@@ -147,6 +147,14 @@ jar {
     }
 }
 
+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     if (project.ext.scalaMajorVersion != '2.10') {
         dependsOn compileScala
diff --git a/spark/sql-20/build.gradle b/spark/sql-20/build.gradle
index 2deed035c..a51afe733 100644
--- a/spark/sql-20/build.gradle
+++ b/spark/sql-20/build.gradle
@@ -161,6 +161,14 @@ jar {
     }
 }
 
+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     if (project.ext.scalaMajorVersion != '2.10') {
         dependsOn compileScala
diff --git a/storm/build.gradle b/storm/build.gradle
index 5dd8e2004..a9d51ee8f 100644
--- a/storm/build.gradle
+++ b/storm/build.gradle
@@ -20,6 +20,14 @@ jar {
     }
 }
 
+itestJar {
+    from(zipTree(project(":elasticsearch-hadoop-mr").jar.archivePath)) {
+        include "org/elasticsearch/hadoop/**"
+        include "esh-build.properties"
+        include "META-INF/services/*"
+    }
+}
+
 javadoc {
     source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)