diff --git a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
index 2b4f2ca40..3c8d43022 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/hadoop/gradle/BuildPlugin.groovy
@@ -17,11 +17,15 @@ import org.gradle.api.artifacts.ProjectDependency
 import org.gradle.api.artifacts.ResolutionStrategy
 import org.gradle.api.artifacts.maven.MavenPom
 import org.gradle.api.artifacts.maven.MavenResolver
+import org.gradle.api.attributes.LibraryElements
+import org.gradle.api.attributes.Usage
 import org.gradle.api.file.CopySpec
+import org.gradle.api.file.FileCollection
 import org.gradle.api.java.archives.Manifest
 import org.gradle.api.plugins.JavaPlugin
 import org.gradle.api.plugins.MavenPlugin
 import org.gradle.api.plugins.MavenPluginConvention
+import org.gradle.api.tasks.SourceSet
 import org.gradle.api.tasks.SourceSetContainer
 import org.gradle.api.tasks.TaskProvider
 import org.gradle.api.tasks.Upload
@@ -84,6 +88,32 @@ class BuildPlugin implements Plugin<Project> {
     }
 
     private static void configureConfigurations(Project project) {
+        if (project != project.rootProject) {
+            // Set up avenues for sharing source files between projects in order to create embedded Javadocs
+            // Import source configuration
+            Configuration sources = project.configurations.create("additionalSources")
+            sources.canBeConsumed = false
+            sources.canBeResolved = true
+            sources.attributes {
+                // Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
+                // into incremental compilation analysis.
+                attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
+                attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
+            }
+
+            // Export source configuration
+            Configuration sourceElements = project.configurations.create("sourceElements")
+            sourceElements.canBeConsumed = true
+            sourceElements.canBeResolved = false
+            sourceElements.extendsFrom(sources)
+            sourceElements.attributes {
+                // Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
+                // into incremental compilation analysis.
+                attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
+                attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
+            }
+        }
+
         if (project.path.startsWith(":qa")) {
             return
         }
@@ -200,6 +230,15 @@ class BuildPlugin implements Plugin<Project> {
         project.sourceCompatibility = '1.8'
         project.targetCompatibility = '1.8'
 
+        // TODO: Remove all root project distribution logic. It should exist in a separate dist project.
+        if (project != project.rootProject) {
+            SourceSet mainSourceSet = project.sourceSets.main
+            FileCollection javaSourceDirs = mainSourceSet.java.sourceDirectories
+            javaSourceDirs.each { File srcDir ->
+                project.getArtifacts().add('sourceElements', srcDir)
+            }
+        }
+
         JavaCompile compileJava = project.tasks.getByName('compileJava') as JavaCompile
         compileJava.getOptions().setCompilerArgs(['-Xlint:unchecked', '-Xlint:options'])
 
@@ -235,6 +274,10 @@ class BuildPlugin implements Plugin<Project> {
         sourcesJar.dependsOn(project.tasks.classes)
         sourcesJar.classifier = 'sources'
         sourcesJar.from(project.sourceSets.main.allSource)
+        // TODO: Remove when root project does not handle distribution
+        if (project != project.rootProject) {
+            sourcesJar.from(project.configurations.additionalSources)
+        }
 
         // Configure javadoc
         Javadoc javadoc = project.tasks.getByName('javadoc') as Javadoc
@@ -246,6 +289,10 @@ class BuildPlugin implements Plugin<Project> {
                 "org/elasticsearch/hadoop/util/**",
                 "org/apache/hadoop/hive/**"
         ]
+        // TODO: Remove when root project does not handle distribution
+        if (project != project.rootProject) {
+            javadoc.source = project.files(project.configurations.additionalSources)
+        }
 
         // Set javadoc executable to runtime Java (1.8)
         javadoc.executable = new File(project.ext.runtimeJavaHome, 'bin/javadoc')
diff --git a/hive/build.gradle b/hive/build.gradle
index 0db0afc18..3ae60751a 100644
--- a/hive/build.gradle
+++ b/hive/build.gradle
@@ -33,6 +33,8 @@ dependencies {
     itestImplementation("org.apache.hive:hive-jdbc:$hiveVersion") {
         exclude module: "log4j-slf4j-impl"
     }
+
+    additionalSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {
@@ -42,12 +44,3 @@ jar {
         include "META-INF/services/*"
     }
 }
-
-javadoc {
-    source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
-    classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
-}
-
-sourcesJar {
-    from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
-}
diff --git a/pig/build.gradle b/pig/build.gradle
index 45b3f7721..8126c3cf3 100644
--- a/pig/build.gradle
+++ b/pig/build.gradle
@@ -31,6 +31,8 @@ dependencies {
 
     itestImplementation(project(":test:shared"))
     itestImplementation("dk.brics.automaton:automaton:1.11-8")
+
+    additionalSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {
@@ -40,12 +42,3 @@ jar {
         include "META-INF/services/*"
     }
 }
-
-javadoc {
-    source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
-    classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
-}
-
-sourcesJar {
-    from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
-}
diff --git a/spark/sql-13/build.gradle b/spark/sql-13/build.gradle
index 276abc9bc..8bdba4541 100644
--- a/spark/sql-13/build.gradle
+++ b/spark/sql-13/build.gradle
@@ -161,6 +161,15 @@ dependencies {
     testImplementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
         exclude group: 'org.apache.hadoop'
     }
+
+    additionalSources(project(":elasticsearch-hadoop-mr"))
+}
+
+// Export generated Java code from the genjavadoc compiler plugin
+artifacts {
+    sourceElements(project.file("$buildDir/generated/java")) {
+        builtBy compileScala
+    }
 }
 
 jar {
@@ -173,13 +182,7 @@ jar {
 
 javadoc {
     dependsOn compileScala
-    source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     source += "$buildDir/generated/java"
-    classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
-}
-
-sourcesJar {
-    from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
 }
 
 scaladoc {
@@ -193,4 +196,4 @@ tasks.withType(ScalaCompile) {
             "-P:genjavadoc:out=$buildDir/generated/java".toString()
         ]
     }
-}
\ No newline at end of file
+}
diff --git a/spark/sql-20/build.gradle b/spark/sql-20/build.gradle
index 7e2a79d75..5c2760604 100644
--- a/spark/sql-20/build.gradle
+++ b/spark/sql-20/build.gradle
@@ -156,6 +156,15 @@ dependencies {
     itestImplementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
         exclude group: 'org.apache.hadoop'
     }
+
+    additionalSources(project(":elasticsearch-hadoop-mr"))
+}
+
+// Export generated Java code from the genjavadoc compiler plugin
+artifacts {
+    sourceElements(project.file("$buildDir/generated/java")) {
+        builtBy compileScala
+    }
 }
 
 jar {
@@ -168,13 +177,7 @@ jar {
 
 javadoc {
     dependsOn compileScala
-    source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
     source += "$buildDir/generated/java"
-    classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
-}
-
-sourcesJar {
-    from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
 }
 
 scaladoc {
@@ -188,4 +191,4 @@ tasks.withType(ScalaCompile) {
            "-P:genjavadoc:out=$buildDir/generated/java".toString()
        ]
    }
-}
\ No newline at end of file
+}
diff --git a/storm/build.gradle b/storm/build.gradle
index 8e5896ca4..d50e58705 100644
--- a/storm/build.gradle
+++ b/storm/build.gradle
@@ -27,6 +27,8 @@ dependencies {
     itestImplementation(project(":test:shared"))
     itestImplementation("com.google.guava:guava:16.0.1")
     itestImplementation("com.twitter:carbonite:1.4.0")
+
+    additionalSources(project(":elasticsearch-hadoop-mr"))
 }
 
 jar {
@@ -37,13 +39,4 @@ jar {
         include "META-INF/services/*"
     }
 }
-javadoc {
-    source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
-    classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
-}
-
-sourcesJar {
-    from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
-}
-
 tasks.getByName('integrationTest').enabled = false