Skip to content

Commit

Permalink
Import sources for javadoc through a dependency (elastic#1461)
Browse files Browse the repository at this point in the history
Add configurations for importing and exporting source directories. Add an
additionalSources configuration to add another project's source to the
current project's javadoc. Add a sourceElements configuration to register
all java source directories in the project so they may be picked up
by other projects.
  • Loading branch information
jbaiera authored Apr 10, 2020
1 parent b838ff8 commit 259ed4d
Show file tree
Hide file tree
Showing 6 changed files with 73 additions and 41 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,15 @@ import org.gradle.api.artifacts.ProjectDependency
import org.gradle.api.artifacts.ResolutionStrategy
import org.gradle.api.artifacts.maven.MavenPom
import org.gradle.api.artifacts.maven.MavenResolver
import org.gradle.api.attributes.LibraryElements
import org.gradle.api.attributes.Usage
import org.gradle.api.file.CopySpec
import org.gradle.api.file.FileCollection
import org.gradle.api.java.archives.Manifest
import org.gradle.api.plugins.JavaPlugin
import org.gradle.api.plugins.MavenPlugin
import org.gradle.api.plugins.MavenPluginConvention
import org.gradle.api.tasks.SourceSet
import org.gradle.api.tasks.SourceSetContainer
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.Upload
Expand Down Expand Up @@ -84,6 +88,32 @@ class BuildPlugin implements Plugin<Project> {
}

private static void configureConfigurations(Project project) {
if (project != project.rootProject) {
// Set up avenues for sharing source files between projects in order to create embedded Javadocs
// Import source configuration
Configuration sources = project.configurations.create("additionalSources")
sources.canBeConsumed = false
sources.canBeResolved = true
sources.attributes {
// Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
// into incremental compilation analysis.
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
}

// Export source configuration
Configuration sourceElements = project.configurations.create("sourceElements")
sourceElements.canBeConsumed = true
sourceElements.canBeResolved = false
sourceElements.extendsFrom(sources)
sourceElements.attributes {
// Changing USAGE is required when working with Scala projects, otherwise the source dirs get pulled
// into incremental compilation analysis.
attribute(Usage.USAGE_ATTRIBUTE, project.objects.named(Usage, 'java-source'))
attribute(LibraryElements.LIBRARY_ELEMENTS_ATTRIBUTE, project.objects.named(LibraryElements, 'sources'))
}
}

if (project.path.startsWith(":qa")) {
return
}
Expand Down Expand Up @@ -200,6 +230,15 @@ class BuildPlugin implements Plugin<Project> {
project.sourceCompatibility = '1.8'
project.targetCompatibility = '1.8'

// TODO: Remove all root project distribution logic. It should exist in a separate dist project.
if (project != project.rootProject) {
SourceSet mainSourceSet = project.sourceSets.main
FileCollection javaSourceDirs = mainSourceSet.java.sourceDirectories
javaSourceDirs.each { File srcDir ->
project.getArtifacts().add('sourceElements', srcDir)
}
}

JavaCompile compileJava = project.tasks.getByName('compileJava') as JavaCompile
compileJava.getOptions().setCompilerArgs(['-Xlint:unchecked', '-Xlint:options'])

Expand Down Expand Up @@ -235,6 +274,10 @@ class BuildPlugin implements Plugin<Project> {
sourcesJar.dependsOn(project.tasks.classes)
sourcesJar.classifier = 'sources'
sourcesJar.from(project.sourceSets.main.allSource)
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
sourcesJar.from(project.configurations.additionalSources)
}

// Configure javadoc
Javadoc javadoc = project.tasks.getByName('javadoc') as Javadoc
Expand All @@ -246,6 +289,10 @@ class BuildPlugin implements Plugin<Project> {
"org/elasticsearch/hadoop/util/**",
"org/apache/hadoop/hive/**"
]
// TODO: Remove when root project does not handle distribution
if (project != project.rootProject) {
javadoc.source = project.files(project.configurations.additionalSources)
}
// Set javadoc executable to runtime Java (1.8)
javadoc.executable = new File(project.ext.runtimeJavaHome, 'bin/javadoc')

Expand Down
11 changes: 2 additions & 9 deletions hive/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ dependencies {
itestImplementation("org.apache.hive:hive-jdbc:$hiveVersion") {
exclude module: "log4j-slf4j-impl"
}

additionalSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand All @@ -42,12 +44,3 @@ jar {
include "META-INF/services/*"
}
}

javadoc {
source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
}

sourcesJar {
from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
}
11 changes: 2 additions & 9 deletions pig/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ dependencies {

itestImplementation(project(":test:shared"))
itestImplementation("dk.brics.automaton:automaton:1.11-8")

additionalSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand All @@ -40,12 +42,3 @@ jar {
include "META-INF/services/*"
}
}

javadoc {
source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
}

sourcesJar {
from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
}
17 changes: 10 additions & 7 deletions spark/sql-13/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,15 @@ dependencies {
testImplementation("org.apache.spark:spark-sql_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}

additionalSources(project(":elasticsearch-hadoop-mr"))
}

// Export generated Java code from the genjavadoc compiler plugin
artifacts {
sourceElements(project.file("$buildDir/generated/java")) {
builtBy compileScala
}
}

jar {
Expand All @@ -173,13 +182,7 @@ jar {

javadoc {
dependsOn compileScala
source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
source += "$buildDir/generated/java"
classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
}

sourcesJar {
from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
}

scaladoc {
Expand All @@ -193,4 +196,4 @@ tasks.withType(ScalaCompile) {
"-P:genjavadoc:out=$buildDir/generated/java".toString()
]
}
}
}
17 changes: 10 additions & 7 deletions spark/sql-20/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,15 @@ dependencies {
itestImplementation("org.apache.spark:spark-streaming_${project.ext.scalaMajorVersion}:$sparkVersion") {
exclude group: 'org.apache.hadoop'
}

additionalSources(project(":elasticsearch-hadoop-mr"))
}

// Export generated Java code from the genjavadoc compiler plugin
artifacts {
sourceElements(project.file("$buildDir/generated/java")) {
builtBy compileScala
}
}

jar {
Expand All @@ -168,13 +177,7 @@ jar {

javadoc {
dependsOn compileScala
source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
source += "$buildDir/generated/java"
classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
}

sourcesJar {
from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
}

scaladoc {
Expand All @@ -188,4 +191,4 @@ tasks.withType(ScalaCompile) {
"-P:genjavadoc:out=$buildDir/generated/java".toString()
]
}
}
}
11 changes: 2 additions & 9 deletions storm/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ dependencies {
itestImplementation(project(":test:shared"))
itestImplementation("com.google.guava:guava:16.0.1")
itestImplementation("com.twitter:carbonite:1.4.0")

additionalSources(project(":elasticsearch-hadoop-mr"))
}

jar {
Expand All @@ -37,13 +39,4 @@ jar {
}
}

javadoc {
source += project(":elasticsearch-hadoop-mr").sourceSets.main.allJava
classpath += files(project(":elasticsearch-hadoop-mr").sourceSets.main.compileClasspath)
}

sourcesJar {
from project(":elasticsearch-hadoop-mr").sourceSets.main.allJava.srcDirs
}

tasks.getByName('integrationTest').enabled = false

0 comments on commit 259ed4d

Please sign in to comment.