Delay Java compilation during pipelining #843

Merged: 7 commits, Jul 17, 2020
2 changes: 2 additions & 0 deletions build.sbt
@@ -75,6 +75,8 @@ ThisBuild / publishTo := {
}
ThisBuild / pomIncludeRepository := (_ => false) // drop repos other than Maven Central from POM
ThisBuild / mimaPreviousArtifacts := Set.empty
// limit the number of concurrent tests so testQuick works
Global / concurrentRestrictions += Tags.limit(Tags.Test, 4)
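For context, a minimal hypothetical build.sbt sketch of how such restrictions compose (Tags.limitAll and the numbers are illustrative, not part of this PR):

Global / concurrentRestrictions := Seq(
  Tags.limitAll(8),         // never run more than 8 tasks at once overall
  Tags.limit(Tags.Test, 4)  // and of those, at most 4 tagged as Test
)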

def baseSettings: Seq[Setting[_]] = Seq(
resolvers += Resolver.typesafeIvyRepo("releases"),
@@ -74,7 +74,7 @@ public static boolean defaultAllowMachinePath() {
return true;
}
public static boolean defaultPipelining() {
return true;
return false;
}
public static IncOptions create() {
return new IncOptions();
@@ -161,7 +161,7 @@ type IncOptions {
#x return true;
#x }
#x public static boolean defaultPipelining() {
#x return true;
#x return false;
#x }
}

@@ -61,7 +61,8 @@ class BenchmarkBase extends BridgeProviderSpecification {
val scalaVersion = ZincBenchmark.scalaVersion
val bridge = getCompilerBridge(_dir.toPath, noLogger, scalaVersion)
val si = scalaInstance(scalaVersion, _dir.toPath, noLogger)
_compilerSetup = _setup.createCompiler(scalaVersion, si, bridge, log)
val pipelining = false
_compilerSetup = _setup.createCompiler(scalaVersion, si, bridge, pipelining, log)
printCompilationDetails()
}

@@ -65,7 +65,17 @@ public String get() {
* @return An instance of {@link IncOptions}.
*/
public static IncOptions fromStringMap(Map<String, String> values, Logger logger) {
IncOptions base = IncOptions.of();
return fromStringMap(IncOptions.of(), values, logger);
}

/**
* Reads and returns an instance of {@link IncOptions} from a mapping of values.
*
* @param values The values read from a properties file.
* @param logger The logger used for reporting and for the transactional manager type.
* @return An instance of {@link IncOptions}.
*/
public static IncOptions fromStringMap(IncOptions base, Map<String, String> values, Logger logger) {
logger.debug(f0("Reading incremental options from map"));

if (values.containsKey(TRANSITIVE_STEP_KEY)) {
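The new overload lets a caller seed non-default values that the properties map may then override. A minimal sketch of the intended usage (mirroring the scripted-test change further down in this PR):

val base = IncOptions.of().withPipelining(true)
val opts = IncOptionsUtil.fromStringMap(base, map, logger) // map entries override base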
136 changes: 121 additions & 15 deletions internal/zinc-core/src/main/scala/sbt/internal/inc/Incremental.scala
@@ -195,6 +195,83 @@ object Incremental {
} finally runProfiler.registerRun()
}

/**
* Compile all Java sources, ignoring incrementality.
* We still go through the Incremental machinery because we need to produce an Analysis
* so that other subprojects can compile incrementally.
*/
def compileAllJava(
sources: Seq[VirtualFile],
converter: FileConverter,
lookup: Lookup,
previous0: CompileAnalysis,
options: IncOptions,
currentSetup: MiniSetup,
stamper: ReadStamps,
output: Output,
outputJarContent: JarUtils.OutputJarContent,
earlyOutput: Option[Output],
earlyAnalysisStore: Option[AnalysisStore],
progress: Option[CompileProgress],
log: Logger
)(
compileJava: (Seq[VirtualFile], xsbti.AnalysisCallback, XClassFileManager) => Unit
): (Boolean, Analysis) = {
log.debug("[zinc] compileAllJava")
val previous = previous0 match { case a: Analysis => a }
// prune Java knowledge out of previous Analysis
val pruned = prune(sources.toSet, previous, output, outputJarContent, converter)
val currentStamper = Stamps.initial(stamper)
val internalBinaryToSourceClassName = (binaryClassName: String) =>
pruned.relations.productClassName.reverse(binaryClassName).headOption
val internalSourceToClassNamesMap: VirtualFile => Set[String] =
(f: VirtualFile) => pruned.relations.classNames(f)
val externalAPI = getExternalAPI(lookup)
val builder = new AnalysisCallback.Builder(
internalBinaryToSourceClassName,
internalSourceToClassNamesMap,
externalAPI,
currentStamper,
options,
currentSetup,
converter,
lookup,
output,
outputJarContent,
earlyOutput,
earlyAnalysisStore,
progress,
log
)
// val profiler = options.externalHooks.getInvalidationProfiler
// val runProfiler = new AdaptedRunProfiler(profiler.profileRun)
// val incremental: IncrementalCommon = new IncrementalNameHashing(log, options, runProfiler)
val callback = builder.build()
try {
val analysis = withClassfileManager(options, converter, output, outputJarContent) {
classFileManager =>
// See IncrementalCommon.scala's completeCycle
def completeCycle(partialAnalysis: Analysis): Analysis = {
val a1 = pruned ++ partialAnalysis
val products = partialAnalysis.relations.allProducts
.map(converter.toVirtualFile(_))
classFileManager.generated(products.toArray)
a1
}
compileJava(sources, callback, classFileManager)
val a0 = callback.getPostJavaAnalysis
completeCycle(a0)
}
(sources.nonEmpty, analysis)
} catch {
case _: xsbti.CompileCancelled =>
log.info("Compilation has been cancelled")
// if compilation was cancelled, any partial results (e.g. produced class files) have been rolled back,
// so we report that nothing changed (false) and return the previous Analysis, which is still up-to-date
(false, previous)
}
}

def getExternalAPI(lookup: Lookup): (VirtualFileRef, String) => Option[AnalyzedClass] =
(_: VirtualFileRef, binaryClassName: String) => lookup.lookupAnalyzedClass(binaryClassName)
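A rough sketch of how a caller might drive this new entry point; only the compileAllJava signature above is from this diff, while the surrounding values and the javaCompiler delegate are hypothetical:

val (changed, analysis) = Incremental.compileAllJava(
  javaSources, converter, lookup, previousAnalysis, incOptions, setup,
  stamps, output, outputJarContent, earlyOutput = None,
  earlyAnalysisStore = None, progress = None, log
) { (srcs, callback, classFileManager) =>
  // hypothetical delegate: run javac over all sources, reporting into the callback
  javaCompiler.compile(srcs, callback, classFileManager)
}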

Expand Down Expand Up @@ -274,7 +351,7 @@ object Incremental {
"All initially invalidated sources:" + initialInvSources + "\n"
)
}
val analysis = manageClassfiles(options, converter, output, outputJarContent) {
val analysis = withClassfileManager(options, converter, output, outputJarContent) {
classfileManager =>
incremental.cycle(
initialInvClasses,
@@ -347,7 +424,7 @@
)
}

private[this] def manageClassfiles[T](
private[sbt] def withClassfileManager[T](
options: IncOptions,
converter: FileConverter,
output: Output,
@@ -400,7 +477,28 @@ private object AnalysisCallback {
earlyOutput,
earlyAnalysisStore,
progress,
incHandler,
Some(incHandler),
log
)
}

// Creates an AnalysisCallback without an IncHandler, for Java-only compilation.
def build(): AnalysisCallback = {
new AnalysisCallback(
internalBinaryToSourceClassName,
internalSourceToClassNamesMap,
externalAPI,
stampReader,
options,
currentSetup,
outputJarContent,
converter,
lookup,
output,
earlyOutput,
earlyAnalysisStore,
progress,
None,
log
)
}
@@ -421,7 +519,7 @@ private final class AnalysisCallback(
earlyOutput: Option[Output],
earlyAnalysisStore: Option[AnalysisStore],
progress: Option[CompileProgress],
incHandler: Incremental.IncrementalCallback,
incHandlerOpt: Option[Incremental.IncrementalCallback],
log: Logger
) extends xsbti.AnalysisCallback {
import Incremental.CompileCycleResult
@@ -698,6 +796,7 @@ private final class AnalysisCallback(
private[this] var gotten: Boolean = false
def getCycleResultOnce: CompileCycleResult = {
assert(!gotten, "can't call AnalysisCallback#getCycleResultOnce more than once")
val incHandler = incHandlerOpt.getOrElse(sys.error("incHandler was expected"))
gotten = true
// notify that early artifact writing is not going to happen because of macros
def notifyEarlyArifactFailure(): Unit =
@@ -727,6 +826,10 @@
addUsedNames(addCompilation(analysis0))
}

def getPostJavaAnalysis: Analysis = {
getAnalysis
}

def getOrNil[A, B](m: collection.Map[A, Seq[B]], a: A): Seq[B] = m.get(a).toList.flatten
def addCompilation(base: Analysis): Analysis =
base.copy(compilations = base.compilations.add(compilation))
@@ -845,22 +948,25 @@ private final class AnalysisCallback(
override def apiPhaseCompleted(): Unit = {
// If we know we're done with cycles (presumably because all sources were invalidated) we can store early analysis
// and pickle data now. Otherwise, we need to wait for dependency information to decide if there are more cycles.
if (options.pipelining() && incHandler.isFullCompilation) {
val a = getAnalysis
val CompileCycleResult(continue, invalidations, merged) =
incHandler.mergeAndInvalidate(a, false)
if (!hasAnyMacro(merged)) {
assert(
!continue && invalidations.isEmpty,
"everything was supposed to be invalidated already"
)
invalidationResults = Some(CompileCycleResult.empty)
writeEarlyArtifacts(merged)
incHandlerOpt foreach { incHandler =>
if (options.pipelining() && incHandler.isFullCompilation) {
val a = getAnalysis
val CompileCycleResult(continue, invalidations, merged) =
incHandler.mergeAndInvalidate(a, false)
if (!hasAnyMacro(merged)) {
assert(
!continue && invalidations.isEmpty,
"everything was supposed to be invalidated already"
)
invalidationResults = Some(CompileCycleResult.empty)
writeEarlyArtifacts(merged)
}
}
}
}

override def dependencyPhaseCompleted(): Unit = {
val incHandler = incHandlerOpt.getOrElse(sys.error("incHandler was expected"))
if (invalidationResults.isEmpty) {
val a = getAnalysis
val CompileCycleResult(continue, invalidations, merged) =
@@ -179,7 +179,8 @@ private[inc] abstract class IncrementalCommon(
completingCycle: Boolean
): CompileCycleResult = {
val analysis =
if (isFullCompilation) partialAnalysis
if (isFullCompilation)
partialAnalysis.copy(compilations = pruned.compilations ++ partialAnalysis.compilations)
else pruned ++ partialAnalysis
val recompiledClasses: Set[String] = {
// Represents classes detected as changed externally and internally (by a previous cycle)
@@ -273,6 +273,8 @@ case class ProjectStructure(
incrementalCompiler: IncrementalCompilerImpl
) extends BridgeProviderSpecification {
import scala.concurrent.ExecutionContext.Implicits._
// This will test pipelining unless incOptions.properties overrides it
val defaultPipelining = true
val maxErrors = 100
val targetDir = baseDirectory / "target"
// val targetDir = Paths.get("/tmp/pipelining") / name / "target"
@@ -533,22 +535,31 @@
val earlyDeps: Future[Seq[Path]] = Future.traverse(dependsOnRef) { dep =>
dep.earlyArtifact(i).map(success => if (success) dep.earlyOutput else dep.output)
}
val futureAnalysis = earlyDeps.map { internalCp =>
doCompile(i, notifyEarlyOutput, internalCp, pipelinedLookupAnalysis)
val futureScalaAnalysis = earlyDeps.map { internalCp =>
doCompile(i, notifyEarlyOutput, internalCp, pipelinedLookupAnalysis, false)
}
val wholeDeps = Future.traverse(dependsOnRef) { dep =>
dep.compile(i).map(_ => dep.output)
}
def futureJavaAnalysis(projectDependencies: Seq[Path], prev: Analysis): Future[Analysis] =
Future {
doCompile(i, notifyEarlyOutput, projectDependencies, pipelinedLookupAnalysis, true)
}

// wait for the full compilation from the dependencies;
// during pipelining, a downstream compilation may complete before the upstream one,
// and to avoid deletion of directories etc., we need to wait for the upstream to finish
val f = for {
_ <- Future.traverse(dependsOnRef)(_.compile(i))
a <- futureAnalysis
pj <- wholeDeps
a0 <- futureScalaAnalysis
a <- futureJavaAnalysis(pj, a0)
} yield a
Comment on lines 552 to 556 (Member Author):
This is the part that waits for the upstream whole compilation and for this project's own Scala compilation in the scripted tests.

i.compilations(this) = (f, notifyEarlyOutput.future)
(f, notifyEarlyOutput.future)
} else {
val fullDeps = Future.traverse(dependsOnRef)(dep => dep.compile(i))
val f = fullDeps.map { _ =>
doCompile(i, notifyEarlyOutput, internalClasspath, traditionalLookupAnalysis)
val wholeDeps = Future.traverse(dependsOnRef)(dep => dep.compile(i))
val f = wholeDeps.map { _ =>
doCompile(i, notifyEarlyOutput, internalClasspath, traditionalLookupAnalysis, false)
}
i.compilations(this) = (f, notifyEarlyOutput.future)
(f, notifyEarlyOutput.future)
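To make the ordering in the pipelined branch explicit: the dependency compilations and this project's Scala compilation are already running concurrently as Futures, so the for-comprehension only awaits their completion in sequence, and the Java-only compilation starts last. A simplified sketch (names shortened from the code above):

val result: Future[Analysis] = for {
  _  <- upstreamCompilations        // all dependencies finish Scala + Java
  cp <- wholeDeps                   // their final output classpaths
  a0 <- futureScalaAnalysis         // our own pipelined Scala compilation
  a  <- futureJavaAnalysis(cp, a0)  // only then run the delayed Java compilation
} yield a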
@@ -570,7 +581,8 @@
i: IncState,
notifyEarlyOutput: Promise[Boolean],
internalCp: Seq[Path],
lookupAnalysis: VirtualFile => Option[CompileAnalysis]
lookupAnalysis: VirtualFile => Option[CompileAnalysis],
javaOnly: Boolean,
): Analysis = {
import i._
val sources = scalaSources ++ javaSources
@@ -624,7 +636,9 @@
converter,
stamper
)
val result = incrementalCompiler.compile(in, scriptedLog)
val result =
if (javaOnly) incrementalCompiler.compileAllJava(in, scriptedLog)
else incrementalCompiler.compile(in, scriptedLog)
val analysis = result.analysis match { case a: Analysis => a }
cachedStore.set(AnalysisContents.create(analysis, result.setup))
scriptedLog.info(s"""$name: compilation done: ${sources.toList.mkString(", ")}""")
@@ -711,9 +725,9 @@ case class ProjectStructure(
import scala.collection.JavaConverters._
val map = new java.util.HashMap[String, String]
properties.asScala foreach { case (k: String, v: String) => map.put(k, v) }

val base = IncOptions.of().withPipelining(defaultPipelining)
val incOptions = {
val opts = IncOptionsUtil.fromStringMap(map, scriptedLog)
val opts = IncOptionsUtil.fromStringMap(base, map, scriptedLog)
if (opts.recompileAllFraction() != IncOptions.defaultRecompileAllFraction()) opts
else opts.withRecompileAllFraction(1.0)
}