Add custom external hooks

For projects with a large number of files, zinc has to do a lot of work
to determine which source files and binaries have changed since the
last build. In a very simple project with 5000 source files, a no-op
compile takes roughly 750ms with the default incremental compiler
options. After this change, it takes about 200ms, of which roughly 50ms
are spent in the update task, which does a partial project resolution*.

The implementation is straightforward since zinc already provides an
API for overriding the built-in change detection strategy. In a
previous commit, I updated the sources task to return StampedFile
instances rather than regular java.io.File instances. To compute all of
the source file stamps, we simply list the sources: if a source is
already a StampedFile, its stamp can be reused without recomputation;
otherwise we generate a StampedFile on the fly. After building a map of
stamps for both the source files and all of the binary dependencies, we
diff these maps against the previous analysis in the changedSources,
changedBinaries and removedProducts methods.
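
As a rough sketch of that diffing step (illustrative only; the names
below are not part of this commit, and the real lookup in the new
ExternalHooks.scala works on java.util.HashMaps):

```scala
import java.io.File
import xsbti.compile.analysis.Stamp

// Classify files by comparing freshly computed stamps against the stamps
// recorded in the previous analysis. Purely illustrative helper.
def diffStamps(
    current: Map[File, Stamp],
    previous: Map[File, Stamp]
): (Set[File], Set[File], Set[File]) = {
  val added   = current.keySet -- previous.keySet
  val removed = previous.keySet -- current.keySet
  val changed = (current.keySet intersect previous.keySet)
    .filter(f => current(f).getHash != previous(f).getHash)
  (added, removed, changed)
}
```

The actual lookup additionally compares last-modified times and skips
binaries that live under the JDK's java.home.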

The new ExternalHooks are easily disabled by setting
`externalHooks := _ => None`
in the project build.
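
Written out in a build.sbt, that looks like the sketch below; a build
could instead return Some of its own xsbti.compile.ExternalHooks, for
example a DefaultExternalHooks as constructed in the new file in this
commit:

```scala
// build.sbt (sketch): disable the custom change-detection hooks and
// fall back to zinc's default stamping.
externalHooks := { (_: xsbti.compile.CompileOptions) => None }
```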

In the future, I could see moving ExternalHooks into the zinc project so
that other tools like bloop or mill could use them.

* I think this delay could be eliminated by caching the UpdateReport so
long as the project doesn't depend on any snapshot libraries. For a
project with a single source file, the no-op compile takes roughly
50ms, so caching the project resolution would make a no-op compile
start nearly instantaneously.
Ethan Atkins 2018-08-25 16:37:22 -07:00
parent 1f996185e1
commit 25e97f99f5
3 changed files with 148 additions and 6 deletions

@@ -276,6 +276,11 @@ object Defaults extends BuildCommon {
fileTreeView := state.value
.get(BasicKeys.globalFileTreeView)
.getOrElse(FileTreeView.DEFAULT.asDataView(StampedFile.converter)),
externalHooks := {
val view = fileTreeView.value
compileOptions =>
Some(ExternalHooks(compileOptions, view))
},
watchAntiEntropy :== new FiniteDuration(500, TimeUnit.MILLISECONDS),
watchLogger := streams.value.log,
watchService :== { () =>
@@ -1646,12 +1651,22 @@ object Defaults extends BuildCommon {
foldMappers(sourcePositionMappers.value)
)
},
compileInputs := Inputs.of(
compilers.value,
compileOptions.value,
compileIncSetup.value,
previousCompile.value
)
compileInputs := {
val options = compileOptions.value
val setup = compileIncSetup.value
Inputs.of(
compilers.value,
options,
externalHooks
.value(options)
.map { hooks =>
val newOptions = setup.incrementalCompilerOptions.withExternalHooks(hooks)
setup.withIncrementalCompilerOptions(newOptions)
}
.getOrElse(setup),
previousCompile.value
)
}
)
}

@@ -20,6 +20,7 @@ import xsbti.compile.{
CompileOrder,
Compilers,
CompileResult,
ExternalHooks,
GlobalsCache,
IncOptions,
Inputs,
@@ -261,6 +262,7 @@ object Keys {
val copyResources = taskKey[Seq[(File, File)]]("Copies resources to the output directory.").withRank(AMinusTask)
val aggregate = settingKey[Boolean]("Configures task aggregation.").withRank(BMinusSetting)
val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask)
val externalHooks = taskKey[CompileOptions => Option[ExternalHooks]]("External hooks for modifying the internal behavior of the incremental compiler.").withRank(BMinusSetting)
// package keys
val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask)

@@ -0,0 +1,125 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal
import java.nio.file.Paths
import java.util.Optional
import sbt.StampedFile
import sbt.internal.inc.ExternalLookup
import sbt.io.syntax.File
import sbt.io.{ FileTreeRepository, FileTreeDataView, TypedPath }
import xsbti.compile._
import xsbti.compile.analysis.Stamp
import scala.collection.mutable
private[sbt] object ExternalHooks {
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
def apply(options: CompileOptions, view: FileTreeDataView[StampedFile]): DefaultExternalHooks = {
import scala.collection.JavaConverters._
val sources = options.sources()
val cachedSources = new java.util.HashMap[File, Stamp]
val converter: File => Stamp = f => StampedFile.sourceConverter(TypedPath(f.toPath)).stamp
sources.foreach {
case sf: StampedFile => cachedSources.put(sf, sf.stamp)
case f: File => cachedSources.put(f, converter(f))
}
view match {
case r: FileTreeRepository[StampedFile] =>
r.register(options.classesDirectory.toPath, Integer.MAX_VALUE)
options.classpath.foreach { f =>
r.register(f.toPath, Integer.MAX_VALUE)
}
case _ =>
}
val allBinaries = new java.util.HashMap[File, Stamp]
options.classpath.foreach { f =>
view.listEntries(f.toPath, Integer.MAX_VALUE, _ => true) foreach { e =>
e.value match {
case Right(value) => allBinaries.put(e.typedPath.getPath.toFile, value.stamp)
case _ =>
}
}
// This gives us the entry for the path itself, which is necessary if the path is a jar file
// rather than a directory.
view.listEntries(f.toPath, -1, _ => true) foreach { e =>
e.value match {
case Right(value) => allBinaries.put(e.typedPath.getPath.toFile, value.stamp)
case _ =>
}
}
}
val lookup = new ExternalLookup {
override def changedSources(previousAnalysis: CompileAnalysis): Option[Changes[File]] = Some {
new Changes[File] {
val getAdded: java.util.Set[File] = new java.util.HashSet[File]
val getRemoved: java.util.Set[File] = new java.util.HashSet[File]
val getChanged: java.util.Set[File] = new java.util.HashSet[File]
val getUnmodified: java.util.Set[File] = new java.util.HashSet[File]
override def isEmpty: java.lang.Boolean =
getAdded.isEmpty && getRemoved.isEmpty && getChanged.isEmpty
val prevSources: mutable.Map[File, Stamp] =
previousAnalysis.readStamps().getAllSourceStamps.asScala
prevSources.foreach {
case (file: File, s: Stamp) =>
cachedSources.get(file) match {
case null =>
getRemoved.add(file)
case stamp =>
if ((stamp.getHash.orElse("") == s.getHash.orElse("")) && (stamp.getLastModified
.orElse(-1L) == s.getLastModified.orElse(-1L))) {
getUnmodified.add(file)
} else {
getChanged.add(file)
}
}
}
sources.foreach(file => if (!prevSources.contains(file)) getAdded.add(file))
}
}
override def shouldDoIncrementalCompilation(
set: Set[String],
compileAnalysis: CompileAnalysis
): Boolean = true
// This could use the cache as well, but it would complicate the cache implementation.
override def hashClasspath(files: Array[File]): Optional[Array[FileHash]] =
Optional.empty[Array[FileHash]]
override def changedBinaries(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllBinaryStamps.asScala.flatMap {
case (file, stamp) =>
allBinaries.get(file) match {
case null =>
javaHome match {
case Some(h) if file.toPath.startsWith(h) => None
case _ => Some(file)
}
case cachedStamp if stamp == cachedStamp => None
case _ => Some(file)
}
}.toSet)
}
override def removedProducts(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllProductStamps.asScala.flatMap {
case (file, s) =>
allBinaries get file match {
case null => Some(file)
case stamp if stamp.getLastModified.orElse(0L) != s.getLastModified.orElse(0L) =>
Some(file)
case _ => None
}
}.toSet)
}
}
new DefaultExternalHooks(Optional.of(lookup), Optional.empty[ClassFileManager])
}
}