Merge pull request #1714 from sbt/wip/bytecode-enhancement

Expose a mechanism whereby bytecode enhancement can be run *before* saving incremental compiler hashes.
eugene yokota 2014-12-03 08:46:52 -05:00
commit 7df9802f0c
20 changed files with 673 additions and 69 deletions
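The net effect of the changes below is a new `manipulateBytecode` task that sits between `compileIncremental` and `compile`, so a build or plugin can rewrite class files before the analysis is persisted. A minimal, hypothetical build.sbt sketch of how that hook could be used (the enhancement step itself is a placeholder; the key names come from the Keys/Defaults changes in this commit):

```scala
// Hypothetical build.sbt fragment: enhance bytecode after incremental compilation
// but before `compile` writes the analysis to disk.
manipulateBytecode in Compile := {
  val result  = (compileIncremental in Compile).value // run the normal incremental compile first
  val classes = (classDirectory in Compile).value     // directory holding the freshly written .class files
  // ... rewrite/enhance the class files under `classes` here ...
  result                                              // return the CompileResult; `compile` saves its analysis afterwards
}
```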


@ -10,24 +10,41 @@ import java.io.File
// We cannot require an implicit parameter Equiv[Seq[String]] to construct Equiv[CompileSetup]
// because complexity(Equiv[Seq[String]]) > complexity(Equiv[CompileSetup])
// (6 > 4)
final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String])
final class CompileOptions(val options: Seq[String], val javacOptions: Seq[String]) {
override def toString = s"CompileOptions(scalac=$options, javac=$javacOptions)"
}
final class CompileSetup(val output: APIOutput, val options: CompileOptions, val compilerVersion: String,
val order: CompileOrder, val nameHashing: Boolean) {
@deprecated("Use the other overloaded variant of the constructor that takes `nameHashing` value, instead.", "0.13.2")
def this(output: APIOutput, options: CompileOptions, compilerVersion: String, order: CompileOrder) = {
this(output, options, compilerVersion, order, false)
}
override def toString = s"""CompileSetup(
| options = $options
| compilerVersion = $compilerVersion
| order = $order
| nameHashing = $nameHashing
| output = $output
|)""".stripMargin
}
object CompileSetup {
// Equiv[CompileOrder.Value] dominates Equiv[CompileSetup]
implicit def equivCompileSetup(implicit equivOutput: Equiv[APIOutput], equivOpts: Equiv[CompileOptions], equivComp: Equiv[String] /*, equivOrder: Equiv[CompileOrder]*/ ): Equiv[CompileSetup] = new Equiv[CompileSetup] {
def equiv(a: CompileSetup, b: CompileSetup) =
equivOutput.equiv(a.output, b.output) &&
equivOpts.equiv(a.options, b.options) &&
equivComp.equiv(a.compilerVersion, b.compilerVersion) &&
a.order == b.order && // equivOrder.equiv(a.order, b.order)
a.nameHashing == b.nameHashing
def equiv(a: CompileSetup, b: CompileSetup) = {
// For some reason, an Equiv[Nothing] or some such is getting injected into here now, and borking all our results.
// We hardcode these to use the Equiv defined in this class.
def sameOutput = CompileSetup.equivOutput.equiv(a.output, b.output)
def sameOptions = CompileSetup.equivOpts.equiv(a.options, b.options)
def sameCompiler = equivComp.equiv(a.compilerVersion, b.compilerVersion)
def sameOrder = a.order == b.order
def sameNameHasher = a.nameHashing == b.nameHashing
sameOutput &&
sameOptions &&
sameCompiler &&
sameOrder && // equivOrder.equiv(a.order, b.order)
sameNameHasher
}
}
implicit val equivFile: Equiv[File] = new Equiv[File] {
def equiv(a: File, b: File) = a.getAbsoluteFile == b.getAbsoluteFile
@ -41,14 +58,17 @@ object CompileSetup {
case (a, b) =>
equivFile.equiv(a.sourceDirectory, b.sourceDirectory) && equivFile.equiv(a.outputDirectory, b.outputDirectory)
})
case (s1: SingleOutput, s2: SingleOutput) => equivFile.equiv(s1.outputDirectory, s2.outputDirectory)
case _ => false
case (s1: SingleOutput, s2: SingleOutput) =>
equivFile.equiv(s1.outputDirectory, s2.outputDirectory)
case _ =>
false
}
}
implicit val equivOpts: Equiv[CompileOptions] = new Equiv[CompileOptions] {
def equiv(a: CompileOptions, b: CompileOptions) =
def equiv(a: CompileOptions, b: CompileOptions) = {
(a.options sameElements b.options) &&
(a.javacOptions sameElements b.javacOptions)
}
}
implicit val equivCompilerVersion: Equiv[String] = new Equiv[String] {
def equiv(a: String, b: String) = a == b
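For orientation, a small hypothetical sketch of how the implicit defined above is consumed: the incremental path (`compileInternal` in IC.scala, further down in this commit) summons `Equiv[CompileSetup]` to decide whether the previous analysis can be reused.

```scala
// Sketch only: the instance is resolved from the CompileSetup companion shown above.
import sbt.CompileSetup

def setupUnchanged(previous: CompileSetup, current: CompileSetup): Boolean =
  implicitly[Equiv[CompileSetup]].equiv(previous, current) // same output, options, compiler version, order and nameHashing
```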


@ -13,7 +13,33 @@ import xsbti.api.Definition
import xsbti.DependencyContext
import xsbti.DependencyContext.{ DependencyByInheritance, DependencyByMemberRef }
/**
* Helper methods for running incremental compilation. All it is responsible for is
* adapting any xsbti.AnalysisCallback into one compatible with the [[sbt.inc.Incremental]] class.
*/
object IncrementalCompile {
/**
* Runs the incremental compilation algorithm.
* @param sources
* The full set of input sources
* @param entry
* A className -> source file lookup function.
* @param compile
* The mechanism to run a single 'step' of compile, for ALL source files involved.
* @param previous
* The previous dependency Analysis (or an empty one).
* @param forEntry
* The dependency Analysis associated with a given file
* @param output
* The configured output directory/directory mapping for source files.
* @param log
* Where all log messages should go
* @param options
* Incremental compiler options (like name hashing vs. not).
* @return
* A flag indicating whether or not compilation completed successfully, and the resulting dependency analysis object.
*
*/
def apply(sources: Set[File], entry: String => Option[File],
compile: (Set[File], DependencyChanges, xsbti.AnalysisCallback) => Unit,
previous: Analysis,


@ -10,7 +10,30 @@ import xsbti.api.{ Compilation, Source }
import xsbti.compile.DependencyChanges
import java.io.File
/**
* Helper class to run the incremental compilation algorithm.
*
*
* This class delegates down to
* - IncrementalNameHashing
* - IncrementalDefault
* - IncrementalAnyStyle
*/
object Incremental {
/**
* Runs the incremental compiler algorithm.
*
* @param sources The sources to compile
* @param entry The means of looking up a class on the classpath.
* @param previous The previously detected source dependencies.
* @param current A mechanism for generating stamps (timestamps, hashes, etc).
* @param doCompile The function which can run one level of compile.
* @param log The log where we write debugging information
* @param options Incremental compilation options
* @param equivS The means of testing whether two "Stamps" are the same.
* @return
* A flag indicating whether or not compilation completed successfully, and the resulting dependency analysis object.
*/
def compile(sources: Set[File],
entry: String => Option[File],
previous: Analysis,


@ -20,11 +20,9 @@ import xsbti.api.Source
import xsbti.compile.{ CompileOrder, DependencyChanges, GlobalsCache, Output, SingleOutput, MultipleOutput, CompileProgress }
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }
final class CompileConfiguration(val sources: Seq[File], val classpath: Seq[File],
val previousAnalysis: Analysis, val previousSetup: Option[CompileSetup], val currentSetup: CompileSetup, val progress: Option[CompileProgress], val getAnalysis: File => Option[Analysis], val definesClass: DefinesClass,
val reporter: Reporter, val compiler: AnalyzingCompiler, val javac: xsbti.compile.JavaCompiler, val cache: GlobalsCache, val incOptions: IncOptions)
@deprecated("0.13.8", "Use MixedAnalyzingCompiler or IC instead.")
class AggressiveCompile(cacheFile: File) {
@deprecated("0.13.8", "Use IC.compile instead.")
def apply(compiler: AnalyzingCompiler,
javac: xsbti.compile.JavaCompiler,
sources: Seq[File], classpath: Seq[File],
@ -186,20 +184,14 @@ class AggressiveCompile(cacheFile: File) {
private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
val store = AggressiveCompile.staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile))))
val store = MixedAnalyzingCompiler.staticCachedStore(cacheFile)
}
@deprecated("0.13.8", "Use MixedAnalyzingCompiler instead.")
object AggressiveCompile {
import collection.mutable
import java.lang.ref.{ Reference, SoftReference }
private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
synchronized {
cache get file flatMap { ref => Option(ref.get) } getOrElse {
val b = backing
cache.put(file, new SoftReference(b))
b
}
}
@deprecated("0.13.8", "Use MixedAnalyzingCompiler.staticCachedStore instead.")
def staticCachedStore(cacheFile: File) = MixedAnalyzingCompiler.staticCachedStore(cacheFile)
@deprecated("0.13.8", "Deprecated in favor of new sbt.compiler.javac package.")
def directOrFork(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File]): JavaTool =
if (javaHome.isDefined)


@ -0,0 +1,41 @@
package sbt.compiler
import java.io.File
import sbt.CompileSetup
import sbt.inc.{ IncOptions, Analysis }
import sbt.inc.Locate._
import xsbti.Reporter
import xsbti.compile.{ GlobalsCache, CompileProgress }
/**
* Configuration used for running an analyzing compiler (a compiler which can extract dependencies between source files and JARs).
*
* @param sources The sources to compile.
* @param classpath The classpath to use when compiling.
* @param previousAnalysis The previous dependency Analysis (or an empty one).
* @param previousSetup The previous compilation setup, if any.
* @param currentSetup The setup for the current compilation.
* @param progress An optional compilation progress reporter.
* @param getAnalysis The dependency Analysis associated with a given file, if any.
* @param definesClass A mechanism for looking up whether or not a JAR defines a particular class.
* @param reporter Where we send all compilation error/warning events.
* @param compiler The analyzing Scala compiler.
* @param javac The Java compiler.
* @param cache The caching mechanism to use instead of instantiating new compiler instances.
* @param incOptions Options specific to incremental compilation.
*/
final class CompileConfiguration(
val sources: Seq[File],
val classpath: Seq[File],
val previousAnalysis: Analysis,
val previousSetup: Option[CompileSetup],
val currentSetup: CompileSetup,
val progress: Option[CompileProgress],
val getAnalysis: File => Option[Analysis],
val definesClass: DefinesClass,
val reporter: Reporter,
val compiler: AnalyzingCompiler,
val javac: xsbti.compile.JavaCompiler,
val cache: GlobalsCache,
val incOptions: IncOptions)


@ -1,29 +1,57 @@
package sbt.compiler
import java.io.File
import sbt.{ CompileSetup, IO, Using }
import sbt.inc.{ Analysis, IncOptions, TextAnalysisFormat }
import xsbti.{ Logger, Maybe }
import sbt.compiler.javac.AnalyzingJavaCompiler
import sbt.inc.Locate._
import sbt._
import sbt.inc._
import xsbti.Logger
import xsbti.api.Source
import xsbti.compile.ClasspathOptions
import xsbti.compile.CompileOrder._
import xsbti.compile.DefinesClass
import xsbti.compile.ScalaInstance
import xsbti.{ Reporter, Logger, Maybe }
import xsbti.compile._
// TODO -
// 1. Move analyzingCompile from MixedAnalyzingCompiler into here
// 2. Create AnalyzingJavaCompiler class
// 3. MixedAnalyzingCompiler should just provide the raw 'compile' method used in incremental compiler (and
// by this class).
/**
* An implementation of the incremental compiler that can compile inputs and dump out source dependency analysis.
*/
object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] {
def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis =
override def compile(in: Inputs[Analysis, AnalyzingCompiler], log: Logger): Analysis =
{
val setup = in.setup; import setup._
val options = in.options; import options.{ options => scalacOptions, _ }
val compilers = in.compilers; import compilers._
val agg = new AggressiveCompile(setup.cacheFile)
val aMap = (f: File) => m2o(analysisMap(f))
val defClass = (f: File) => { val dc = definesClass(f); (name: String) => dc.apply(name) }
val incOptions = IncOptions.fromStringMap(incrementalCompilerOptions)
agg(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, aMap,
defClass, reporter, order, skip, incOptions)(log)
val (previousAnalysis, previousSetup) = {
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile()).get().map {
case (a, s) => (a, Some(s))
} getOrElse {
(Analysis.empty(nameHashing = incOptions.nameHashing), None)
}
}
incrementalCompile(scalac, javac, sources, classpath, output, cache, m2o(progress), scalacOptions, javacOptions, previousAnalysis,
previousSetup, aMap, defClass, reporter, order, skip, incOptions)(log).analysis
}
private[this] def m2o[S](opt: Maybe[S]): Option[S] = if (opt.isEmpty) None else Some(opt.get)
def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler =
new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options, log)
@deprecated("0.13.8", "A logger is no longer needed.")
override def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions, log: Logger): AnalyzingCompiler =
new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options)
override def newScalaCompiler(instance: ScalaInstance, interfaceJar: File, options: ClasspathOptions): AnalyzingCompiler =
new AnalyzingCompiler(instance, CompilerInterfaceProvider.constant(interfaceJar), options)
def compileInterfaceJar(label: String, sourceJar: File, targetJar: File, interfaceJar: File, instance: ScalaInstance, log: Logger) {
val raw = new RawCompiler(instance, sbt.ClasspathOptions.auto, log)
@ -51,4 +79,84 @@ object IC extends IncrementalCompiler[Analysis, AnalyzingCompiler] {
throw new java.io.IOException(s"Error while reading $file", ex)
}
}
/** The result of running the compilation. */
final case class Result(analysis: Analysis, setup: CompileSetup, hasModified: Boolean)
/**
* This will run a mixed-compilation of Java/Scala sources
*
*
* TODO - this is the interface sbt uses. Somehow this needs to be exposed further.
*
* @param scalac An instance of the Scalac compiler which can also extract "Analysis" (dependencies)
* @param javac An instance of the Javac compiler.
* @param sources The set of sources to compile
* @param classpath The classpath to use when compiling.
* @param output Configuration for where to output .class files.
* @param cache The caching mechanism to use instead of instantiating new compiler instances.
* @param progress Progress listening for the compilation process. TODO - Feed this through the Javac Compiler!
* @param options Options for the Scala compiler
* @param javacOptions Options for the Java compiler
* @param previousAnalysis The previous dependency Analysis object.
* @param previousSetup The previous compilation setup (if any)
* @param analysisMap A map of file to the dependency analysis of that file.
* @param definesClass A mechanism for looking up whether or not a JAR defines a particular class.
* @param reporter Where we send all compilation error/warning events
* @param compileOrder The order we'd like to mix compilation. JavaThenScala, ScalaThenJava or Mixed.
* @param skip If true, we skip compilation and just return the previous analysis.
* @param incrementalCompilerOptions Options specific to incremental compilation.
* @param log The location where we write log messages.
* @return The full configuration used to instantiate this mixed-analyzing compiler, the set of extracted dependencies and
* whether or not any files were modified.
*/
def incrementalCompile(scalac: AnalyzingCompiler,
javac: xsbti.compile.JavaCompiler,
sources: Seq[File],
classpath: Seq[File],
output: Output,
cache: GlobalsCache,
progress: Option[CompileProgress] = None,
options: Seq[String] = Nil,
javacOptions: Seq[String] = Nil,
previousAnalysis: Analysis,
previousSetup: Option[CompileSetup],
analysisMap: File => Option[Analysis] = { _ => None },
definesClass: Locate.DefinesClass = Locate.definesClass _,
reporter: Reporter,
compileOrder: CompileOrder = Mixed,
skip: Boolean = false,
incrementalCompilerOptions: IncOptions)(implicit log: Logger): Result = {
val config = MixedAnalyzingCompiler.makeConfig(scalac, javac, sources, classpath, output, cache,
progress, options, javacOptions, previousAnalysis, previousSetup, analysisMap, definesClass, reporter,
compileOrder, skip, incrementalCompilerOptions
)
import config.{ currentSetup => setup }
if (skip) Result(previousAnalysis, setup, false)
else {
val (analysis, changed) = compileInternal(MixedAnalyzingCompiler(config)(log))
Result(analysis, setup, changed)
}
}
/** Actually runs the incremental compiler using the given mixed compiler. This will prune the inputs based on the CompileSetup. */
private def compileInternal(mixedCompiler: MixedAnalyzingCompiler)(implicit log: Logger, equiv: Equiv[CompileSetup]): (Analysis, Boolean) = {
val entry = MixedAnalyzingCompiler.classPathLookup(mixedCompiler.config)
import mixedCompiler.config._
import mixedCompiler.config.currentSetup.output
val sourcesSet = sources.toSet
val analysis = previousSetup match {
case Some(previous) if previous.nameHashing != currentSetup.nameHashing =>
// if the value of `nameHashing` flag has changed we have to throw away
// previous Analysis completely and start with empty Analysis object
// that supports the particular value of the `nameHashing` flag.
// Otherwise we'll be getting UnsupportedOperationExceptions
Analysis.empty(currentSetup.nameHashing)
case Some(previous) if equiv.equiv(previous, currentSetup) => previousAnalysis
case _ => Incremental.prune(sourcesSet, previousAnalysis)
}
// Run the incremental compiler using the mixed compiler we've defined.
IncrementalCompile(sourcesSet, entry, mixedCompiler.compile, analysis, getAnalysis, output, log, incOptions).swap
}
}
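A hypothetical end-to-end sketch of the new `incrementalCompile` entry point, mirroring the call made from `sbt.Compiler.compile` later in this commit. Here `scalac`, `javac`, `sources`, `classpath`, `classesDirectory`, `globalsCache`, `reporter`, `incOptions`, `cacheFile` and `log` are assumed to be in scope, and the import paths are inferred from the package clauses visible in this diff:

```scala
// Sketch only: load the previous analysis, run one incremental compile, persist the result.
import sbt.compiler.{ CompileOutput, IC, MixedAnalyzingCompiler }
import sbt.inc.Analysis

val store = MixedAnalyzingCompiler.staticCachedStore(cacheFile)
val (previousAnalysis, previousSetup) =
  store.get().map { case (a, s) => (a, Some(s)) }
    .getOrElse((Analysis.empty(nameHashing = incOptions.nameHashing), None))

val result: IC.Result = IC.incrementalCompile(
  scalac, javac, sources, classpath,
  output = CompileOutput(classesDirectory),
  cache = globalsCache,
  previousAnalysis = previousAnalysis,
  previousSetup = previousSetup,
  reporter = reporter,
  incrementalCompilerOptions = incOptions
)(log)

if (result.hasModified) store.set(result.analysis, result.setup) // only re-save when something changed
```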


@ -0,0 +1,206 @@
package sbt.compiler
import java.io.File
import java.lang.ref.{ SoftReference, Reference }
import sbt.classfile.Analyze
import sbt.classpath.ClasspathUtilities
import sbt.compiler.CompileConfiguration
import sbt.compiler.javac.AnalyzingJavaCompiler
import sbt.inc.Locate.DefinesClass
import sbt._
import sbt.inc._
import sbt.inc.Locate
import xsbti.{ AnalysisCallback, Reporter }
import xsbti.api.Source
import xsbti.compile.CompileOrder._
import xsbti.compile._
/** An instance of an analyzing compiler that can run both javac + scalac. */
final class MixedAnalyzingCompiler(
val scalac: AnalyzingCompiler,
val javac: AnalyzingJavaCompiler,
val config: CompileConfiguration,
val log: Logger) {
import config._
import currentSetup._
private[this] val absClasspath = classpath.map(_.getAbsoluteFile)
/** Mechanism to work with compiler arguments. */
private[this] val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
/**
* Compiles the given Java/Scala files.
*
* @param include The files to compile right now
* @param changes A list of dependency changes.
* @param callback The callback where we report dependency issues.
*/
def compile(include: Set[File], changes: DependencyChanges, callback: AnalysisCallback): Unit = {
val outputDirs = outputDirectories(output)
outputDirs foreach (IO.createDirectory)
val incSrc = sources.filter(include)
val (javaSrcs, scalaSrcs) = incSrc partition javaOnly
logInputs(log, javaSrcs.size, scalaSrcs.size, outputDirs)
/** Compiles the necessary Scala code using the analyzing compiler. */
def compileScala(): Unit =
if (!scalaSrcs.isEmpty) {
val sources = if (order == Mixed) incSrc else scalaSrcs
val arguments = cArgs(Nil, absClasspath, None, options.options)
timed("Scala compilation", log) {
compiler.compile(sources, changes, arguments, output, callback, reporter, config.cache, log, progress)
}
}
/**
* Compiles the Java code necessary. All analysis code is included in this method.
*/
def compileJava(): Unit =
if (!javaSrcs.isEmpty) {
// Runs the analysis portion of Javac.
timed("Java compile + analysis", log) {
javac.compile(javaSrcs, options.javacOptions.toArray[String], output, callback, reporter, log, progress)
}
}
// TODO - Maybe on "Mixed" we should try to compile both Scala + Java.
if (order == JavaThenScala) { compileJava(); compileScala() } else { compileScala(); compileJava() }
}
private[this] def outputDirectories(output: Output): Seq[File] = output match {
case single: SingleOutput => List(single.outputDirectory)
case mult: MultipleOutput => mult.outputGroups map (_.outputDirectory)
}
/** Debugging method to time how long it takes to run various compilation tasks. */
private[this] def timed[T](label: String, log: Logger)(t: => T): T = {
val start = System.nanoTime
val result = t
val elapsed = System.nanoTime - start
log.debug(label + " took " + (elapsed / 1e9) + " s")
result
}
private[this] def logInputs(log: Logger, javaCount: Int, scalaCount: Int, outputDirs: Seq[File]) {
val scalaMsg = Analysis.counted("Scala source", "", "s", scalaCount)
val javaMsg = Analysis.counted("Java source", "", "s", javaCount)
val combined = scalaMsg ++ javaMsg
if (!combined.isEmpty)
log.info(combined.mkString("Compiling ", " and ", " to " + outputDirs.map(_.getAbsolutePath).mkString(",") + "..."))
}
/** Returns true if the file is a Java source. */
private[this] def javaOnly(f: File) = f.getName.endsWith(".java")
}
/**
* This is a compiler that mixes the `sbt.compiler.AnalyzingCompiler` for Scala incremental compilation
* with a `xsbti.JavaCompiler`, allowing cross-compilation of mixed Java/Scala projects with analysis output.
*
*
* NOTE: this class *defines* how to run one step of cross-Java-Scala compilation and then delegates
* down to the incremental compiler for the rest.
*/
object MixedAnalyzingCompiler {
def makeConfig(scalac: AnalyzingCompiler,
javac: xsbti.compile.JavaCompiler,
sources: Seq[File],
classpath: Seq[File],
output: Output,
cache: GlobalsCache,
progress: Option[CompileProgress] = None,
options: Seq[String] = Nil,
javacOptions: Seq[String] = Nil,
previousAnalysis: Analysis,
previousSetup: Option[CompileSetup],
analysisMap: File => Option[Analysis] = { _ => None },
definesClass: DefinesClass = Locate.definesClass _,
reporter: Reporter,
compileOrder: CompileOrder = Mixed,
skip: Boolean = false,
incrementalCompilerOptions: IncOptions): CompileConfiguration =
{
val compileSetup = new CompileSetup(output, new CompileOptions(options, javacOptions),
scalac.scalaInstance.actualVersion, compileOrder, incrementalCompilerOptions.nameHashing)
config(
sources,
classpath,
compileSetup,
progress,
previousAnalysis,
previousSetup,
analysisMap,
definesClass,
scalac,
javac,
reporter,
skip,
cache,
incrementalCompilerOptions)
}
def config(
sources: Seq[File],
classpath: Seq[File],
setup: CompileSetup,
progress: Option[CompileProgress],
previousAnalysis: Analysis,
previousSetup: Option[CompileSetup],
analysis: File => Option[Analysis],
definesClass: DefinesClass,
compiler: AnalyzingCompiler,
javac: xsbti.compile.JavaCompiler,
reporter: Reporter,
skip: Boolean,
cache: GlobalsCache,
incrementalCompilerOptions: IncOptions): CompileConfiguration = {
import CompileSetup._
new CompileConfiguration(sources, classpath, previousAnalysis, previousSetup, setup,
progress, analysis, definesClass, reporter, compiler, javac, cache, incrementalCompilerOptions)
}
/** Returns the search classpath (for dependencies) and a function that looks up the classpath entry defining a given class name. */
def searchClasspathAndLookup(config: CompileConfiguration): (Seq[File], String => Option[File]) = {
import config._
import currentSetup._
val absClasspath = classpath.map(_.getAbsoluteFile)
val apiOption = (api: Either[Boolean, Source]) => api.right.toOption
val cArgs = new CompilerArguments(compiler.scalaInstance, compiler.cp)
val searchClasspath = explicitBootClasspath(options.options) ++ withBootclasspath(cArgs, absClasspath)
(searchClasspath, Locate.entry(searchClasspath, definesClass))
}
/** Returns a "lookup file for a given class name" function. */
def classPathLookup(config: CompileConfiguration): String => Option[File] =
searchClasspathAndLookup(config)._2
def apply(config: CompileConfiguration)(implicit log: Logger): MixedAnalyzingCompiler = {
import config._
val (searchClasspath, entry) = searchClasspathAndLookup(config)
// Construct a compiler which can handle both java and scala sources.
new MixedAnalyzingCompiler(
compiler,
// TODO - Construction of analyzing Java compiler MAYBE should be earlier...
new AnalyzingJavaCompiler(javac, classpath, compiler.scalaInstance, entry, searchClasspath),
config,
log
)
}
def withBootclasspath(args: CompilerArguments, classpath: Seq[File]): Seq[File] =
args.bootClasspathFor(classpath) ++ args.extClasspath ++ args.finishClasspath(classpath)
private[this] def explicitBootClasspath(options: Seq[String]): Seq[File] =
options.dropWhile(_ != CompilerArguments.BootClasspathOption).drop(1).take(1).headOption.toList.flatMap(IO.parseClasspath)
private[this] val cache = new collection.mutable.HashMap[File, Reference[AnalysisStore]]
private def staticCache(file: File, backing: => AnalysisStore): AnalysisStore =
synchronized {
cache get file flatMap { ref => Option(ref.get) } getOrElse {
val b = backing
cache.put(file, new SoftReference(b))
b
}
}
/** Create an analysis store cache at the desired location. */
def staticCachedStore(cacheFile: File) = staticCache(cacheFile, AnalysisStore.sync(AnalysisStore.cached(FileBasedStore(cacheFile))))
}
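The `staticCache` helper at the end of this file keeps one `AnalysisStore` per cache file behind a `SoftReference`, so entries can be reclaimed under memory pressure and rebuilt on the next access. A standalone, hypothetical restatement of that pattern:

```scala
// Generic soft-reference memoization, as used by staticCache/staticCachedStore above.
import java.lang.ref.{ Reference, SoftReference }
import scala.collection.mutable

final class SoftCache[K, V <: AnyRef](build: K => V) {
  private[this] val cache = new mutable.HashMap[K, Reference[V]]
  def apply(key: K): V = synchronized {
    cache.get(key).flatMap(ref => Option(ref.get)).getOrElse {
      val v = build(key)                  // (re)build the value if absent or already collected
      cache.put(key, new SoftReference(v))
      v
    }
  }
}
```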


@ -0,0 +1,98 @@
package sbt.compiler.javac
import java.io.File
import sbt._
import sbt.classfile.Analyze
import sbt.classpath.ClasspathUtilities
import sbt.compiler.CompilerArguments
import sbt.inc.Locate
import xsbti.api.Source
import xsbti.compile._
import xsbti.{ AnalysisCallback, Reporter }
/**
* This is a java compiler which will also report any discovered source dependencies/apis out via
* an analysis callback.
*
* @param searchClasspath Differs from classpath in that we look up binary dependencies via this classpath.
* @param classLookup A mechanism by which we can figure out if a JAR contains a classfile.
*/
final class AnalyzingJavaCompiler private[sbt] (
val javac: xsbti.compile.JavaCompiler,
val classpath: Seq[File],
val scalaInstance: xsbti.compile.ScalaInstance,
val classLookup: (String => Option[File]),
val searchClasspath: Seq[File]) {
/**
* Compiles some Java code using the currently configured compiler.
*
* @param sources The sources to compile
* @param options The options for the Java compiler
* @param output The output configuration for this compiler
* @param callback A callback to report discovered source/binary dependencies on.
* @param reporter A reporter where semantic compiler failures can be reported.
* @param log A place where we can log debugging/error messages.
* @param progressOpt An optional compilation progress reporter, where we can report back which files we're currently compiling.
*/
def compile(sources: Seq[File], options: Seq[String], output: Output, callback: AnalysisCallback, reporter: Reporter, log: Logger, progressOpt: Option[CompileProgress]): Unit = {
if (!sources.isEmpty) {
val absClasspath = classpath.map(_.getAbsoluteFile)
@annotation.tailrec def ancestor(f1: File, f2: File): Boolean =
if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)
// Here we outline "chunks" of compiles we need to run so that the .class files end up in the right
// location for Java.
val chunks: Map[Option[File], Seq[File]] = output match {
case single: SingleOutput => Map(Some(single.outputDirectory) -> sources)
case multi: MultipleOutput =>
sources groupBy { src =>
multi.outputGroups find { out => ancestor(out.sourceDirectory, src) } map (_.outputDirectory)
}
}
// Report an error for source files that have no mapped output directory.
chunks.get(None) foreach { srcs =>
log.error("No output directory mapped for: " + srcs.map(_.getAbsolutePath).mkString(","))
}
// Here we try to memoize (cache) the known class files in the output directory.
val memo = for ((Some(outputDirectory), srcs) <- chunks) yield {
val classesFinder = PathFinder(outputDirectory) ** "*.class"
(classesFinder, classesFinder.get, srcs)
}
// Here we construct a class-loader we'll use to load + analyze the newly compiled classes.
val loader = ClasspathUtilities.toLoader(searchClasspath)
// TODO - Perhaps we just record task 0/2 here
timed("Java compilation", log) {
try javac.compileWithReporter(sources.toArray, absClasspath.toArray, output, options.toArray, reporter, log)
catch {
// Handle older APIs
case _: NoSuchMethodError =>
javac.compile(sources.toArray, absClasspath.toArray, output, options.toArray, log)
}
}
// TODO - Perhaps we just record task 1/2 here
/** Reads the API information directly from the Class[_] object. Used when Analyzing dependencies. */
def readAPI(source: File, classes: Seq[Class[_]]): Set[String] = {
val (api, inherits) = ClassToAPI.process(classes)
callback.api(source, api)
inherits.map(_.getName)
}
// Runs the analysis portion of Javac.
timed("Java analysis", log) {
for ((classesFinder, oldClasses, srcs) <- memo) {
val newClasses = Set(classesFinder.get: _*) -- oldClasses
Analyze(newClasses.toSeq, srcs, log)(callback, loader, readAPI)
}
}
// TODO - Perhaps we just record task 2/2 here
}
}
/** Debugging method to time how long it takes to run various compilation tasks. */
private[this] def timed[T](label: String, log: Logger)(t: => T): T = {
val start = System.nanoTime
val result = t
val elapsed = System.nanoTime - start
log.debug(label + " took " + (elapsed / 1e9) + " s")
result
}
}
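The `chunks` grouping above decides, for each Java source, which output directory it belongs to by walking up its parent directories. A standalone restatement of that logic, with plain `(sourceDirectory, outputDirectory)` pairs standing in for `OutputGroup`:

```scala
// Sketch: group each source under the output directory whose source directory is an
// ancestor of it; sources with no matching group end up under the None key.
import java.io.File

@annotation.tailrec
def ancestor(f1: File, f2: File): Boolean =
  if (f2 eq null) false else if (f1 == f2) true else ancestor(f1, f2.getParentFile)

def chunk(sources: Seq[File], groups: Seq[(File, File)]): Map[Option[File], Seq[File]] =
  sources groupBy { src =>
    groups collectFirst { case (srcDir, outDir) if ancestor(srcDir, src) => outDir }
  }
```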


@ -150,6 +150,7 @@ class ScalaCompilerForUnitTesting(nameHashing: Boolean = false) {
val args = Array.empty[String]
object output extends SingleOutput {
def outputDirectory: File = outputDir
override def toString = s"SingleOutput($outputDirectory)"
}
val weakLog = new WeakLog(ConsoleLogger(), ConsoleReporter)
val cachedCompiler = new CachedCompiler0(args, output, weakLog, false)


@ -77,7 +77,13 @@ object AnalysisFormats {
implicit val multipleOutputFormat: Format[MultipleOutput] =
wrap[MultipleOutput, Array[OutputGroup]](
(_.outputGroups),
{ groups => new MultipleOutput { def outputGroups = groups } }
{
groups =>
new MultipleOutput {
def outputGroups = groups
override def toString = s"MultipleOutput($outputGroups)"
}
}
)
implicit val singleOutputFormat: Format[SingleOutput] =
wrap[SingleOutput, File](


@ -308,8 +308,10 @@ object TextAnalysisFormat {
case (src: File, out: File) => new MultipleOutput.OutputGroup {
val sourceDirectory = src
val outputDirectory = out
override def toString = s"OutputGroup($src -> $out)"
}
}
override def toString = s"MultipleOuput($outputGroups)"
}
case str: String => throw new ReadException("Unrecognized output mode: " + str)
}


@ -8,9 +8,11 @@ package compiler
import xsbti.compile.{ Output, SingleOutput, MultipleOutput }
import java.io.File
/** Constructor for the `Output` ADT for the incremental compiler. Can either take groups (src -> out) or a single output. */
object CompileOutput {
def apply(dir: File): Output = new SingleOutput {
def outputDirectory = dir
override def toString = s"SingleOutput($outputDirectory)"
}
def apply(groups: (File, File)*): Output = new MultipleOutput {
@ -18,7 +20,9 @@ object CompileOutput {
case (src, out) => new MultipleOutput.OutputGroup {
def sourceDirectory = src
def outputDirectory = out
override def toString = s"OutputGroup($src -> $out)"
}
}
override def toString = s"MultiOutput($outputGroups)"
}
}
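A brief, hypothetical usage of the two constructors above (the `sbt.compiler` package path is assumed from the hunk's package clause):

```scala
// Sketch: building Output values for a single output directory and for per-source-directory groups.
import java.io.File
import sbt.compiler.CompileOutput

val single = CompileOutput(new File("target/classes"))
val multi = CompileOutput(
  new File("src/main/scala") -> new File("target/classes"),
  new File("src/test/scala") -> new File("target/test-classes")
)
```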


@ -1,5 +1,10 @@
package xsbti.compile;
/**
* An API for reporting when files are being compiled.
*
* Note: This is tied VERY SPECIFICALLY to Scala.
*/
public interface CompileProgress {
void startUnit(String phase, String unitPath);


@ -3,6 +3,9 @@ package xsbti.compile;
import xsbti.Logger;
import xsbti.Reporter;
/**
* An interface which lets us know how to retrieve cached compiler instances from the current JVM.
*/
public interface GlobalsCache
{
public CachedCompiler apply(String[] args, Output output, boolean forceNew, CachedCompilerProvider provider, Logger log, Reporter reporter);


@ -44,8 +44,18 @@ public interface IncrementalCompiler<Analysis, ScalaCompiler>
* @param instance The Scala version to use
* @param interfaceJar The compiler interface jar compiled for the Scala version being used
* @param options Configures how arguments to the underlying Scala compiler will be built.
*
*/
@Deprecated
ScalaCompiler newScalaCompiler(ScalaInstance instance, File interfaceJar, ClasspathOptions options, Logger log);
/**
* Creates a compiler instance that can be used by the `compile` method.
*
* @param instance The Scala version to use
* @param interfaceJar The compiler interface jar compiled for the Scala version being used
* @param options Configures how arguments to the underlying Scala compiler will be built.
*/
ScalaCompiler newScalaCompiler(ScalaInstance instance, File interfaceJar, ClasspathOptions options);
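For callers migrating off the deprecated overload, a hypothetical sketch using the `IC` implementation from this commit (`scalaInstance`, `interfaceJar` and `cpOptions` are assumed to be in scope):

```scala
// Sketch: the Logger-free overload is now preferred; the Logger-taking one is deprecated since 0.13.8.
import sbt.compiler.IC

val scalac = IC.newScalaCompiler(scalaInstance, interfaceJar, cpOptions)
// IC.newScalaCompiler(scalaInstance, interfaceJar, cpOptions, log) // deprecated: a logger is no longer needed
```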
/**
* Compiles the source interface for a Scala version. The resulting jar can then be used by the `newScalaCompiler` method


@ -3,7 +3,7 @@
*/
package sbt
import sbt.compiler.javac.{ IncrementalCompilerJavaTools, JavaCompiler, JavaTools }
import sbt.compiler.javac.{ IncrementalCompilerJavaTools, JavaTools }
import xsbti.{ Logger => _, _ }
import xsbti.compile.{ CompileOrder, GlobalsCache }
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }
@ -15,12 +15,16 @@ import java.io.File
object Compiler {
val DefaultMaxErrors = 100
/** Inputs necessary to run the incremental compiler. */
final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
/** The inputs for the compiler *and* the previous analysis of source dependencies. */
final case class InputsWithPrevious(inputs: Inputs, previousAnalysis: PreviousAnalysis)
final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder)
final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions)
private[sbt] trait JavaToolWithNewInterface extends JavaTool {
def newJavac: IncrementalCompilerJavaTools
}
/** The instances of Scalac/Javac used to compile the current project. */
final case class Compilers(scalac: AnalyzingCompiler, javac: JavaTool) {
final def newJavac: Option[IncrementalCompilerJavaTools] =
javac match {
@ -28,9 +32,13 @@ object Compiler {
case _ => None
}
}
final case class NewCompilers(scalac: AnalyzingCompiler, javac: JavaTools)
/** The previous source dependency analysis result from compilation. */
final case class PreviousAnalysis(analysis: Analysis, setup: Option[CompileSetup])
type CompileResult = IC.Result
def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]], order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String],
javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]],
order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
new Inputs(
compilers,
new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order),
@ -77,28 +85,47 @@ object Compiler {
val launcher = app.provider.scalaProvider.launcher
val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val provider = ComponentCompiler.interfaceProvider(componentManager)
new AnalyzingCompiler(instance, provider, cpOptions, log)
new AnalyzingCompiler(instance, provider, cpOptions)
}
def apply(in: Inputs, log: Logger): Analysis =
{
import in.compilers._
import in.config._
import in.incSetup._
apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
@deprecated("0.13.8", "Use the `compile` method instead.")
def apply(in: Inputs, log: Logger): Analysis = {
import in.config._
apply(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}
@deprecated("0.13.8", "Use the `compile` method instead.")
def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis = {
import in.compilers._
import in.config._
import in.incSetup._
// Here we load the previous analysis, since the new code path doesn't do it for us.
val (previousAnalysis, previousSetup) = {
MixedAnalyzingCompiler.staticCachedStore(cacheFile).get().map {
case (a, s) => (a, Some(s))
} getOrElse {
(Analysis.empty(nameHashing = incOptions.nameHashing), None)
}
}
def apply(in: Inputs, log: Logger, reporter: xsbti.Reporter): Analysis =
compile(InputsWithPrevious(in, PreviousAnalysis(previousAnalysis, previousSetup)), log, reporter).analysis
}
def compile(in: InputsWithPrevious, log: Logger): CompileResult =
{
import in.compilers._
import in.config._
import in.incSetup._
val agg = new AggressiveCompile(cacheFile)
import in.inputs.config._
compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}
def compile(in: InputsWithPrevious, log: Logger, reporter: xsbti.Reporter): CompileResult =
{
import in.inputs.compilers._
import in.inputs.config._
import in.inputs.incSetup._
// Here is some trickery to choose the more recent (reporter-using) java compiler rather
// than the previously defined versions.
// TODO - Remove this hackery in sbt 1.0.
val javacChosen: xsbti.compile.JavaCompiler =
in.compilers.newJavac.map(_.xsbtiCompiler).getOrElse(in.compilers.javac)
agg(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
analysisMap, definesClass, reporter, order, skip, incOptions)(log)
in.inputs.compilers.newJavac.map(_.xsbtiCompiler).getOrElse(in.inputs.compilers.javac)
// TODO - Why are we not using the IC interface???
IC.incrementalCompile(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
in.previousAnalysis.analysis, in.previousAnalysis.setup, analysisMap, definesClass, reporter, order, skip, incOptions)(log)
}
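Putting the pieces together, a hypothetical caller of the new two-step API, with `inputs: Compiler.Inputs`, a previously loaded `previousAnalysis`/`previousSetup` and `log` assumed to be in scope:

```scala
// Sketch only: compile with an explicitly supplied previous analysis, then inspect the result.
import sbt.Compiler
import sbt.Compiler.{ InputsWithPrevious, PreviousAnalysis }

val result: Compiler.CompileResult =
  Compiler.compile(InputsWithPrevious(inputs, PreviousAnalysis(previousAnalysis, previousSetup)), log)

if (result.hasModified) println("compilation produced new output")
val analysis = result.analysis // the updated dependency Analysis to persist
```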
private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =


@ -5,6 +5,7 @@ package sbt
import Attributed.data
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sbt.Compiler.InputsWithPrevious
import xsbt.api.Discovery
import xsbti.compile.CompileOrder
import Project.{ inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar }
@ -14,7 +15,8 @@ import Configurations.{ Compile, CompilerPlugin, IntegrationTest, names, Provide
import CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion }
import complete._
import std.TaskExtra._
import inc.{ FileValueCache, IncOptions, Locate }
import sbt.inc.{ Analysis, FileValueCache, IncOptions, Locate }
import sbt.compiler.{ MixedAnalyzingCompiler, AggressiveCompile }
import testing.{ Framework, Runner, AnnotatedFingerprint, SubclassFingerprint }
import sys.error
@ -246,8 +248,10 @@ object Defaults extends BuildCommon {
def compilersSetting = compilers := Compiler.compilers(scalaInstance.value, classpathOptions.value, javaHome.value)(appConfiguration.value, streams.value.log)
lazy val configTasks = docTaskSettings(doc) ++ inTask(compile)(compileInputsSettings) ++ configGlobal ++ Seq(
compile <<= compileTask tag (Tags.Compile, Tags.CPU),
lazy val configTasks = docTaskSettings(doc) ++ inTask(compile)(compileInputsSettings) ++ configGlobal ++ compileAnalysisSettings ++ Seq(
compile <<= compileTask,
manipulateBytecode := compileIncremental.value,
compileIncremental <<= compileIncrementalTask tag (Tags.Compile, Tags.CPU),
printWarnings <<= printWarningsTask,
compileAnalysisFilename := {
// Here, if the user wants cross-scala-versioning, we also append it
@ -648,7 +652,7 @@ object Defaults extends BuildCommon {
key in TaskGlobal <<= packageTask,
packageConfiguration <<= packageConfigurationTask,
mappings <<= mappingsTask,
packagedArtifact := (artifact.value, key.value),
packagedArtifact := (artifact.value -> key.value),
artifact <<= artifactSetting,
artifactPath <<= artifactPathSetting(artifact)
))
@ -778,15 +782,28 @@ object Defaults extends BuildCommon {
@deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0")
def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings)
def compileTask: Initialize[Task[inc.Analysis]] = Def.task { compileTaskImpl(streams.value, (compileInputs in compile).value, (compilerReporter in compile).value) }
private[this] def compileTaskImpl(s: TaskStreams, ci: Compiler.Inputs, reporter: Option[xsbti.Reporter]): inc.Analysis =
def compileTask: Initialize[Task[inc.Analysis]] = Def.task {
val setup: Compiler.IncSetup = compileIncSetup.value
// TODO - expose bytecode manipulation phase.
val analysisResult: Compiler.CompileResult = manipulateBytecode.value
if (analysisResult.hasModified) {
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile)
store.set(analysisResult.analysis, analysisResult.setup)
}
analysisResult.analysis
}
def compileIncrementalTask = Def.task {
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value, previousCompile.value, (compilerReporter in compile).value)
}
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Compiler.Inputs, previous: Compiler.PreviousAnalysis, reporter: Option[xsbti.Reporter]): Compiler.CompileResult =
{
lazy val x = s.text(ExportStream)
def onArgs(cs: Compiler.Compilers) = cs.copy(scalac = cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac.onArgs(exported(x, "javac")))
val i = ci.copy(compilers = onArgs(ci.compilers))
val i = InputsWithPrevious(ci.copy(compilers = onArgs(ci.compilers)), previous)
try reporter match {
case Some(reporter) => Compiler(i, s.log, reporter)
case None => Compiler(i, s.log)
case Some(reporter) => Compiler.compile(i, s.log, reporter)
case None => Compiler.compile(i, s.log)
}
finally x.close() // workaround for #937
}
@ -803,9 +820,20 @@ object Defaults extends BuildCommon {
def compileInputsSettings: Seq[Setting[_]] =
Seq(compileInputs := {
val cp = classDirectory.value +: data(dependencyClasspath.value)
Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value, maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log)
Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value,
maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log)
},
compilerReporter := None)
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
previousCompile := {
val setup: Compiler.IncSetup = compileIncSetup.value
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile)
store.get() match {
case Some((an, setup)) => Compiler.PreviousAnalysis(an, Some(setup))
case None => Compiler.PreviousAnalysis(Analysis.empty(nameHashing = setup.incOptions.nameHashing), None)
}
}
)
def printWarningsTask: Initialize[Task[Unit]] =
(streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) =>
@ -1006,7 +1034,7 @@ object Classpaths {
packagedArtifacts :== Map.empty,
crossTarget := target.value,
makePom := { val config = makePomConfiguration.value; IvyActions.makePom(ivyModule.value, config, streams.value.log); config.file },
packagedArtifact in makePom := (artifact in makePom value, makePom value),
packagedArtifact in makePom := ((artifact in makePom).value -> makePom.value),
deliver <<= deliverTask(deliverConfiguration),
deliverLocal <<= deliverTask(deliverLocalConfiguration),
publish <<= publishTask(publishConfiguration, deliver),
@ -1485,7 +1513,7 @@ object Classpaths {
def visit(p: ProjectRef, c: Configuration) {
val applicableConfigs = allConfigs(c)
for (ac <- applicableConfigs) // add all configurations in this project
visited add (p, ac.name)
visited add (p -> ac.name)
val masterConfs = names(getConfigurations(projectRef, data))
for (ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)) {


@ -9,6 +9,7 @@ import Def.ScopedKey
import complete._
import inc.Analysis
import inc.Locate.DefinesClass
import sbt.compiler.MixedAnalyzingCompiler
import std.TaskExtra._
import xsbti.compile.{ CompileOrder, GlobalsCache }
import scala.xml.{ Node => XNode, NodeSeq }
@ -141,6 +142,9 @@ object Keys {
val consoleQuick = TaskKey[Unit]("console-quick", "Starts the Scala interpreter with the project dependencies on the classpath.", ATask, console)
val consoleProject = TaskKey[Unit]("console-project", "Starts the Scala interpreter with the sbt and the build definition on the classpath and useful imports.", AMinusTask)
val compile = TaskKey[Analysis]("compile", "Compiles sources.", APlusTask)
val manipulateBytecode = TaskKey[Compiler.CompileResult]("manipulateBytecode", "Manipulates generated bytecode", BTask)
val compileIncremental = TaskKey[Compiler.CompileResult]("compileIncremental", "Actually runs the incremental compilation", DTask)
val previousCompile = TaskKey[Compiler.PreviousAnalysis]("readAnalysis", "Read the incremental compiler analysis from disk", DTask)
val compilers = TaskKey[Compiler.Compilers]("compilers", "Defines the Scala and Java compilers to use for compilation.", DTask)
val compileAnalysisFilename = TaskKey[String]("compileAnalysisFilename", "Defines the filename used to store the incremental compiler analysis file (inside the streams cacheDirectory).", DTask)
val compileIncSetup = TaskKey[Compiler.IncSetup]("inc-compile-setup", "Configures aspects of incremental compilation.", DTask)


@ -156,8 +156,8 @@ object Sbt extends Build {
// Persists the incremental data structures using SBinary
lazy val compilePersistSub = testedBaseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub, compileIncrementalSub % "test->test") settings (sbinary)
// sbt-side interface to compiler. Calls compiler-side interface reflectively
lazy val compilerSub = testedBaseProject(compilePath, "Compile") dependsOn (launchInterfaceSub, interfaceSub % "compile;test->test", logSub, ioSub, classpathSub,
logSub % "test->test", launchSub % "test->test", apiSub % "test") settings (compilerSettings: _*)
lazy val compilerSub = testedBaseProject(compilePath, "Compile") dependsOn (launchInterfaceSub, interfaceSub % "compile;test->test", logSub, ioSub, classpathSub, apiSub, classfileSub,
logSub % "test->test", launchSub % "test->test") settings (compilerSettings: _*)
lazy val compilerIntegrationSub = baseProject(compilePath / "integration", "Compiler Integration") dependsOn (
compileIncrementalSub, compilerSub, compilePersistSub, apiSub, classfileSub)
lazy val compilerIvySub = baseProject(compilePath / "ivy", "Compiler Ivy Integration") dependsOn (ivySub, compilerSub)


@ -4,7 +4,7 @@ libraryDependencies += "org.scala-sbt" % "sbt" % sbtVersion.value
lazy val expectErrorNotCrash = taskKey[Unit]("Ensures that sbt properly set types on Trees so that the compiler doesn't crash on a bad reference to .value, but gives a proper error instead.")
expectErrorNotCrash := {
val fail = (compile in Compile).failure.value
val fail = (compileIncremental in Compile).failure.value
fail.directCause match {
case Some(x: xsbti.CompileFailed) => ()
case _ => sys.error("Compiler crashed instead of providing a compile-time-only exception.")