Merge pull request #2478 from Duhemm/wip/update-incrementalcompiler

Forward port "Handle source, docs artifacts correctly for Ivy [1.0.x]" + update modules
This commit is contained in:
eugene yokota 2016-02-25 08:36:19 -05:00
commit e54475690c
34 changed files with 196 additions and 159 deletions

View File

@ -90,8 +90,8 @@ object CacheIvy {
implicit def callerFormat: Format[Caller] =
wrap[Caller, (ModuleID, Seq[String], Map[String, String], Boolean, Boolean, Boolean, Boolean)](c => (c.caller, c.callerConfigurations, c.callerExtraAttributes, c.isForceDependency, c.isChangingDependency, c.isTransitiveDependency, c.isDirectlyForceDependency),
{ case (c, cc, ea, fd, cd, td, df) => new Caller(c, cc, ea, fd, cd, td, df) })
implicit def exclusionRuleFormat(implicit sf: Format[String]): Format[ExclusionRule] =
wrap[ExclusionRule, (String, String, String, Seq[String])](e => (e.organization, e.name, e.artifact, e.configurations), { case (o, n, a, cs) => ExclusionRule(o, n, a, cs) })
implicit def exclusionRuleFormat(implicit sf: Format[String]): Format[InclExclRule] =
wrap[InclExclRule, (String, String, String, Seq[String])](e => (e.organization, e.name, e.artifact, e.configurations), { case (o, n, a, cs) => InclExclRule(o, n, a, cs) })
implicit def crossVersionFormat: Format[CrossVersion] = wrap(crossToInt, crossFromInt)
implicit def sourcePositionFormat: Format[SourcePosition] =
wrap[SourcePosition, (Int, String, Int, Int)](
@ -115,9 +115,9 @@ object CacheIvy {
private[this] val crossToInt = (c: CrossVersion) => c match { case Disabled => 0; case b: Binary => BinaryValue; case f: Full => FullValue }
implicit def moduleIDFormat(implicit sf: Format[String], bf: Format[Boolean]): Format[ModuleID] =
wrap[ModuleID, ((String, String, String, Option[String]), (Boolean, Boolean, Boolean, Seq[Artifact], Seq[ExclusionRule], Map[String, String], CrossVersion))](
m => ((m.organization, m.name, m.revision, m.configurations), (m.isChanging, m.isTransitive, m.isForce, m.explicitArtifacts, m.exclusions, m.extraAttributes, m.crossVersion)),
{ case ((o, n, r, cs), (ch, t, f, as, excl, x, cv)) => ModuleID(o, n, r, cs, ch, t, f, as, excl, x, cv) }
wrap[ModuleID, ((String, String, String, Option[String]), (Boolean, Boolean, Boolean, Seq[Artifact], Seq[ExclusionRule], Seq[InclusionRule], Map[String, String], CrossVersion))](
m => ((m.organization, m.name, m.revision, m.configurations), (m.isChanging, m.isTransitive, m.isForce, m.explicitArtifacts, m.exclusions, m.inclusions, m.extraAttributes, m.crossVersion)),
{ case ((o, n, r, cs), (ch, t, f, as, excl, incl, x, cv)) => ModuleID(o, n, r, cs, ch, t, f, as, excl, incl, x, cv) }
)
// For some reason sbinary seems to detect unserialized instance Set[ModuleID] to be not equal. #1620
implicit def moduleSetIC: InputCache[Set[ModuleID]] =
@ -169,7 +169,7 @@ object CacheIvy {
implicit def sftpRToHL = (s: SftpRepository) => s.name :+: s.connection :+: s.patterns :+: HNil
implicit def rawRToHL = (r: RawRepository) => r.name :+: r.resolver.getClass.getName :+: HNil
implicit def chainRToHL = (c: ChainedResolver) => c.name :+: c.resolvers :+: HNil
implicit def moduleToHL = (m: ModuleID) => m.organization :+: m.name :+: m.revision :+: m.configurations :+: m.isChanging :+: m.isTransitive :+: m.explicitArtifacts :+: m.exclusions :+: m.extraAttributes :+: m.crossVersion :+: HNil
implicit def moduleToHL = (m: ModuleID) => m.organization :+: m.name :+: m.revision :+: m.configurations :+: m.isChanging :+: m.isTransitive :+: m.explicitArtifacts :+: m.exclusions :+: m.inclusions :+: m.extraAttributes :+: m.crossVersion :+: HNil
}
import L3._
@ -186,7 +186,7 @@ object CacheIvy {
implicit def sshConnectionToHL = (s: SshConnection) => s.authentication :+: s.hostname :+: s.port :+: HNil
implicit def artifactToHL = (a: Artifact) => a.name :+: a.`type` :+: a.extension :+: a.classifier :+: names(a.configurations) :+: a.url :+: a.extraAttributes :+: HNil
implicit def exclusionToHL = (e: ExclusionRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: HNil
implicit def inclExclToHL = (e: InclExclRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: HNil
implicit def sbtExclusionToHL = (e: SbtExclusionRule) => e.organization :+: e.name :+: e.artifact :+: e.configurations :+: e.crossVersion :+: HNil
implicit def crossToHL = (c: CrossVersion) => crossToInt(c) :+: HNil
@ -200,7 +200,7 @@ object CacheIvy {
implicit def ivyFileIC: InputCache[IvyFileConfiguration] = wrapIn
implicit def connectionIC: InputCache[SshConnection] = wrapIn
implicit def artifactIC: InputCache[Artifact] = wrapIn
implicit def exclusionIC: InputCache[ExclusionRule] = wrapIn
implicit def exclusionIC: InputCache[InclExclRule] = wrapIn
implicit def sbtExclusionIC: InputCache[SbtExclusionRule] = wrapIn
implicit def crossVersionIC: InputCache[CrossVersion] = wrapIn
/* implicit def publishConfIC: InputCache[PublishConfiguration] = wrapIn

View File

@ -4,11 +4,10 @@
package sbt
import sbt.internal.inc.javac.{ IncrementalCompilerJavaTools, JavaTools }
import sbt.internal.inc.{ AnalyzingCompiler, ClasspathOptions, CompileSetup, CompileOutput, IC, JavaTool, LoggerReporter, ScalaInstance }
import sbt.internal.inc.{ Analysis, AnalyzingCompiler, ClasspathOptions, CompileOutput, ComponentCompiler, IncrementalCompilerImpl, JavaTool, Locate, LoggerReporter, ScalaInstance }
import xsbti.{ Logger => _, _ }
import xsbti.compile.{ CompileOrder, GlobalsCache }
import xsbti.compile.{ CompileOrder, Compilers, CompileResult, GlobalsCache, IncOptions, Inputs, MiniSetup }
import CompileOrder.{ JavaThenScala, Mixed, ScalaThenJava }
import sbt.internal.inc.{ Analysis, ComponentCompiler, IncOptions, Locate }
import Locate.DefinesClass
import java.io.File
@ -31,29 +30,29 @@ object Compiler {
ComponentCompiler.incrementalVersion, Some("component")).sources()
/** Inputs necessary to run the incremental compiler. */
final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
/** The inputs for the compiler *and* the previous analysis of source dependencies. */
final case class InputsWithPrevious(inputs: Inputs, previousAnalysis: PreviousAnalysis)
final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder)
final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions)
private[sbt] trait JavaToolWithNewInterface extends JavaTool {
def newJavac: IncrementalCompilerJavaTools
}
// final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
// /** The inputs for the compiler *and* the previous analysis of source dependencies. */
// final case class InputsWithPrevious(inputs: Inputs, previousAnalysis: PreviousAnalysis)
// final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder)
// final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions)
// private[sbt] trait JavaToolWithNewInterface extends JavaTool {
// def newJavac: IncrementalCompilerJavaTools
// }
/** The instances of Scalac/Javac used to compile the current project. */
final case class Compilers(scalac: AnalyzingCompiler, javac: IncrementalCompilerJavaTools)
// final case class Compilers(scalac: AnalyzingCompiler, javac: IncrementalCompilerJavaTools)
/** The previous source dependency analysis result from compilation. */
final case class PreviousAnalysis(analysis: Analysis, setup: Option[CompileSetup])
type CompileResult = IC.Result
// final case class PreviousAnalysis(analysis: Analysis, setup: Option[MiniSetup])
def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String],
javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]],
order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
new Inputs(
compilers,
new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order),
incSetup
)
// def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String],
// javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]],
// order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
// new Inputs(
// compilers,
// new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order),
// incSetup
// )
// @deprecated("Use `compilers(ScalaInstance, ClasspathOptions, Option[File], IvyConfiguration)`.", "0.13.10")
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File])(implicit app: AppConfiguration, log: Logger): Compilers =
@ -122,7 +121,7 @@ object Compiler {
ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): Compilers = {
val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, sourcesModule)
val javac = JavaTools.directOrFork(instance, cpOptions, javaHome)
new Compilers(scalac, javac)
IncrementalCompilerImpl.Compilers(scalac, javac)
}
def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
{
@ -132,25 +131,27 @@ object Compiler {
new AnalyzingCompiler(instance, provider, cpOptions)
}
def compile(in: InputsWithPrevious, log: Logger): CompileResult =
def compile(in: Inputs, log: Logger): CompileResult =
{
import in.inputs.config._
compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}
def compile(in: InputsWithPrevious, log: Logger, reporter: xsbti.Reporter): CompileResult =
{
import in.inputs.compilers._
import in.inputs.config._
import in.inputs.incSetup._
// Here is some trickery to choose the more recent (reporter-using) java compiler rather
// than the previously defined versions.
// TODO - Remove this hackery in sbt 1.0.
val javacChosen: xsbti.compile.JavaCompiler =
in.inputs.compilers.javac.xsbtiCompiler // ).getOrElse(in.inputs.compilers.javac)
// TODO - Why are we not using the IC interface???
IC.incrementalCompile(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
in.previousAnalysis.analysis, in.previousAnalysis.setup, analysisMap, definesClass, reporter, order, skip, incOptions)(log)
sbt.inc.IncrementalCompilerUtil.defaultIncrementalCompiler.compile(in, log)
// import in.inputs.config._
// compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}
// def compile(in: Inputs, log: Logger, reporter: xsbti.Reporter): CompileResult =
// {
// import in.inputs.compilers._
// import in.inputs.config._
// import in.inputs.incSetup._
// // Here is some trickery to choose the more recent (reporter-using) java compiler rather
// // than the previously defined versions.
// // TODO - Remove this hackery in sbt 1.0.
// val javacChosen: xsbti.compile.JavaCompiler =
// in.inputs.compilers.javac.xsbtiCompiler // ).getOrElse(in.inputs.compilers.javac)
// // TODO - Why are we not using the IC interface???
// val compiler = new IncrementalCompilerImpl
// compiler.incrementalCompile(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
// in.previousAnalysis.analysis, in.previousAnalysis.setup, analysisMap, definesClass, reporter, order, skip, incOptions)(log)
// }
private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
mappers.foldRight({ p: A => p }) { (mapper, mappers) => { p: A => mapper(p).getOrElse(mappers(p)) } }

View File

@ -4,10 +4,12 @@
package sbt
import java.io.File
import sbt.internal.inc.AnalyzingCompiler
import sbt.internal.inc.{ AnalyzingCompiler, IncrementalCompilerImpl }
import sbt.internal.util.JLine
import sbt.util.Logger
import xsbti.compile.Inputs
final class Console(compiler: AnalyzingCompiler) {
/** Starts an interactive scala interpreter session with the given classpath.*/
def apply(classpath: Seq[File], log: Logger): Option[String] =
@ -28,5 +30,8 @@ final class Console(compiler: AnalyzingCompiler) {
}
}
object Console {
def apply(conf: Compiler.Inputs): Console = new Console(conf.compilers.scalac)
def apply(conf: Inputs): Console =
conf.compilers match {
case IncrementalCompilerImpl.Compilers(scalac, _) => new Console(scalac)
}
}

View File

@ -10,6 +10,7 @@ import TaskExtra._
import sbt.internal.util.FeedbackProvidedException
import sbt.internal.util.Types._
import xsbti.api.Definition
import xsbti.compile.CompileAnalysis
import ConcurrentRestrictions.Tag
import testing.{ AnnotatedFingerprint, Fingerprint, Framework, SubclassFingerprint, Runner, TaskDef, SuiteSelector, Task => TestTask }
@ -269,10 +270,10 @@ object Tests {
}
def overall(results: Iterable[TestResult.Value]): TestResult.Value =
(TestResult.Passed /: results) { (acc, result) => if (acc.id < result.id) result else acc }
def discover(frameworks: Seq[Framework], analysis: Analysis, log: Logger): (Seq[TestDefinition], Set[String]) =
def discover(frameworks: Seq[Framework], analysis: CompileAnalysis, log: Logger): (Seq[TestDefinition], Set[String]) =
discover(frameworks flatMap TestFramework.getFingerprints, allDefs(analysis), log)
def allDefs(analysis: Analysis) = analysis.apis.internal.values.flatMap(_.api.definitions).toSeq
def allDefs(analysis: CompileAnalysis) = analysis match { case analysis: Analysis => analysis.apis.internal.values.flatMap(_.api.definitions).toSeq }
def discover(fingerprints: Seq[Fingerprint], definitions: Seq[Definition], log: Logger): (Seq[TestDefinition], Set[String]) =
{
val subclasses = fingerprints collect { case sub: SubclassFingerprint => (sub.superclassName, sub.isModule, sub) };

View File

@ -69,5 +69,5 @@ object Build {
@deprecated("Use Attributed.data", "0.13.0")
def data[T](in: Seq[Attributed[T]]): Seq[T] = Attributed.data(in)
def analyzed(in: Seq[Attributed[_]]): Seq[sbt.internal.inc.Analysis] = in.flatMap { _.metadata.get(Keys.analysis) }
def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] = in.flatMap { _.metadata.get(Keys.analysis) }
}

View File

@ -7,7 +7,7 @@ import sbt.util.Logger
import java.io.File
import sbt.librarymanagement.Resolver
import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }
import sbt.internal.inc.{ AnalyzingCompiler, ClasspathOptions }
import sbt.internal.inc.{ AnalyzingCompiler, ClasspathOptions, IncrementalCompilerImpl }
object ConsoleProject {
def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(implicit log: Logger): Unit = {
@ -21,7 +21,7 @@ object ConsoleProject {
val ivyPaths = new IvyPaths(unit.unit.localBase, bootIvyHome(state.configuration))
val ivyConfiguration = new InlineIvyConfiguration(ivyPaths, Resolver.withDefaultResolvers(Nil),
Nil, Nil, localOnly, lock, checksums, None, log)
val compiler: AnalyzingCompiler = Compiler.compilers(ClasspathOptions.repl, ivyConfiguration)(state.configuration, log).scalac
val compiler: AnalyzingCompiler = Compiler.compilers(ClasspathOptions.repl, ivyConfiguration)(state.configuration, log) match { case IncrementalCompilerImpl.Compilers(scalac, _) => scalac }
val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1))
val importString = imports.mkString("", ";\n", ";\n\n")
val initCommands = importString + extra

View File

@ -7,10 +7,9 @@ import scala.concurrent.duration.{ FiniteDuration, Duration }
import sbt.internal.util.Attributed
import sbt.internal.util.Attributed.data
import Scope.{ fillTaskAxis, GlobalScope, ThisScope }
import sbt.Compiler.InputsWithPrevious
import sbt.internal.librarymanagement.mavenint.{ PomExtraDependencyAttributes, SbtPomExtraProperties }
import xsbt.api.Discovery
import xsbti.compile.CompileOrder
import xsbti.compile.{ CompileAnalysis, CompileOptions, CompileOrder, CompileResult, DefinesClass, IncOptions, IncOptionsUtil, Inputs, MiniSetup, PreviousResult, Setup, TransactionalManagerType }
import Project.{ inConfig, inScope, inTask, richInitialize, richInitializeTask, richTaskSessionVar }
import Def.{ Initialize, ScopedKey, Setting, SettingsDefinition }
import sbt.internal.librarymanagement.{ CustomPomParser, DependencyFilter }
@ -20,10 +19,10 @@ import sbt.librarymanagement.Configurations.{ Compile, CompilerPlugin, Integrati
import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion }
import sbt.internal.util.complete._
import std.TaskExtra._
import sbt.internal.inc.{ Analysis, ClassfileManager, ClasspathOptions, CompilerCache, FileValueCache, IncOptions, Locate, LoggerReporter, MixedAnalyzingCompiler, ScalaInstance }
import sbt.internal.inc.{ Analysis, ClassfileManager, ClasspathOptions, CompilerCache, FileValueCache, IncrementalCompilerImpl, Locate, LoggerReporter, MixedAnalyzingCompiler, ScalaInstance }
import testing.{ Framework, Runner, AnnotatedFingerprint, SubclassFingerprint }
import sbt.librarymanagement._
import sbt.librarymanagement.{ `package` => _, _ }
import sbt.internal.librarymanagement._
import sbt.internal.librarymanagement.syntax._
import sbt.internal.util._
@ -41,6 +40,9 @@ import sbt.internal.util.Cache.seqFormat
import sbt.util.Logger
import CommandStrings.ExportStream
import xsbti.Maybe
import sbt.util.InterfaceUtil.{ f1, o2m }
import sbt.internal.util.Types._
import sbt.internal.io.WatchState
@ -58,10 +60,10 @@ object Defaults extends BuildCommon {
def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = app.provider.scalaProvider.launcher.globalLock
def extractAnalysis[T](a: Attributed[T]): (T, Analysis) =
def extractAnalysis[T](a: Attributed[T]): (T, CompileAnalysis) =
(a.data, a.metadata get Keys.analysis getOrElse Analysis.Empty)
def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[Analysis] =
def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[CompileAnalysis] =
{
val m = (for (a <- cp; an <- a.metadata get Keys.analysis) yield (a.data, an)).toMap
m.get _
@ -237,8 +239,8 @@ object Defaults extends BuildCommon {
)
def compileBase = inTask(console)(compilersSetting :: Nil) ++ compileBaseGlobal ++ Seq(
incOptions := incOptions.value.withNewClassfileManager(
ClassfileManager.transactional(crossTarget.value / "classes.bak", sbt.util.Logger.Null)),
incOptions := incOptions.value.withClassfileManagerType(
Maybe.just(new TransactionalManagerType(crossTarget.value / "classes.bak", sbt.util.Logger.Null))),
scalaInstance <<= scalaInstanceTask,
crossVersion := (if (crossPaths.value) CrossVersion.binary else CrossVersion.Disabled),
crossTarget := makeCrossTarget(target.value, scalaBinaryVersion.value, sbtBinaryVersion.value, sbtPlugin.value, crossPaths.value),
@ -250,7 +252,7 @@ object Defaults extends BuildCommon {
)
// must be a val: duplication detected by object identity
private[this] lazy val compileBaseGlobal: Seq[Setting[_]] = globalDefaults(Seq(
incOptions := IncOptions.Default,
incOptions := IncOptionsUtil.defaultIncOptions,
classpathOptions :== ClasspathOptions.boot,
classpathOptions in console :== ClasspathOptions.repl,
compileOrder :== CompileOrder.Mixed,
@ -496,7 +498,7 @@ object Defaults extends BuildCommon {
def testQuickFilter: Initialize[Task[Seq[String] => Seq[String => Boolean]]] =
(fullClasspath in test, streams in test) map {
(cp, s) =>
val ans = cp.flatMap(_.metadata get Keys.analysis)
val ans: Seq[Analysis] = cp.flatMap(_.metadata get Keys.analysis) map { case a0: Analysis => a0 }
val succeeded = TestStatus.read(succeededFile(s.cacheDirectory))
val stamps = collection.mutable.Map.empty[File, Long]
def stamp(dep: String): Long = {
@ -679,6 +681,7 @@ object Defaults extends BuildCommon {
a.copy(classifier = Some(classifierString), `type` = Artifact.classifierType(classifierString), configurations = confs)
}
}
@deprecated("The configuration(s) should not be decided based on the classifier.", "1.0")
def artifactConfigurations(base: Artifact, scope: Configuration, classifier: Option[String]): Iterable[Configuration] =
classifier match {
case Some(c) => Artifact.classifierConf(c) :: Nil
@ -787,7 +790,7 @@ object Defaults extends BuildCommon {
fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"),
key in TaskGlobal := {
val s = streams.value
val cs = compilers.value
val cs: IncrementalCompilerImpl.Compilers = compilers.value match { case c: IncrementalCompilerImpl.Compilers => c }
val srcs = sources.value
val out = target.value
val sOpts = scalacOptions.value
@ -800,11 +803,7 @@ object Defaults extends BuildCommon {
val logger: Logger = s.log
val maxer = maxErrors.value
val spms = sourcePositionMappers.value
val reporter: xsbti.Reporter =
(compilerReporter in compile).value match {
case Some(r) => r
case _ => new LoggerReporter(maxer, logger, Compiler.foldMappers(spms))
}
val reporter = (compilerReporter in compile).value
(hasScala, hasJava) match {
case (true, _) =>
val options = sOpts ++ Opts.doc.externalAPI(xapis)
@ -822,7 +821,7 @@ object Defaults extends BuildCommon {
def mainRunTask = run <<= runTask(fullClasspath in Runtime, mainClass in run, runner in run)
def mainRunMainTask = runMain <<= runMainTask(fullClasspath in Runtime, runner in run)
def discoverMainClasses(analysis: Analysis): Seq[String] =
def discoverMainClasses(analysis: CompileAnalysis): Seq[String] =
Discovery.applications(Tests.allDefs(analysis)).collect({ case (definition, discovered) if discovered.hasMain => definition.name }).sorted
def consoleProjectTask = (state, streams, initialCommands in consoleProject) map { (state, s, extra) => ConsoleProject(state, extra)(s.log); println() }
@ -830,7 +829,7 @@ object Defaults extends BuildCommon {
def consoleQuickTask = consoleTask(externalDependencyClasspath, consoleQuick)
def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[_]): Initialize[Task[Unit]] =
(compilers in task, classpath in task, scalacOptions in task, initialCommands in task, cleanupCommands in task, taskTemporaryDirectory in task, scalaInstance in task, streams) map {
(cs, cp, options, initCommands, cleanup, temp, si, s) =>
case (cs: IncrementalCompilerImpl.Compilers, cp, options, initCommands, cleanup, temp, si, s) =>
val cpFiles = data(cp)
val fullcp = (cpFiles ++ si.allJars).distinct
val loader = sbt.internal.inc.classpath.ClasspathUtilities.makeLoader(fullcp, si, IO.createUniqueDirectory(temp))
@ -850,10 +849,10 @@ object Defaults extends BuildCommon {
@deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0")
def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings)
def compileTask: Initialize[Task[Analysis]] = Def.task {
val setup: Compiler.IncSetup = compileIncSetup.value
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
val setup: Setup = compileIncSetup.value
// TODO - expose bytecode manipulation phase.
val analysisResult: Compiler.CompileResult = manipulateBytecode.value
val analysisResult: CompileResult = manipulateBytecode.value
if (analysisResult.hasModified) {
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile)
store.set(analysisResult.analysis, analysisResult.setup)
@ -862,52 +861,70 @@ object Defaults extends BuildCommon {
}
def compileIncrementalTask = Def.task {
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value, previousCompile.value, (compilerReporter in compile).value)
compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value)
}
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Compiler.Inputs, previous: Compiler.PreviousAnalysis, reporter: Option[xsbti.Reporter]): Compiler.CompileResult =
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Inputs): CompileResult =
{
lazy val x = s.text(ExportStream)
def onArgs(cs: Compiler.Compilers) = cs.copy(scalac = cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac /*.onArgs(exported(x, "javac"))*/ )
val i = InputsWithPrevious(ci.copy(compilers = onArgs(ci.compilers)), previous)
try reporter match {
case Some(reporter) => Compiler.compile(i, s.log, reporter)
case None => Compiler.compile(i, s.log)
}
def onArgs(cs: IncrementalCompilerImpl.Compilers) = cs.copy(scalac = cs.scalac.onArgs(exported(x, "scalac")), javac = cs.javac /*.onArgs(exported(x, "javac"))*/ )
val compilers: IncrementalCompilerImpl.Compilers = ci.compilers match { case compilers: IncrementalCompilerImpl.Compilers => compilers }
val i = ci.withCompilers(onArgs(compilers))
try Compiler.compile(i, s.log)
finally x.close() // workaround for #937
}
def compileIncSetupTask = Def.task {
Compiler.IncSetup(
analysisMap(dependencyClasspath.value),
definesClass.value,
val dc: File => DefinesClass = {
val dc = definesClass.value
f => new DefinesClass { override def apply(className: String): Boolean = dc(f)(className) }
}
new Setup(
f1(t => o2m(analysisMap(dependencyClasspath.value)(t))),
f1(dc),
(skip in compile).value,
// TODO - this is kind of a bad way to grab the cache directory for streams...
streams.value.cacheDirectory / compileAnalysisFilename.value,
compilerCache.value,
incOptions.value)
incOptions.value,
(compilerReporter in compile).value,
// TODO - task / setting for extra,
Array.empty)
}
def compileInputsSettings: Seq[Setting[_]] = {
Seq(
compileOptions := new CompileOptions(
(classDirectory.value +: data(dependencyClasspath.value)).toArray,
sources.value.toArray,
classDirectory.value,
scalacOptions.value.toArray,
javacOptions.value.toArray,
maxErrors.value,
f1(Compiler.foldMappers(sourcePositionMappers.value)),
compileOrder.value),
compilerReporter := new LoggerReporter(maxErrors.value, streams.value.log, Compiler.foldMappers(sourcePositionMappers.value)),
compileInputs := new Inputs(
compilers.value,
compileOptions.value,
compileIncSetup.value,
previousCompile.value)
)
}
def compileInputsSettings: Seq[Setting[_]] =
Seq(compileInputs := {
val cp = classDirectory.value +: data(dependencyClasspath.value)
Compiler.inputs(cp, sources.value, classDirectory.value, scalacOptions.value, javacOptions.value,
maxErrors.value, sourcePositionMappers.value, compileOrder.value)(compilers.value, compileIncSetup.value, streams.value.log)
},
compilerReporter := None)
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
previousCompile := {
val setup: Compiler.IncSetup = compileIncSetup.value
val setup = compileIncSetup.value
val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile)
store.get() match {
case Some((an, setup)) => Compiler.PreviousAnalysis(an, Some(setup))
case None => Compiler.PreviousAnalysis(Analysis.empty(nameHashing = setup.incOptions.nameHashing), None)
case Some((an, setup)) => new PreviousResult(Maybe.just(an), Maybe.just(setup))
case None => new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
}
}
)
def printWarningsTask: Initialize[Task[Unit]] =
(streams, compile, maxErrors, sourcePositionMappers) map { (s, analysis, max, spms) =>
val problems = analysis.infos.allInfos.values.flatMap(i => i.reportedProblems ++ i.unreportedProblems)
val reporter = new LoggerReporter(max, s.log, Compiler.foldMappers(spms))
problems foreach { p => reporter.display(p.position, p.message, p.severity) }
(streams, compile, maxErrors, sourcePositionMappers) map {
case (s, analysis: Analysis, max, spms) =>
val problems = analysis.infos.allInfos.values.flatMap(i => i.reportedProblems ++ i.unreportedProblems)
val reporter = new LoggerReporter(max, s.log, Compiler.foldMappers(spms))
problems foreach { p => reporter.display(p.position, p.message, p.severity) }
}
def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID =
@ -925,7 +942,7 @@ object Defaults extends BuildCommon {
def discoverPlugins: Initialize[Task[Set[String]]] = (compile, sbtPlugin, streams) map { (analysis, isPlugin, s) => if (isPlugin) discoverSbtPlugins(analysis, s.log) else Set.empty }
@deprecated("Use PluginDiscovery.sourceModuleNames[Plugin].", "0.13.2")
def discoverSbtPlugins(analysis: Analysis, log: Logger): Set[String] =
def discoverSbtPlugins(analysis: CompileAnalysis, log: Logger): Set[String] =
PluginDiscovery.sourceModuleNames(analysis, classOf[Plugin].getName).toSet
def copyResourcesTask =
@ -1133,6 +1150,8 @@ object Classpaths {
resolvers :== Nil,
retrievePattern :== Resolver.defaultRetrievePattern,
transitiveClassifiers :== Seq(SourceClassifier, DocClassifier),
sourceArtifactTypes :== Artifact.DefaultSourceTypes,
docArtifactTypes :== Artifact.DefaultDocTypes,
sbtDependency := {
val app = appConfiguration.value
val id = app.provider.id
@ -1194,7 +1213,12 @@ object Classpaths {
projectID <<= defaultProjectID,
projectID <<= pluginProjectID,
projectDescriptors <<= depMap,
updateConfiguration := new UpdateConfiguration(retrieveConfiguration.value, false, ivyLoggingLevel.value),
updateConfiguration := {
// Tell the UpdateConfiguration which artifact types are special (for sources and javadocs)
val specialArtifactTypes = sourceArtifactTypes.value union docArtifactTypes.value
// By default, to retrieve all types *but* these (it's assumed that everything else is binary/resource)
new UpdateConfiguration(retrieveConfiguration.value, false, ivyLoggingLevel.value, ArtifactTypeFilter.forbid(specialArtifactTypes))
},
updateOptions := (updateOptions in Global).value,
retrieveConfiguration := { if (retrieveManaged.value) Some(new RetrieveConfiguration(managedDirectory.value, retrievePattern.value, retrieveManagedSync.value, configurationsToRetrieve.value)) else None },
ivyConfiguration <<= mkIvyConfiguration,
@ -1248,6 +1272,8 @@ object Classpaths {
val mod = (classifiersModule in updateClassifiers).value
val c = updateConfiguration.value
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
@ -1255,7 +1281,7 @@ object Classpaths {
val uwConfig = (unresolvedWarningConfiguration in update).value
val logicalClock = LogicalClock(state.value.hashCode)
val depDir = dependencyCacheDirectory.value
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c, ivyScala.value), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), s.log)
IvyActions.updateClassifiers(is, GetClassifiersConfiguration(mod, excludes, c.copy(artifactFilter = c.artifactFilter.invert), ivyScala.value, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), Vector.empty, s.log)
}
} tag (Tags.Update, Tags.Network)
)
@ -1327,12 +1353,14 @@ object Classpaths {
val mod = classifiersModule.value
val c = updateConfiguration.value
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val out = is.withIvy(s.log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (unresolvedWarningConfiguration in update).value
val depDir = dependencyCacheDirectory.value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
val noExplicitCheck = ivyScala.value.map(_.copy(checkExplicit = false))
IvyActions.transitiveScratch(is, "sbt", GetClassifiersConfiguration(mod, excludes, c, noExplicitCheck), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), s.log)
IvyActions.transitiveScratch(is, "sbt", GetClassifiersConfiguration(mod, excludes, c.copy(artifactFilter = c.artifactFilter.invert), noExplicitCheck, srcTypes, docTypes), uwConfig, LogicalClock(state.value.hashCode), Some(depDir), s.log)
}
} tag (Tags.Update, Tags.Network)
)) ++ Seq(bootIvyConfiguration := (ivyConfiguration in updateSbtClassifiers).value)
@ -1572,7 +1600,7 @@ object Classpaths {
new RawRepository(new ProjectResolver(ProjectResolver.InterProject, m))
}
def analyzed[T](data: T, analysis: Analysis) = Attributed.blank(data).put(Keys.analysis, analysis)
def analyzed[T](data: T, analysis: CompileAnalysis) = Attributed.blank(data).put(Keys.analysis, analysis)
def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
val x1 = compile.value
val x2 = copyResources.value

View File

@ -9,9 +9,9 @@ import scala.concurrent.duration.{ FiniteDuration, Duration }
import Def.ScopedKey
import sbt.internal.util.complete._
import sbt.internal.inc.Locate.DefinesClass
import sbt.internal.inc.{ Analysis, ClasspathOptions, IncOptions, MixedAnalyzingCompiler, ScalaInstance }
import sbt.internal.inc.{ ClasspathOptions, MixedAnalyzingCompiler, ScalaInstance }
import std.TaskExtra._
import xsbti.compile.{ CompileOrder, GlobalsCache }
import xsbti.compile.{ CompileAnalysis, CompileOptions, CompileOrder, Compilers, CompileResult, GlobalsCache, IncOptions, Inputs, PreviousResult, Setup }
import scala.xml.{ Node => XNode, NodeSeq }
import org.apache.ivy.core.module.{ descriptor, id }
import descriptor.ModuleDescriptor, id.ModuleRevisionId
@ -103,7 +103,7 @@ object Keys {
// Command keys
val historyPath = SettingKey(BasicKeys.historyPath)
val shellPrompt = SettingKey(BasicKeys.shellPrompt)
val analysis = AttributeKey[Analysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
val watch = SettingKey(BasicKeys.watch)
val pollInterval = SettingKey[Int]("poll-interval", "Interval between checks for modified sources by the continuous execution command.", BMinusSetting)
val watchSources = TaskKey[Seq[File]]("watch-sources", "Defines the sources in this project for continuous execution to watch for changes.", BMinusSetting)
@ -165,7 +165,8 @@ object Keys {
val compileOrder = SettingKey[CompileOrder]("compile-order", "Configures the order in which Java and sources within a single compilation are compiled. Valid values are: JavaThenScala, ScalaThenJava, or Mixed.", BPlusSetting)
val initialCommands = SettingKey[String]("initial-commands", "Initial commands to execute when starting up the Scala interpreter.", AMinusSetting)
val cleanupCommands = SettingKey[String]("cleanup-commands", "Commands to execute before the Scala interpreter exits.", BMinusSetting)
val compileInputs = TaskKey[Compiler.Inputs]("compile-inputs", "Collects all inputs needed for compilation.", DTask)
val compileOptions = TaskKey[CompileOptions]("compile-options", "Collects basic options to configure compilers", DTask)
val compileInputs = TaskKey[Inputs]("compile-inputs", "Collects all inputs needed for compilation.", DTask)
val scalaHome = SettingKey[Option[File]]("scala-home", "If Some, defines the local Scala installation to use for compilation, running, and testing.", ASetting)
val scalaInstance = TaskKey[ScalaInstance]("scala-instance", "Defines the Scala instance to use for compilation, running, and testing.", DTask)
val scalaOrganization = SettingKey[String]("scala-organization", "Organization/group ID of the Scala used in the project. Default value is 'org.scala-lang'. This is an advanced setting used for clones of the Scala Language. It should be disregarded in standard use cases.", CSetting)
@ -185,13 +186,13 @@ object Keys {
val console = TaskKey[Unit]("console", "Starts the Scala interpreter with the project classes on the classpath.", APlusTask)
val consoleQuick = TaskKey[Unit]("console-quick", "Starts the Scala interpreter with the project dependencies on the classpath.", ATask, console)
val consoleProject = TaskKey[Unit]("console-project", "Starts the Scala interpreter with the sbt and the build definition on the classpath and useful imports.", AMinusTask)
val compile = TaskKey[Analysis]("compile", "Compiles sources.", APlusTask)
val manipulateBytecode = TaskKey[Compiler.CompileResult]("manipulateBytecode", "Manipulates generated bytecode", BTask)
val compileIncremental = TaskKey[Compiler.CompileResult]("compileIncremental", "Actually runs the incremental compilation", DTask)
val previousCompile = TaskKey[Compiler.PreviousAnalysis]("readAnalysis", "Read the incremental compiler analysis from disk", DTask)
val compilers = TaskKey[Compiler.Compilers]("compilers", "Defines the Scala and Java compilers to use for compilation.", DTask)
val compile = TaskKey[CompileAnalysis]("compile", "Compiles sources.", APlusTask)
val manipulateBytecode = TaskKey[CompileResult]("manipulateBytecode", "Manipulates generated bytecode", BTask)
val compileIncremental = TaskKey[CompileResult]("compileIncremental", "Actually runs the incremental compilation", DTask)
val previousCompile = TaskKey[PreviousResult]("readAnalysis", "Read the incremental compiler analysis from disk", DTask)
val compilers = TaskKey[Compilers]("compilers", "Defines the Scala and Java compilers to use for compilation.", DTask)
val compileAnalysisFilename = TaskKey[String]("compileAnalysisFilename", "Defines the filename used to store the incremental compiler analysis file (inside the streams cacheDirectory).", DTask)
val compileIncSetup = TaskKey[Compiler.IncSetup]("inc-compile-setup", "Configures aspects of incremental compilation.", DTask)
val compileIncSetup = TaskKey[Setup]("inc-compile-setup", "Configures aspects of incremental compilation.", DTask)
val compilerCache = TaskKey[GlobalsCache]("compiler-cache", "Cache of scala.tools.nsc.Global instances. This should typically be cached so that it isn't recreated every task run.", DTask)
val stateCompilerCache = AttributeKey[GlobalsCache]("compiler-cache", "Internal use: Global cache.")
val definesClass = TaskKey[DefinesClass]("defines-class", "Internal use: provides a function that determines whether the provided file contains a given class.", Invisible)
@ -306,6 +307,8 @@ object Keys {
val updateClassifiers = TaskKey[UpdateReport]("update-classifiers", "Resolves and optionally retrieves classified artifacts, such as javadocs and sources, for dependency definitions, transitively.", BPlusTask, update)
val transitiveClassifiers = SettingKey[Seq[String]]("transitive-classifiers", "List of classifiers used for transitively obtaining extra artifacts for sbt or declared dependencies.", BSetting)
val updateSbtClassifiers = TaskKey[UpdateReport]("update-sbt-classifiers", "Resolves and optionally retrieves classifiers, such as javadocs and sources, for sbt, transitively.", BPlusTask, updateClassifiers)
val sourceArtifactTypes = SettingKey[Set[String]]("source-artifact-types", "Ivy artifact types that correspond to source artifacts. Used by IDEs to resolve these resources.", BSetting)
val docArtifactTypes = SettingKey[Set[String]]("doc-artifact-types", "Ivy artifact types that correspond to javadoc artifacts. Used by IDEs to resolve these resources.", BSetting)
val publishConfiguration = TaskKey[PublishConfiguration]("publish-configuration", "Configuration for publishing to a repository.", DTask)
val publishLocalConfiguration = TaskKey[PublishConfiguration]("publish-local-configuration", "Configuration for publishing to the local Ivy repository.", DTask)
@ -416,7 +419,7 @@ object Keys {
private[sbt] val taskCancelStrategy = SettingKey[State => TaskCancellationStrategy]("taskCancelStrategy", "Experimental task cancellation handler.", DTask)
 // Experimental in sbt 0.13.2 to enable grabbing semantic compile failures.
private[sbt] val compilerReporter = TaskKey[Option[xsbti.Reporter]]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask)
private[sbt] val compilerReporter = TaskKey[xsbti.Reporter]("compilerReporter", "Experimental hook to listen (or send) compilation failure messages.", DTask)
val triggeredBy = Def.triggeredBy
val runBefore = Def.runBefore

View File

@ -12,7 +12,6 @@ import java.net.{ URI, URL }
import compiler.{ Eval, EvalImports }
import scala.annotation.tailrec
import collection.mutable
import Compiler.Compilers
import sbt.internal.inc.{ Analysis, ClasspathOptions, FileValueCache, Locate, ModuleUtilities }
import sbt.internal.inc.classpath.ClasspathUtilities
import Project.{ inScope, makeSettings }
@ -30,6 +29,7 @@ import Locate.DefinesClass
import sbt.io.{ GlobFilter, IO, Path }
import sbt.internal.io.Alternatives
import sbt.util.Logger
import xsbti.compile.Compilers
object Load {
// note that there is State passed in but not pulled out

View File

@ -7,7 +7,8 @@ import java.net.URL
import Attributed.data
import Build.analyzed
import xsbt.api.{ Discovered, Discovery }
import sbt.internal.inc.{ Analysis, ModuleUtilities }
import xsbti.compile.CompileAnalysis
import sbt.internal.inc.ModuleUtilities
import sbt.io.IO
@ -48,7 +49,7 @@ object PluginDiscovery {
}
/** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. */
def discoverSourceAll(analysis: Analysis): DiscoveredNames =
def discoverSourceAll(analysis: CompileAnalysis): DiscoveredNames =
{
def discover[T](implicit classTag: reflect.ClassTag[T]): Seq[String] =
sourceModuleNames(analysis, classTag.runtimeClass.getName)
@ -92,7 +93,7 @@ object PluginDiscovery {
).distinct
/** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. */
def sourceModuleNames(analysis: Analysis, subclasses: String*): Seq[String] =
def sourceModuleNames(analysis: CompileAnalysis, subclasses: String*): Seq[String] =
{
val subclassSet = subclasses.toSet
val ds = Discovery(subclassSet, Set.empty)(Tests.allDefs(analysis))

View File

@ -9,10 +9,10 @@ object Dependencies {
lazy val scala211 = "2.11.7"
// sbt modules
val utilVersion = "0.1.0-M5"
val utilVersion = "0.1.0-M8"
val ioVersion = "1.0.0-M3"
val incremenalcompilerVersion = "0.1.0-M1-168cb7a4877917e01917e35b9b82a62afe5c2a01"
val librarymanagementVersion = "0.1.0-M4"
val incrementalcompilerVersion = "0.1.0-M3"
val librarymanagementVersion = "0.1.0-M7"
lazy val sbtIO = "org.scala-sbt" %% "io" % ioVersion
lazy val utilCollection = "org.scala-sbt" %% "util-collection" % utilVersion
lazy val utilLogging = "org.scala-sbt" %% "util-logging" % utilVersion
@ -28,13 +28,13 @@ object Dependencies {
lazy val rawLauncher = "org.scala-sbt" % "launcher" % "1.0.0-M1"
lazy val testInterface = "org.scala-sbt" % "test-interface" % "1.0"
lazy val incrementalcompiler = "org.scala-sbt" %% "incrementalcompiler" % incremenalcompilerVersion
lazy val incrementalcompilerCompile = "org.scala-sbt" %% "incrementalcompiler-compile" % incremenalcompilerVersion
lazy val compilerInterface = "org.scala-sbt" % "compiler-interface" % incremenalcompilerVersion
lazy val compilerBrdige = "org.scala-sbt" %% "compiler-bridge" % incremenalcompilerVersion
lazy val compilerClasspath = "org.scala-sbt" %% "incrementalcompiler-classpath" % incremenalcompilerVersion
lazy val compilerApiInfo = "org.scala-sbt" %% "incrementalcompiler-apiinfo" % incremenalcompilerVersion
lazy val compilerIvyIntegration = "org.scala-sbt" %% "incrementalcompiler-ivy-integration" % incremenalcompilerVersion
lazy val incrementalcompiler = "org.scala-sbt" %% "incrementalcompiler" % incrementalcompilerVersion
lazy val incrementalcompilerCompile = "org.scala-sbt" %% "incrementalcompiler-compile" % incrementalcompilerVersion
lazy val compilerInterface = "org.scala-sbt" % "compiler-interface" % incrementalcompilerVersion
lazy val compilerBrdige = "org.scala-sbt" %% "compiler-bridge" % incrementalcompilerVersion
lazy val compilerClasspath = "org.scala-sbt" %% "incrementalcompiler-classpath" % incrementalcompilerVersion
lazy val compilerApiInfo = "org.scala-sbt" %% "incrementalcompiler-apiinfo" % incrementalcompilerVersion
lazy val compilerIvyIntegration = "org.scala-sbt" %% "incrementalcompiler-ivy-integration" % incrementalcompilerVersion
lazy val jline = "jline" % "jline" % "2.11"
lazy val ivy = "org.scala-sbt.ivy" % "ivy" % "2.3.0-sbt-d592b1b0f77cf706e882b1b8e0162dee28165fb2"

View File

@ -431,7 +431,7 @@ object Import {
type SelfFirstLoader = sbt.internal.inc.classpath.SelfFirstLoader
}
val IncOptions = sbt.internal.inc.IncOptions
type IncOptions = xsbti.compile.IncOptions
type Analysis = sbt.internal.inc.Analysis
val Analysis = sbt.internal.inc.Analysis
val ClassfileManager = sbt.internal.inc.ClassfileManager

View File

@ -34,8 +34,6 @@ package object sbt extends sbt.std.TaskExtra with sbt.internal.util.Types with s
final val Runtime = C.Runtime
final val IntegrationTest = C.IntegrationTest
final val Default = C.Default
final val Docs = C.Docs
final val Sources = C.Sources
final val Provided = C.Provided
// java.lang.System is more important, so don't alias this one
// final val System = C.System

View File

@ -2,11 +2,11 @@ logLevel := Level.Debug
incOptions ~= { _.withApiDebug(true) }
TaskKey[Unit]("show-apis") <<= (compile in Compile, scalaSource in Compile, javaSource in Compile) map { (a: sbt.internal.inc.Analysis, scalaSrc: java.io.File, javaSrc: java.io.File) =>
TaskKey[Unit]("show-apis") <<= (compile in Compile, scalaSource in Compile, javaSource in Compile) map { case (a: sbt.internal.inc.Analysis, scalaSrc: java.io.File, javaSrc: java.io.File) =>
val aApi = a.apis.internalAPI(scalaSrc / "A.scala").api
val jApi = a.apis.internalAPI(javaSrc / "test/J.java").api
import xsbt.api.DefaultShowAPI
import DefaultShowAPI._
DefaultShowAPI.showSource.show(aApi)
DefaultShowAPI.showSource.show(jApi)
DefaultShowAPI(aApi)
DefaultShowAPI(jApi)
}

View File

@ -1,6 +1,6 @@
// checks number of compilation iterations performed since last `clean` run
InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { (args: Seq[String], a: sbt.internal.inc.Analysis) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: sbt.internal.inc.Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
val allCompilationsSize = a.compilations.allCompilations.size

View File

@ -1 +1 @@
incOptions := IncOptions.Default
incOptions := xsbti.compile.IncOptionsUtil.defaultIncOptions

View File

@ -7,7 +7,7 @@ incOptions := incOptions.value.withNameHashing(false).withAntStyle(true)
* b) checks overall number of compilations performed
*/
TaskKey[Unit]("check-compilations") := {
val analysis = (compile in Compile).value
val analysis = (compile in Compile).value match { case analysis: Analysis => analysis }
val srcDir = (scalaSource in Compile).value
def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f
val allCompilations = analysis.compilations.allCompilations

View File

@ -1,5 +1,5 @@
TaskKey[Unit]("verify-binary-deps") <<= (compile in Compile, classDirectory in Compile, baseDirectory) map {
(a: sbt.internal.inc.Analysis, classDir: java.io.File, base: java.io.File) =>
case (a: sbt.internal.inc.Analysis, classDir: java.io.File, base: java.io.File) =>
val nestedPkgClass = classDir / "test/nested.class"
val fooSrc = base / "src/main/scala/test/nested/Foo.scala"
assert(!a.relations.binaryDeps(fooSrc).contains(nestedPkgClass), a.relations.toString)

View File

@ -2,7 +2,7 @@ logLevel := Level.Debug
// dumps analysis into target/analysis-dump.txt file
InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { (args: Seq[String], a: Analysis) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))

View File

@ -1,5 +1,5 @@
InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { (args: Seq[String], a: Analysis) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))

View File

@ -14,7 +14,7 @@ object TestPlugin extends AutoPlugin {
import autoImport._
override def projectSettings = Seq(
savedReporter := new CollectingReporter,
compilerReporter in (Compile, compile) := Some(savedReporter.value),
compilerReporter in (Compile, compile) := savedReporter.value,
problems := savedReporter.value.problems
)
}

View File

@ -4,9 +4,9 @@ scalaVersion := "2.10.4"
crossScalaVersions := List("2.10.4", "2.11.0")
incOptions := incOptions.value.withNewClassfileManager(
ClassfileManager.transactional(
incOptions := incOptions.value.withClassfileManagerType(
xsbti.Maybe.just(new xsbti.compile.TransactionalManagerType(
crossTarget.value / "classes.bak",
(streams in (Compile, compile)).value.log
)
))
)

View File

@ -1,5 +1,5 @@
InputKey[Unit]("check-number-of-compiler-iterations") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { (args: Seq[String], a: Analysis) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))

View File

@ -4,7 +4,7 @@ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iteratio
checkIterations := {
val expected: Int = (Space ~> NatBasic).parsed
val actual: Int = (compile in Compile).value.compilations.allCompilations.size
val actual: Int = (compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }
assert(expected == actual, s"Expected $expected compilations, got $actual")
}

View File

@ -4,6 +4,6 @@ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iteratio
checkIterations := {
val expected: Int = (Space ~> NatBasic).parsed
val actual: Int = (compile in Compile).value.compilations.allCompilations.size
val actual: Int = (compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }
assert(expected == actual, s"Expected $expected compilations, got $actual")
}

View File

@ -4,7 +4,7 @@ incOptions := incOptions.value.withNameHashing(false)
lazy val verifyDeps = taskKey[Unit]("verify inherited dependencies are properly extracted")
verifyDeps := {
val a = compile.in(Compile).value
val a = compile.in(Compile).value match { case a: Analysis => a }
val baseDir = baseDirectory.value
def relative(f: java.io.File): java.io.File = f.relativeTo(baseDir) getOrElse f
def toFile(s: String) = relative(baseDir / (s + ".scala"))

View File

@ -4,7 +4,7 @@ incOptions := incOptions.value.withNameHashing(false)
lazy val verifyDeps = taskKey[Unit]("verify inherited dependencies are properly extracted")
verifyDeps := {
val a = compile.in(Compile).value
val a = compile.in(Compile).value match { case a: Analysis => a }
same(a.relations.publicInherited.internal.forwardMap, expectedDeps.forwardMap)
}

View File

@ -1,6 +1,6 @@
// Check that a file has not been recompiled during last compilation
InputKey[Unit]("check-not-recompiled") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { (args: Seq[String], a: Analysis) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val fileCompilation = a.apis.internal.collect { case (file, src) if file.name.endsWith(args(0)) => src.compilation }.head
val lastCompilation = a.compilations.allCompilations.last

View File

@ -1,6 +1,6 @@
name := "test"
TaskKey[Unit]("check-same") <<= compile in Configurations.Compile map { analysis =>
TaskKey[Unit]("check-same") <<= compile in Configurations.Compile map { case analysis: Analysis =>
analysis.apis.internal foreach { case (_, api) =>
assert( xsbt.api.SameAPI(api.api, api.api) )
}

View File

@ -4,7 +4,7 @@ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iteratio
checkIterations := {
val expected: Int = (Space ~> NatBasic).parsed
val actual: Int = (compile in Compile).value.compilations.allCompilations.size
val actual: Int = (compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }
assert(expected == actual, s"Expected $expected compilations, got $actual")
}

View File

@ -4,7 +4,7 @@ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iteratio
checkIterations := {
val expected: Int = (Space ~> NatBasic).parsed
val actual: Int = (compile in Compile).value.compilations.allCompilations.size
val actual: Int = (compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }
assert(expected == actual, s"Expected $expected compilations, got $actual")
}

View File

@ -6,7 +6,7 @@ val checkIterations = inputKey[Unit]("Verifies the accumlated number of iteratio
checkIterations := {
val expected: Int = (Space ~> NatBasic).parsed
val actual: Int = (compile in Compile).value.compilations.allCompilations.size
val actual: Int = (compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }
assert(expected == actual, s"Expected $expected compilations, got $actual")
}

View File

@ -13,7 +13,7 @@ incOptions := incOptions.value.withRecompileAllFraction(1.0)
* b) checks overall number of compilations performed
*/
TaskKey[Unit]("check-compilations") := {
val analysis = (compile in Compile).value
val analysis = (compile in Compile).value match { case a: Analysis => a }
val srcDir = (scalaSource in Compile).value
def relative(f: java.io.File): java.io.File = f.relativeTo(srcDir) getOrElse f
val allCompilations = analysis.compilations.allCompilations