mirror of https://github.com/sbt/sbt.git
Merge pull request #3228 from eed3si9n/wip/bump3
Adjust to Zinc and lm changes
commit 6e1e6ea384

@@ -1,95 +0,0 @@
/* sbt -- Simple Build Tool
 * Copyright 2010 Mark Harrah
 */
package sbt

import sbt.internal.inc.javac.JavaTools
import sbt.internal.inc.{
  AnalyzingCompiler,
  ComponentCompiler,
  ScalaInstance,
  ZincComponentManager,
  IncrementalCompilerImpl
}
import xsbti.{ Logger => _, _ }
import xsbti.compile.{ ClasspathOptions, Compilers, CompileResult, Inputs }
import java.io.File

import sbt.internal.librarymanagement.IvyConfiguration
import sbt.librarymanagement.{ ModuleID, VersionNumber }
import sbt.util.Logger

object Compiler {
  val DefaultMaxErrors = 100

  private[sbt] def defaultCompilerBridgeSource(sv: String): ModuleID =
    VersionNumber(sv) match {
      case VersionNumber(Seq(2, y, _), _, _) if y <= 10 => scalaCompilerBridgeSource2_10
      case VersionNumber(Seq(2, y, _), _, _) if y == 11 => scalaCompilerBridgeSource2_11
      case _ => scalaCompilerBridgeSource2_12
    }

  private[this] def scalaCompilerBridgeSource(suffix: String): ModuleID =
    ModuleID(xsbti.ArtifactInfo.SbtOrganization,
             s"compiler-bridge_$suffix",
             ComponentCompiler.incrementalVersion)
      .withConfigurations(Some("component"))
      .sources()

  private[sbt] def scalaCompilerBridgeSource2_10: ModuleID = scalaCompilerBridgeSource("2.10")
  private[sbt] def scalaCompilerBridgeSource2_11: ModuleID = scalaCompilerBridgeSource("2.11")
  private[sbt] def scalaCompilerBridgeSource2_12: ModuleID = scalaCompilerBridgeSource("2.12")

  def compilers(
      cpOptions: ClasspathOptions,
      ivyConfiguration: IvyConfiguration
  )(implicit app: AppConfiguration, log: Logger): Compilers = {
    val scalaProvider = app.provider.scalaProvider
    val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
    val sourceModule = scalaCompilerBridgeSource2_12
    compilers(instance, cpOptions, None, ivyConfiguration, sourceModule)
  }

  // TODO: Get java compiler
  def compilers(
      instance: ScalaInstance,
      cpOptions: ClasspathOptions,
      javaHome: Option[File],
      ivyConfiguration: IvyConfiguration,
      sourcesModule: ModuleID
  )(implicit app: AppConfiguration, log: Logger): Compilers = {
    val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, sourcesModule)
    val javac = JavaTools.directOrFork(instance, cpOptions, javaHome)
    new Compilers(scalac, javac)
  }

  def scalaCompiler(
      instance: ScalaInstance,
      cpOptions: ClasspathOptions,
      javaHome: Option[File],
      ivyConfiguration: IvyConfiguration,
      sourcesModule: ModuleID
  )(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = {
    val launcher = app.provider.scalaProvider.launcher
    val componentManager = new ZincComponentManager(launcher.globalLock,
                                                    app.provider.components,
                                                    Option(launcher.ivyHome),
                                                    log)
    val provider =
      ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, sourcesModule)
    new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
  }

  private val compiler = new IncrementalCompilerImpl

  def compile(in: Inputs, log: Logger): CompileResult = compiler.compile(in, log)

  private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
    mappers.foldRight({ p: A =>
      p
    }) { (mapper, mappers) =>
      { p: A =>
        mapper(p).getOrElse(mappers(p))
      }
    }
}

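The Compiler object deleted above duplicated work that Zinc now does itself. In particular, the per-Scala-version choice of compiler-bridge module moves behind Zinc; a minimal sketch of the replacement lookup, assuming the ZincUtil API that this commit switches to (see the Defaults.scala hunk further down):

import sbt.internal.inc.ZincUtil
import sbt.librarymanagement.ModuleID

// ZincUtil picks the compiler-bridge module for a given Scala version,
// replacing Compiler.defaultCompilerBridgeSource from the deleted file above.
val bridge: ModuleID = ZincUtil.getDefaultBridgeModule("2.12.2")
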
@@ -27,6 +27,9 @@ object BuildPaths {
  "The base directory for caching dependency resolution.",
  DSetting)

val globalZincDirectory =
  AttributeKey[File]("global-zinc-directory", "The base directory for Zinc internals.", DSetting)

import sbt.io.syntax._

def getGlobalBase(state: State): File = {

@@ -59,6 +62,9 @@ object BuildPaths {
  DependencyBaseProperty,
  defaultDependencyBase(globalBase))(state)

def getZincDirectory(state: State, globalBase: File): File =
  fileSetting(globalZincDirectory, GlobalZincProperty, defaultGlobalZinc(globalBase))(state)

private[this] def fileSetting(stateKey: AttributeKey[File], property: String, default: File)(
    state: State): File =
  getFileSetting(stateKey, property, default)(state)

@@ -81,6 +87,7 @@ object BuildPaths {
private[this] def defaultStaging(globalBase: File) = globalBase / "staging"
private[this] def defaultGlobalPlugins(globalBase: File) = globalBase / PluginsDirectoryName
private[this] def defaultDependencyBase(globalBase: File) = globalBase / "dependency"
private[this] def defaultGlobalZinc(globalBase: File) = globalBase / "zinc"

def configurationSources(base: File): Seq[File] = (base * (GlobFilter("*.sbt") - ".sbt")).get
def pluginDirectory(definitionBase: File) = definitionBase / PluginsDirectoryName

@@ -98,6 +105,7 @@ object BuildPaths {
final val GlobalPluginsProperty = "sbt.global.plugins"
final val GlobalSettingsProperty = "sbt.global.settings"
final val DependencyBaseProperty = "sbt.dependency.base"
final val GlobalZincProperty = "sbt.global.zinc"

def crossPath(base: File, instance: xsbti.compile.ScalaInstance): File =
  base / ("scala_" + instance.version)

@@ -6,6 +6,7 @@ package sbt
import Def.{ Initialize, ScopedKey, Setting, SettingsDefinition }
import java.io.{ File, PrintWriter }
import java.net.{ URI, URL }
import java.util.Optional
import java.util.concurrent.{ TimeUnit, Callable }
import Keys._
import org.apache.ivy.core.module.{ descriptor, id }, descriptor.ModuleDescriptor,

@@ -20,6 +21,8 @@ import Project.{
}
import sbt.internal._
import sbt.internal.CommandStrings.ExportStream
import sbt.internal.inc.ZincUtil
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.io.WatchState
import sbt.internal.librarymanagement._
import sbt.internal.librarymanagement.mavenint.{

@@ -59,7 +62,7 @@ import sbt.librarymanagement.Configurations.{
import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion }
import sbt.librarymanagement.{ `package` => _, _ }
import sbt.librarymanagement.syntax._
import sbt.util.InterfaceUtil.{ f1, o2m }
import sbt.util.InterfaceUtil.f1
import sbt.util._
import sbt.util.CacheImplicits._
import scala.concurrent.duration.FiniteDuration

@@ -70,11 +73,14 @@ import sjsonnew.{ IsoLList, JsonFormat, LList, LNil }, LList.:*:
import std.TaskExtra._
import testing.{ Framework, Runner, AnnotatedFingerprint, SubclassFingerprint }
import xsbti.compile.IncToolOptionsUtil
import xsbti.{ CrossValue, Maybe }
import xsbti.CrossValue

// incremental compiler
import xsbt.api.Discovery
import xsbti.compile.{
  ClassFileManagerType,
  ClasspathOptionsUtil,
  CompilerCache,
  Compilers,
  CompileAnalysis,
  CompileOptions,

@@ -92,13 +98,11 @@ import xsbti.compile.{
import sbt.internal.inc.{
  AnalyzingCompiler,
  Analysis,
  CompilerCache,
  FileValueCache,
  Locate,
  LoggerReporter,
  MixedAnalyzingCompiler,
  ScalaInstance,
  ClasspathOptionsUtil
  ScalaInstance
}

object Defaults extends BuildCommon {

@@ -353,10 +357,11 @@ object Defaults extends BuildCommon {
)

def compileBase = inTask(console)(compilersSetting :: Nil) ++ compileBaseGlobal ++ Seq(
  incOptions := incOptions.value.withClassfileManagerType(
    Maybe.just(
      new TransactionalManagerType(crossTarget.value / "classes.bak", sbt.util.Logger.Null))
  ),
  incOptions := incOptions.value
    .withClassfileManagerType(
      Option(new TransactionalManagerType(crossTarget.value / "classes.bak",
                                          sbt.util.Logger.Null): ClassFileManagerType).toOptional
    ),
  scalaInstance := scalaInstanceTask.value,
  crossVersion := (if (crossPaths.value) CrossVersion.binary else Disabled()),
  scalaVersion := {

@@ -364,9 +369,7 @@ object Defaults extends BuildCommon {
    val sv = (sbtBinaryVersion in pluginCrossBuild).value
    val isPlugin = sbtPlugin.value
    if (isPlugin) {
      val x = scalaVersionFromSbtBinaryVersion(sv)
      println(s"scalaVersionFromSbtBinaryVersion($sv) = $x")
      x
      scalaVersionFromSbtBinaryVersion(sv)
    } else scalaV
  },
  sbtBinaryVersion in pluginCrossBuild := binarySbtVersion(

@@ -389,7 +392,7 @@ object Defaults extends BuildCommon {
      ModuleID(scalaOrganization.value, "dotty-sbt-bridge", scalaVersion.value)
        .withConfigurations(Some("component"))
        .sources()
    else Compiler.defaultCompilerBridgeSource(scalaVersion.value)
    else ZincUtil.getDefaultBridgeModule(scalaVersion.value)
  }
)
// must be a val: duplication detected by object identity

@@ -441,13 +444,26 @@ object Defaults extends BuildCommon {

def compilersSetting = {
  compilers := {
    val compilers = Compiler.compilers(
      scalaInstance.value,
      classpathOptions.value,
      javaHome.value,
      bootIvyConfiguration.value,
      scalaCompilerBridgeSource.value
    )(appConfiguration.value, streams.value.log)
    val st = state.value
    val g = BuildPaths.getGlobalBase(st)
    val zincDir = BuildPaths.getZincDirectory(st, g)
    val app = appConfiguration.value
    val launcher = app.provider.scalaProvider.launcher
    val scalac = ZincUtil.scalaCompiler(
      scalaInstance = scalaInstance.value,
      classpathOptions = classpathOptions.value,
      globalLock = launcher.globalLock,
      componentProvider = app.provider.components,
      secondaryCacheDir = Option(zincDir),
      ivyConfiguration = bootIvyConfiguration.value,
      compilerBridgeSource = scalaCompilerBridgeSource.value,
      scalaJarsTarget = zincDir,
      log = streams.value.log
    )
    val compilers = ZincUtil.compilers(instance = scalaInstance.value,
                                       classpathOptions = classpathOptions.value,
                                       javaHome = javaHome.value,
                                       scalac)
    val classLoaderCache = state.value.classLoaderCache
    if (java.lang.Boolean.getBoolean("sbt.disable.interface.classloader.cache")) compilers
    else {

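Pulled out of the task context for readability, the new compiler construction in compilersSetting is a two-step call into Zinc: build the AnalyzingCompiler with ZincUtil.scalaCompiler, then pair it with a Java compiler via ZincUtil.compilers. A sketch under the same parameter names used in the hunk above; the helper name mkCompilers is hypothetical:

import java.io.File
import sbt.internal.inc.{ ScalaInstance, ZincUtil }
import sbt.internal.librarymanagement.IvyConfiguration
import sbt.librarymanagement.ModuleID
import sbt.util.Logger
import xsbti.{ ComponentProvider, GlobalLock }
import xsbti.compile.{ ClasspathOptions, Compilers }

// Hypothetical helper mirroring the compilersSetting body above.
def mkCompilers(
    instance: ScalaInstance,
    cpOptions: ClasspathOptions,
    javaHome: Option[File],
    globalLock: GlobalLock,
    components: ComponentProvider,
    zincDir: File,
    ivyConfiguration: IvyConfiguration,
    bridge: ModuleID,
    log: Logger
): Compilers = {
  // Step 1: an AnalyzingCompiler backed by the resolved compiler bridge.
  val scalac = ZincUtil.scalaCompiler(
    scalaInstance = instance,
    classpathOptions = cpOptions,
    globalLock = globalLock,
    componentProvider = components,
    secondaryCacheDir = Option(zincDir),
    ivyConfiguration = ivyConfiguration,
    compilerBridgeSource = bridge,
    scalaJarsTarget = zincDir,
    log = log
  )
  // Step 2: bundle it with a javac chosen from javaHome.
  ZincUtil.compilers(instance = instance, classpathOptions = cpOptions, javaHome = javaHome, scalac)
}
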
@@ -1350,6 +1366,7 @@ object Defaults extends BuildCommon {
  // TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
  compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value)
}
private val incCompiler = ZincUtil.defaultIncrementalCompiler
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Inputs): CompileResult = {
  lazy val x = s.text(ExportStream)
  def onArgs(cs: Compilers) =

@@ -1364,16 +1381,17 @@ object Defaults extends BuildCommon {
  //)
  val compilers: Compilers = ci.compilers
  val i = ci.withCompilers(onArgs(compilers))
  try Compiler.compile(i, s.log)
  finally x.close() // workaround for #937
  try {
    incCompiler.compile(i, s.log)
  } finally x.close() // workaround for #937
}
def compileIncSetupTask = Def.task {
  val lookup = new PerClasspathEntryLookup {
    private val cachedAnalysisMap = analysisMap(dependencyClasspath.value)
    private val cachedPerEntryDefinesClassLookup = Keys.classpathEntryDefinesClass.value

    override def analysis(classpathEntry: File): Maybe[CompileAnalysis] =
      o2m(cachedAnalysisMap(classpathEntry))
    override def analysis(classpathEntry: File): Optional[CompileAnalysis] =
      cachedAnalysisMap(classpathEntry).toOptional
    override def definesClass(classpathEntry: File): DefinesClass =
      cachedPerEntryDefinesClassLookup(classpathEntry)
  }

@@ -1385,7 +1403,7 @@ object Defaults extends BuildCommon {
    compilerCache.value,
    incOptions.value,
    (compilerReporter in compile).value,
    xsbti.Maybe.nothing(),
    None.toOptional,
    // TODO - task / setting for extra,
    Array.empty
  )

@@ -1399,12 +1417,12 @@ object Defaults extends BuildCommon {
    scalacOptions.value.toArray,
    javacOptions.value.toArray,
    maxErrors.value,
    f1(Compiler.foldMappers(sourcePositionMappers.value)),
    f1(foldMappers(sourcePositionMappers.value)),
    compileOrder.value
  ),
  compilerReporter := new LoggerReporter(maxErrors.value,
                                         streams.value.log,
                                         Compiler.foldMappers(sourcePositionMappers.value)),
                                         foldMappers(sourcePositionMappers.value)),
  compileInputs := new Inputs(
    compilers.value,
    compileOptions.value,

@@ -1413,13 +1431,25 @@ object Defaults extends BuildCommon {
    )
  )
}

private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
  mappers.foldRight({ p: A =>
    p
  }) { (mapper, mappers) =>
    { p: A =>
      mapper(p).getOrElse(mappers(p))
    }
  }
private[sbt] def none[A]: Option[A] = (None: Option[A])
private[sbt] def jnone[A]: Optional[A] = none[A].toOptional
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
  previousCompile := {
    val setup = compileIncSetup.value
    val store = MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile)
    store.get() match {
      case Some((an, setup)) => new PreviousResult(Maybe.just(an), Maybe.just(setup))
      case None => new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
      case Some((an, setup)) =>
        new PreviousResult(Option(an).toOptional, Option(setup).toOptional)
      case None => new PreviousResult(jnone[CompileAnalysis], jnone[MiniSetup])
    }
  }
)

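foldMappers, now a private helper in Defaults, folds a sequence of partial A => Option[A] mappers into one total function: the first mapper that returns Some wins, and the identity is the fallback. A small self-contained illustration in plain Scala (strings instead of source positions):

// Same folding logic as foldMappers above, shown on Strings.
def foldMappers[A](mappers: Seq[A => Option[A]]): A => A =
  mappers.foldRight((p: A) => p) { (mapper, rest) => p =>
    mapper(p).getOrElse(rest(p))
  }

val toUpper: String => Option[String] = s => if (s.startsWith("a")) Some(s.toUpperCase) else None
val reverse: String => Option[String] = s => if (s.startsWith("b")) Some(s.reverse) else None
val mapped = foldMappers(Seq(toUpper, reverse))

assert(mapped("abc") == "ABC")   // first mapper applies
assert(mapped("bcd") == "dcb")   // falls through to the second mapper
assert(mapped("xyz") == "xyz")   // no mapper applies: identity
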
@@ -1430,8 +1460,9 @@ object Defaults extends BuildCommon {
  val max = maxErrors.value
  val spms = sourcePositionMappers.value
  val problems =
    analysis.infos.allInfos.values.flatMap(i => i.reportedProblems ++ i.unreportedProblems)
  val reporter = new LoggerReporter(max, streams.value.log, Compiler.foldMappers(spms))
    analysis.infos.allInfos.values.flatMap(i =>
      i.getReportedProblems ++ i.getUnreportedProblems)
  val reporter = new LoggerReporter(max, streams.value.log, foldMappers(spms))
  problems foreach { p =>
    reporter.display(p)
  }

@@ -1792,10 +1823,14 @@ object Classpaths {
  // Tell the UpdateConfiguration which artifact types are special (for sources and javadocs)
  val specialArtifactTypes = sourceArtifactTypes.value union docArtifactTypes.value
  // By default, to retrieve all types *but* these (it's assumed that everything else is binary/resource)
  UpdateConfiguration(retrieveConfiguration.value,
                      false,
                      ivyLoggingLevel.value,
                      ArtifactTypeFilter.forbid(specialArtifactTypes))
  UpdateConfiguration(
    retrieve = retrieveConfiguration.value,
    missingOk = false,
    logging = ivyLoggingLevel.value,
    artifactFilter = ArtifactTypeFilter.forbid(specialArtifactTypes),
    offline = offline.value,
    frozen = false
  )
},
retrieveConfiguration := {
  if (retrieveManaged.value)

@@ -2022,16 +2057,16 @@ object Classpaths {
  explicit orElse bootRepositories(appConfiguration.value) getOrElse externalResolvers.value
},
ivyConfiguration := new InlineIvyConfiguration(
  ivyPaths.value,
  externalResolvers.value.toVector,
  Vector.empty,
  Vector.empty,
  offline.value,
  Option(lock(appConfiguration.value)),
  checksums.value.toVector,
  Some(crossTarget.value / "resolution-cache"),
  UpdateOptions(),
  streams.value.log
  paths = ivyPaths.value,
  resolvers = externalResolvers.value.toVector,
  otherResolvers = Vector.empty,
  moduleConfigurations = Vector.empty,
  lock = Option(lock(appConfiguration.value)),
  checksums = checksums.value.toVector,
  managedChecksums = false,
  resolutionCacheDir = Some(crossTarget.value / "resolution-cache"),
  updateOptions = UpdateOptions(),
  log = streams.value.log
),
ivySbt := ivySbt0.value,
classifiersModule := classifiersModuleTask.value,

@@ -2426,7 +2461,7 @@ object Classpaths {
  }
  case _ =>
    Def.task {
      val analysisOpt = previousCompile.value.analysis
      val analysisOpt = previousCompile.value.analysis.toOption
      dirs map { x =>
        (x,
         if (analysisOpt.isDefined) analysisOpt.get

@@ -2450,7 +2485,7 @@ object Classpaths {
  }
  case _ =>
    Def.task {
      val analysisOpt = previousCompile.value.analysis
      val analysisOpt = previousCompile.value.analysis.toOption
      Seq(jar) map { x =>
        (x,
         if (analysisOpt.isDefined) analysisOpt.get

@@ -2493,18 +2528,18 @@ object Classpaths {
  val (rs, other) = (fullResolvers.value.toVector, otherResolvers.value.toVector)
  val s = streams.value
  warnResolversConflict(rs ++: other, s.log)
  val resCacheDir = crossTarget.value / "resolution-cache"
  new InlineIvyConfiguration(
    ivyPaths.value,
    rs,
    other,
    moduleConfigurations.value.toVector,
    offline.value,
    Option(lock(appConfiguration.value)),
    (checksums in update).value.toVector,
    Some(resCacheDir),
    updateOptions.value,
    s.log
    paths = ivyPaths.value,
    resolvers = rs,
    otherResolvers = other,
    moduleConfigurations = moduleConfigurations.value.toVector,
    // offline.value,
    lock = Option(lock(appConfiguration.value)),
    checksums = (checksums in update).value.toVector,
    managedChecksums = false,
    resolutionCacheDir = Some(crossTarget.value / "resolution-cache"),
    updateOptions = updateOptions.value,
    log = s.log
  )
}

@@ -3077,7 +3112,7 @@ trait BuildCommon {
  overridden ++ newConfigs
}

// these are intended for use in input tasks for creating parsers
// these are intended for use in in put tasks for creating parsers
def getFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State): Option[T] =
  SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)

@@ -38,11 +38,12 @@ import sbt.internal.util.{
import sbt.util.{ Level, Logger }

import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.internal.inc.{ CompilerCache, ScalaInstance }
import sbt.internal.inc.ScalaInstance
import sbt.compiler.EvalImports
import Types.{ const, idFun }
import Aggregation.AnyKeys
import Project.LoadAction
import xsbti.compile.CompilerCache

import scala.annotation.tailrec
import sbt.io.IO

@@ -695,7 +696,7 @@ object BuiltinCommands {
  case e: NumberFormatException =>
    throw new RuntimeException("Resident compiler limit must be an integer.", e)
  }
  if (num <= 0) CompilerCache.fresh else CompilerCache(num)
  if (num <= 0) CompilerCache.fresh else CompilerCache.createCacheFor(num)
}
s.put(Keys.stateCompilerCache, cache)
}

@@ -0,0 +1,41 @@
package sbt

import java.util.Optional

// Todo: port this back to Zinc in place of JavaInterfaceUtil.
trait OptionSyntax {
  import OptionSyntax._
  implicit def sbtOptionSyntaxRichOptional[A](optional: Optional[A]): RichOptional[A] =
    new RichOptional[A](optional)

  implicit def sbtOptionSyntaxRichOption[A](option: Option[A]): RichOption[A] =
    new RichOption[A](option)

  implicit def sbtOptionSyntaxOptionIdOps[A](a: A): OptionIdOps[A] =
    new OptionIdOps[A](a)

  final def none[A]: Option[A] = None
}

object OptionSyntax extends OptionSyntax {

  /** Injects some method. */
  final class OptionIdOps[A](val a: A) extends AnyVal {
    def some: Option[A] = Some(a)
  }

  /** Injects asScala method. */
  final class RichOptional[A](val optional: Optional[A]) extends AnyVal {
    def asScala: Option[A] =
      if (!optional.isPresent) None
      else Some(optional.get())
  }

  /** Injects asJava method. */
  final class RichOption[A](val option: Option[A]) extends AnyVal {
    def asJava: Optional[A] = option match {
      case Some(value) => Optional.of(value)
      case None        => Optional.empty[A]
    }
  }
}

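The new OptionSyntax trait above (mixed into the sbt package object later in this diff) gives build and sbt-internal code small conversions between scala.Option and java.util.Optional. A usage sketch based only on the definitions shown:

import java.util.Optional
import sbt.OptionSyntax._

val present: Option[Int] = Optional.of(1).asScala       // Some(1)
val emptyJ: Optional[String] = none[String].asJava      // Optional.empty
val lifted: Option[Int] = 42.some                       // Some(42)
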
@@ -37,10 +37,10 @@ object AltLibraryManagementCodec extends LibraryManagementCodec {
  RawRepository]

type InlineIvyHL = (IvyPaths :+: Vector[Resolver] :+: Vector[Resolver] :+: Vector[
  ModuleConfiguration] :+: Boolean :+: Vector[String] :+: HNil)
  ModuleConfiguration] :+: Vector[String] :+: Boolean :+: HNil)
def inlineIvyToHL(i: InlineIvyConfiguration): InlineIvyHL = (
  i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+: i.localOnly
    :+: i.checksums :+: HNil
  i.paths :+: i.resolvers :+: i.otherResolvers :+: i.moduleConfigurations :+:
    i.checksums :+: i.managedChecksums :+: HNil
)

type ExternalIvyHL = PlainFileInfo :+: Array[Byte] :+: HNil

@@ -5,7 +5,8 @@ package sbt
package internal

import sbt.util.Logger
import sbt.internal.inc.{ ClasspathOptionsUtil, ScalaInstance }
import sbt.internal.inc.{ ScalaInstance, ZincUtil }
import xsbti.compile.ClasspathOptionsUtil

object ConsoleProject {
  def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(

@@ -19,12 +20,21 @@ object ConsoleProject {
    val scalaProvider = state.configuration.provider.scalaProvider
    ScalaInstance(scalaProvider.version, scalaProvider.launcher)
  }
  val sourcesModule = extracted.get(Keys.scalaCompilerBridgeSource)
  val compiler = Compiler.scalaCompiler(scalaInstance,
                                        ClasspathOptionsUtil.repl,
                                        None,
                                        ivyConf,
                                        sourcesModule)(state.configuration, log)
  val g = BuildPaths.getGlobalBase(state)
  val zincDir = BuildPaths.getZincDirectory(state, g)
  val app = state.configuration
  val launcher = app.provider.scalaProvider.launcher
  val compiler = ZincUtil.scalaCompiler(
    scalaInstance = scalaInstance,
    classpathOptions = ClasspathOptionsUtil.repl,
    globalLock = launcher.globalLock,
    componentProvider = app.provider.components,
    secondaryCacheDir = Option(zincDir),
    ivyConfiguration = ivyConf,
    compilerBridgeSource = extracted.get(Keys.scalaCompilerBridgeSource),
    scalaJarsTarget = zincDir,
    log = log
  )
  val imports = BuildUtil.getImports(unit.unit) ++ BuildUtil.importAll(bindings.map(_._1))
  val importString = imports.mkString("", ";\n", ";\n\n")
  val initCommands = importString + extra

@@ -38,8 +38,13 @@ object LibraryManagement {

log.info(s"Updating $label...")
val reportOrUnresolved: Either[UnresolvedWarning, UpdateReport] =
  //try {
  IvyActions.updateEither(module, updateConfig, uwConfig, logicalClock, depDir, log)

// } catch {
//   case e: Throwable =>
//     e.printStackTrace
//     throw e
// }
val report = reportOrUnresolved match {
  case Right(report0) => report0
  case Left(unresolvedWarning) =>

@@ -4,18 +4,13 @@
package sbt
package internal

import sbt.librarymanagement.{ Configuration, Configurations, Resolver, UpdateOptions }
import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }

import BuildPaths._
import BuildStreams._
import collection.mutable
import compiler.Eval
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
import java.io.File
import java.net.URI
import compiler.Eval
import scala.annotation.tailrec
import collection.mutable
import sbt.internal.inc.ClasspathOptionsUtil
import sbt.internal.inc.classpath.ClasspathUtilities
import Project.inScope
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
import Keys.{
  appConfiguration,
  baseDirectory,

@@ -34,17 +29,20 @@ import Keys.{
  thisProjectRef,
  update
}
import scala.tools.nsc.reporters.ConsoleReporter
import sbt.internal.util.{ Attributed, Settings, ~> }
import sbt.util.{ Eval => Ev, Show }
import Project.inScope
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.librarymanagement.{ InlineIvyConfiguration, IvyPaths }
import sbt.internal.inc.{ ZincUtil, ScalaInstance }
import sbt.internal.util.Attributed.data
import Scope.GlobalScope
import sbt.internal.util.Types.const
import BuildPaths._
import BuildStreams._
import sbt.internal.util.{ Attributed, Settings, ~> }
import sbt.io.{ GlobFilter, IO, Path }
import sbt.util.Logger
import xsbti.compile.Compilers
import sbt.librarymanagement.{ Configuration, Configurations, Resolver, UpdateOptions }
import sbt.util.{ Eval => Ev, Show, Logger }
import scala.annotation.tailrec
import scala.tools.nsc.reporters.ConsoleReporter
import Scope.GlobalScope
import xsbti.compile.{ ClasspathOptionsUtil, Compilers }

private[sbt] object Load {
  // note that there is State passed in but not pulled out

@@ -70,27 +68,43 @@ private[sbt] object Load {
    baseDirectory: File,
    globalBase: File,
    log: Logger): LoadBuildConfiguration = {
  val provider = state.configuration.provider
  val scalaProvider = provider.scalaProvider
  val app = state.configuration
  val provider = app.provider
  val scalaProvider = app.provider.scalaProvider
  val launcher = scalaProvider.launcher
  val stagingDirectory = getStagingDirectory(state, globalBase).getCanonicalFile
  val loader = getClass.getClassLoader
  val classpath = Attributed.blankSeq(provider.mainClasspath ++ scalaProvider.jars)
  val localOnly = false
  val lock = None
  val checksums = Vector.empty
  val ivyPaths = IvyPaths(baseDirectory, bootIvyHome(state.configuration))
  val ivyConfiguration = new InlineIvyConfiguration(ivyPaths,
                                                    Resolver.withDefaultResolvers(Nil).toVector,
                                                    Vector.empty,
                                                    Vector.empty,
                                                    localOnly,
                                                    lock,
                                                    checksums,
                                                    None,
                                                    UpdateOptions(),
                                                    log)
  val compilers =
    Compiler.compilers(ClasspathOptionsUtil.boot, ivyConfiguration)(state.configuration, log)
  val ivyConfiguration = new InlineIvyConfiguration(
    paths = IvyPaths(baseDirectory, bootIvyHome(state.configuration)),
    resolvers = Resolver.withDefaultResolvers(Nil).toVector,
    otherResolvers = Vector.empty,
    moduleConfigurations = Vector.empty,
    lock = None,
    checksums = Vector.empty,
    managedChecksums = false,
    resolutionCacheDir = None,
    updateOptions = UpdateOptions(),
    log = log
  )
  val si = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
  val zincDir = BuildPaths.getZincDirectory(state, globalBase)
  val classpathOptions = ClasspathOptionsUtil.boot
  val scalac = ZincUtil.scalaCompiler(
    scalaInstance = si,
    classpathOptions = classpathOptions,
    globalLock = launcher.globalLock,
    componentProvider = app.provider.components,
    secondaryCacheDir = Option(zincDir),
    ivyConfiguration = ivyConfiguration,
    compilerBridgeSource = ZincUtil.getDefaultBridgeModule(scalaProvider.version),
    scalaJarsTarget = zincDir,
    log = log
  )
  val compilers = ZincUtil.compilers(instance = si,
                                     classpathOptions = classpathOptions,
                                     javaHome = None,
                                     scalac)
  val evalPluginDef = EvaluateTask.evalPluginDef(log) _
  val delegates = defaultDelegates
  val pluginMgmt = PluginManagement(loader)

@@ -13,8 +13,8 @@ object Dependencies {
  // sbt modules
  private val ioVersion = "1.0.0-M11"
  private val utilVersion = "1.0.0-M24"
  private val lmVersion = "1.0.0-X11"
  private val zincVersion = "1.0.0-X14"
  private val lmVersion = "1.0.0-X15"
  private val zincVersion = "1.0.0-X16"

  private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

@@ -8,10 +8,12 @@ package object sbt
    with sbt.internal.librarymanagement.impl.DependencyBuilders
    with sbt.ProjectExtra
    with sbt.librarymanagement.DependencyFilterExtra
    with sbt.librarymanagement.LibraryManagementSyntax
    with sbt.BuildExtra
    with sbt.TaskMacroExtra
    with sbt.ScopeFilter.Make
    with sbt.BuildSyntax
    with sbt.OptionSyntax
    with sbt.Import {
  // IO
  def uri(s: String): URI = new URI(s)

@@ -30,9 +32,6 @@ package object sbt
  }
  type CompileOrder = xsbti.compile.CompileOrder

  implicit def maybeToOption[S](m: xsbti.Maybe[S]): Option[S] =
    if (m.isDefined) Some(m.get) else None

  final val ThisScope = Scope.ThisScope
  final val Global = Scope.Global
  final val GlobalScope = Scope.GlobalScope

@@ -1,10 +1,10 @@
lazy val a = project.settings(
  scalaVersion := "2.9.2",
  scalaVersion := "2.12.2",
  scalaInstance in (Compile,doc) := (scalaInstance in b).value,
  // 2.10.1-only, so this will only succeed if `doc` recognizes the more specific scalaInstance scoped to `doc`
  scalacOptions in (Compile,doc) += "-implicits"
)

lazy val b = project.settings(
  scalaVersion := "2.10.6"
  scalaVersion := "2.12.2"
)

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -8,14 +8,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -5,8 +5,8 @@ scalaVersion := "2.10.6"
crossScalaVersions := List("2.10.6", "2.11.8")

incOptions := incOptions.value.withClassfileManagerType(
  xsbti.Maybe.just(new xsbti.compile.TransactionalManagerType(
  Option(new xsbti.compile.TransactionalManagerType(
    crossTarget.value / "classes.bak",
    (streams in (Compile, compile)).value.log
  ))
  ): xsbti.compile.ClassFileManagerType).asJava
)

@@ -16,7 +16,7 @@ def commonSettings: Seq[Def.Setting[_]] =

def consolidatedResolutionSettings: Seq[Def.Setting[_]] =
  commonSettings ++ Seq(
    updateOptions := updateOptions.value.withConsolidatedResolution(true)
    updateOptions := updateOptions.value.withCachedResolution(true)
  )

// overrides cached

@@ -1,5 +1,3 @@
import sbt.librarymanagement.syntax._

libraryDependencies += "org.scalacheck" % "scalacheck" % "1.5"

ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value

@@ -1 +1 @@
managedScalaInstance := false
managedScalaInstance := false

@@ -1,8 +1,8 @@
import sbt.librarymanagement.syntax._

Seq(externalIvySettings(), externalIvyFile())
externalIvySettings()
externalIvyFile()

TaskKey[Unit]("check") := {
  val files = update.value.matching( moduleFilter(organization = "org.scalacheck", name = "scalacheck*", revision = "1.11.4") )
  assert(files.nonEmpty, "ScalaCheck module not found in update report")
  val ur = update.value
  val files = ur.matching( moduleFilter(organization = "org.scalacheck", name = "scalacheck*", revision = "1.13.4") )
  assert(files.nonEmpty, "ScalaCheck module not found in update report")
}

@@ -2,6 +2,6 @@
<ivy-module version="2.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation= "http://ant.apache.org/ivy/schemas/ivy.xsd">
  <info organisation="org" module="ivy-settings-test"/>
  <dependencies>
    <dependency org="org.scalacheck" name="scalacheck_2.11" rev="1.11.4"/>
    <dependency org="org.scalacheck" name="scalacheck_2.12" rev="1.13.4"/>
  </dependencies>
</ivy-module>

@@ -1,3 +1,4 @@
> debug
> update
# works because scalaVersion is the same as sbtScalaVersion
> compile

@@ -1,5 +1,3 @@
import sbt.librarymanagement.syntax._

externalIvySettings()

libraryDependencies += "org.scalacheck" % "scalacheck" % "1.5"

@@ -1,5 +1,3 @@
import sbt.librarymanagement.syntax._

scalaHome := Some(baseDirectory.value / "home")

val checkUpdate = taskKey[Unit]("Ensures that resolved Scala artifacts are replaced with ones from the configured Scala home directory")

@@ -1,5 +1,3 @@
import sbt.librarymanagement.syntax._

lazy val root = (project in file(".")).
  settings(
    libraryDependencies += "net.liftweb" % "lift-webkit" % "1.0" intransitive(),

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -6,14 +6,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -8,14 +8,12 @@ val recordPreviousIterations = taskKey[Unit]("Record previous iterations.")
recordPreviousIterations := {
  val log = streams.value.log
  CompileState.previousIterations = {
    val previousAnalysis = (previousCompile in Compile).value.analysis
    if (previousAnalysis.isEmpty) {
      log.info("No previous analysis detected")
      0
    } else {
      previousAnalysis.get match {
        case a: Analysis => a.compilations.allCompilations.size
      }
    val previousAnalysis = (previousCompile in Compile).value.analysis.asScala
    previousAnalysis match {
      case None =>
        log.info("No previous analysis detected")
        0
      case Some(a: Analysis) => a.compilations.allCompilations.size
    }
  }
}

@@ -5,7 +5,7 @@ import xsbti.compile.{PreviousResult, CompileAnalysis, MiniSetup}
previousCompile in Compile := {
  val previous = (previousCompile in Compile).value
  if (!CompileState.isNew) {
    val res = new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
    val res = new PreviousResult(none[CompileAnalysis].asJava, none[MiniSetup].asJava)
    CompileState.isNew = true
    res
  } else previous

@@ -25,7 +25,7 @@ TaskKey[Unit]("checkCompilations") := {
  val allCompilations = analysis.compilations.allCompilations
  val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
    val recompiledFiles = analysis.apis.internal.collect {
      case (cn, api) if api.compilationTimestamp == c.startTime => findFile(cn)
      case (cn, api) if api.compilationTimestamp == c.getStartTime => findFile(cn)
    }
    recompiledFiles.toSet
  }

@@ -8,7 +8,7 @@ logLevel := Level.Debug
previousCompile in Compile := {
  val previous = (previousCompile in Compile).value
  if (!CompileState.isNew) {
    val res = new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
    val res = new PreviousResult(none[CompileAnalysis].asJava, none[MiniSetup].asJava)
    CompileState.isNew = true
    res
  } else previous

@@ -34,7 +34,7 @@ TaskKey[Unit]("checkCompilations") := {
  val allCompilations = analysis.compilations.allCompilations
  val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
    val recompiledFiles = analysis.apis.internal.collect {
      case (cn, api) if api.compilationTimestamp == c.startTime => findFile(cn)
      case (cn, api) if api.compilationTimestamp == c.getStartTime => findFile(cn)
    }
    recompiledFiles.toSet
  }

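The scripted tests above replace xsbti.Maybe with java.util.Optional throughout; an empty PreviousResult is now built from the OptionSyntax helpers rather than Maybe.nothing. A minimal sketch, assuming the same imports the tests use:

import xsbti.compile.{ CompileAnalysis, MiniSetup, PreviousResult }
import sbt.OptionSyntax._

// Old: new PreviousResult(Maybe.nothing[CompileAnalysis], Maybe.nothing[MiniSetup])
// New: empty java.util.Optional values built via none[A].asJava.
val emptyResult = new PreviousResult(none[CompileAnalysis].asJava, none[MiniSetup].asJava)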