Merge pull request #3082 from dwijnand/remove-most-deprecated

Remove most @deprecated
This commit is contained in:
Dale Wijnand 2017-04-13 10:32:10 +01:00 committed by GitHub
commit 308abcde9f
46 changed files with 484 additions and 678 deletions

View File

@ -17,3 +17,8 @@ Migration notes
- `TestResult.Value` is now `TestResult`.
- the scripted plugin is cross-versioned now, so you must use %% when depending on it
- Removed the methods settingsSets from Project (along with add/setSbtFiles)
- Dropped deprecated InputTask apply method and inputTask DSL method - replace with `Def.inputTask` & `Def.spaceDelimited().parsed`
- Dropped deprecated ProjectReference implicit lifts - replace with `RootProject(<uri>)`, `RootProject(<file>)` or `LocalProject(<string>)`
- Dropped deprecated seq DSL method - replace with `Seq` or just enumerate without wrapping
- Dropped deprecated File/Seq[File] setting enrichments - replace with `.value` and `Def.setting`
- Dropped deprecated SubProcess apply overload - replace with `SubProcess(ForkOptions(runJVMOptions = ..))`

View File

@ -16,148 +16,55 @@ import sbt.internal.util.CacheStore
object Compiler {
val DefaultMaxErrors = 100
private[sbt] def defaultCompilerBridgeSource(sv: String): ModuleID =
VersionNumber(sv) match {
// 2.10 and before
case VersionNumber(ns, _, _) if (ns.size == 3) && (ns(0) == 2) && (ns(1) <= 10) => scalaCompilerBridgeSource2_10
// 2.11
case VersionNumber(ns, _, _) if (ns.size == 3) && (ns(0) == 2) && (ns(1) == 11) => scalaCompilerBridgeSource2_11
case _ => scalaCompilerBridgeSource2_12
}
private[sbt] def scalaCompilerBridgeSource2_10: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.10",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
private[sbt] def scalaCompilerBridgeSource2_11: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.11",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
private[sbt] def scalaCompilerBridgeSource2_12: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.12",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
/** Inputs necessary to run the incremental compiler. */
// final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
// /** The inputs for the compiler *and* the previous analysis of source dependencies. */
// final case class InputsWithPrevious(inputs: Inputs, previousAnalysis: PreviousAnalysis)
// final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder)
// final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions)
// private[sbt] trait JavaToolWithNewInterface extends JavaTool {
// def newJavac: IncrementalCompilerJavaTools
// }
/** The instances of Scalac/Javac used to compile the current project. */
// final case class Compilers(scalac: AnalyzingCompiler, javac: IncrementalCompilerJavaTools)
/** The previous source dependency analysis result from compilation. */
// final case class PreviousAnalysis(analysis: Analysis, setup: Option[MiniSetup])
// def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String],
// javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]],
// order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
// new Inputs(
// compilers,
// new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order),
// incSetup
// )
// @deprecated("Use `compilers(ScalaInstance, ClasspathOptions, Option[File], IvyConfiguration)`.", "0.13.10")
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File])(implicit app: AppConfiguration, log: Logger): Compilers =
// {
// val javac =
// AggressiveCompile.directOrFork(instance, cpOptions, javaHome)
// val javac2 =
// JavaTools.directOrFork(instance, cpOptions, javaHome)
// // Hackery to enable both the new and deprecated APIs to coexist peacefully.
// case class CheaterJavaTool(newJavac: IncrementalCompilerJavaTools, delegate: JavaTool) extends JavaTool with JavaToolWithNewInterface {
// def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit =
// javac.compile(contract, sources, classpath, outputDirectory, options)(log)
// def onArgs(f: Seq[String] => Unit): JavaTool = CheaterJavaTool(newJavac, delegate.onArgs(f))
// }
// compilers(instance, cpOptions, CheaterJavaTool(javac2, javac))
// }
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration)(implicit app: AppConfiguration, log: Logger): Compilers =
// {
// val javac =
// AggressiveCompile.directOrFork(instance, cpOptions, javaHome)
// val javac2 =
// JavaTools.directOrFork(instance, cpOptions, javaHome)
// // Hackery to enable both the new and deprecated APIs to coexist peacefully.
// case class CheaterJavaTool(newJavac: IncrementalCompilerJavaTools, delegate: JavaTool) extends JavaTool with JavaToolWithNewInterface {
// def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit =
// javac.compile(contract, sources, classpath, outputDirectory, options)(log)
// def onArgs(f: Seq[String] => Unit): JavaTool = CheaterJavaTool(newJavac, delegate.onArgs(f))
// }
// val scalac = scalaCompiler(instance, cpOptions, ivyConfiguration)
// new Compilers(scalac, CheaterJavaTool(javac2, javac))
// }
// @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: sbt.compiler.JavaCompiler.Fork)(implicit app: AppConfiguration, log: Logger): Compilers =
// {
// val javaCompiler = sbt.compiler.JavaCompiler.fork(cpOptions, instance)(javac)
// compilers(instance, cpOptions, javaCompiler)
// }
// @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaTool)(implicit app: AppConfiguration, log: Logger): Compilers =
// {
// val scalac = scalaCompiler(instance, cpOptions)
// new Compilers(scalac, javac)
// }
// @deprecated("Use `scalaCompiler(ScalaInstance, ClasspathOptions, IvyConfiguration)`.", "0.13.10")
// def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
// {
// val launcher = app.provider.scalaProvider.launcher
// val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
// val provider = ComponentCompiler.interfaceProvider(componentManager)
// new AnalyzingCompiler(instance, provider, cpOptions)
// }
def compilers(cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore)(implicit app: AppConfiguration, log: Logger): Compilers =
{
val scalaProvider = app.provider.scalaProvider
val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
val sourceModule = scalaCompilerBridgeSource2_12
compilers(instance, cpOptions, None, ivyConfiguration, fileToStore, sourceModule)
case VersionNumber(Seq(2, y, _), _, _) if y <= 10 => scalaCompilerBridgeSource2_10
case VersionNumber(Seq(2, y, _), _, _) if y == 11 => scalaCompilerBridgeSource2_11
case _ => scalaCompilerBridgeSource2_12
}
// def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers =
// compilers(instance, cpOptions, None)
private[this] def scalaCompilerBridgeSource(suffix: String): ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, s"compiler-bridge_$suffix", ComponentCompiler.incrementalVersion)
.withConfigurations(Some("component"))
.sources()
private[sbt] def scalaCompilerBridgeSource2_10: ModuleID = scalaCompilerBridgeSource("2.10")
private[sbt] def scalaCompilerBridgeSource2_11: ModuleID = scalaCompilerBridgeSource("2.11")
private[sbt] def scalaCompilerBridgeSource2_12: ModuleID = scalaCompilerBridgeSource("2.12")
def compilers(
cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore
)(implicit app: AppConfiguration, log: Logger): Compilers = {
val scalaProvider = app.provider.scalaProvider
val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
val sourceModule = scalaCompilerBridgeSource2_12
compilers(instance, cpOptions, None, ivyConfiguration, fileToStore, sourceModule)
}
// TODO: Get java compiler
def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): Compilers = {
def compilers(
instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID
)(implicit app: AppConfiguration, log: Logger): Compilers = {
val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, fileToStore, sourcesModule)
val javac = JavaTools.directOrFork(instance, cpOptions, javaHome)
new Compilers(scalac, javac)
}
def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
{
val launcher = app.provider.scalaProvider.launcher
val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
}
def scalaCompiler(
instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID
)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = {
val launcher = app.provider.scalaProvider.launcher
val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
}
private val compiler = new IncrementalCompilerImpl
def compile(in: Inputs, log: Logger): CompileResult =
{
compiler.compile(in, log)
// import in.inputs.config._
// compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}
// def compile(in: Inputs, log: Logger, reporter: xsbti.Reporter): CompileResult =
// {
// import in.inputs.compilers._
// import in.inputs.config._
// import in.inputs.incSetup._
// // Here is some trickery to choose the more recent (reporter-using) java compiler rather
// // than the previously defined versions.
// // TODO - Remove this hackery in sbt 1.0.
// val javacChosen: xsbti.compile.JavaCompiler =
// in.inputs.compilers.javac.xsbtiCompiler // ).getOrElse(in.inputs.compilers.javac)
// // TODO - Why are we not using the IC interface???
// val compiler = new IncrementalCompilerImpl
// compiler.incrementalCompile(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
// in.previousAnalysis.analysis, in.previousAnalysis.setup, analysisMap, definesClass, reporter, order, skip, incOptions)(log)
// }
def compile(in: Inputs, log: Logger): CompileResult = compiler.compile(in, log)
private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
mappers.foldRight({ p: A => p }) { (mapper, mappers) => { p: A => mapper(p).getOrElse(mappers(p)) } }

View File

@ -17,7 +17,6 @@ import sbt.internal.util.{ CacheStoreFactory, FilesInfo, HashFileInfo, HNil, Mod
import sbt.internal.util.FileInfo.{ exists, hash, lastModified }
import xsbti.compile.ClasspathOptions
import sbt.util.Logger
import sbt.internal.util.ManagedLogger
object RawCompileLike {

View File

@ -110,10 +110,6 @@ object Tests {
/** Configures a group of tests to be forked in a new JVM with forking options specified by `config`. */
final case class SubProcess(config: ForkOptions) extends TestRunPolicy
object SubProcess {
@deprecated("Construct SubProcess with a ForkOptions argument.", "0.13.0")
def apply(javaOptions: Seq[String]): SubProcess = SubProcess(ForkOptions(runJVMOptions = javaOptions))
}
/** A named group of tests configured to run in the same JVM or be forked. */
final case class Group(name: String, tests: Seq[TestDefinition], runPolicy: TestRunPolicy)
@ -315,11 +311,6 @@ object Tests {
val mains = discovered collect { case (df, di) if di.hasMain => df.name }
(tests, mains.toSet)
}
@deprecated("Tests.showResults() has been superseded with TestResultLogger and setting 'testResultLogger'.", "0.13.5")
def showResults(log: Logger, results: Output, noTestsMessage: => String): Unit =
TestResultLogger.Default.copy(printNoTests = TestResultLogger.const(_ info noTestsMessage))
.run(log, results, "")
}
final class TestsFailedException extends RuntimeException("Tests unsuccessful") with FeedbackProvidedException

View File

@ -61,8 +61,6 @@ object BasicCommands {
System.out.println(message)
s
}
@deprecated("Use Help.moreMessage", "0.13.0")
def moreHelp(more: Seq[String]): String = Help.moreMessage(more)
def completionsCommand = Command.make(CompletionsCommand, CompletionsBrief, CompletionsDetailed)(completionsParser)
def completionsParser(state: State) =

View File

@ -91,10 +91,6 @@ object InputTask {
separate(p)(act)
}
@deprecated("Use another InputTask constructor or the `Def.inputTask` macro.", "0.13.0")
def apply[I, T](p: State => Parser[I])(action: TaskKey[I] => Initialize[Task[T]]): Initialize[InputTask[T]] =
apply(Def.value(p))(action)
/**
* The proper solution is to have a Manifest context bound and accept slight source incompatibility,
* The affected InputTask construction methods are all deprecated and so it is better to keep complete
@ -120,6 +116,7 @@ object InputTask {
val t: Task[I] = Task(Info[I]().set(key, None), Pure(f, false))
(key, t)
}
private[this] def subForDummy[I, T](marker: AttributeKey[Option[I]], value: I, task: Task[T]): Task[T] =
{
val seen = new java.util.IdentityHashMap[Task[_], Task[_]]

View File

@ -94,13 +94,4 @@ object Reference {
case BuildRef(b) => Some(b)
case _ => None
}
@deprecated("Explicitly wrap the URI in a call to RootProject.", "0.13.0")
implicit def uriToRef(u: URI): ProjectReference = RootProject(u)
@deprecated("Explicitly wrap the File in a call to RootProject.", "0.13.0")
implicit def fileToRef(f: File): ProjectReference = RootProject(f)
@deprecated("Explicitly wrap the String in a call to LocalProject.", "0.13.0")
implicit def stringToReference(s: String): ProjectReference = LocalProject(s)
}

View File

@ -5,10 +5,6 @@ package sbt
import scala.language.experimental.macros
import java.io.File
import sbt.io.{ FileFilter, PathFinder }
import sbt.io.syntax._
import sbt.internal.util.Types._
import sbt.internal.util.{ ~>, AList, AttributeKey, Settings, SourcePosition }
import sbt.util.OptJsonWriter
@ -54,7 +50,8 @@ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitiali
final def transform(f: T => T, source: SourcePosition): Setting[T] = set(scopedKey(f), source)
protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] = set((this, other)(f), source)
protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] =
set((this, other)(f), source)
}
/**
@ -83,7 +80,7 @@ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[
final def removeN[V](vs: Initialize[Task[V]], source: SourcePosition)(implicit r: Remove.Values[T, V]): Setting[Task[T]] = make(vs, source)(r.removeValues)
private[this] def make[S](other: Initialize[Task[S]], source: SourcePosition)(f: (T, S) => T): Setting[Task[T]] =
set((this, other) { (a, b) => (a, b) map f.tupled }, source)
set((this, other)((a, b) => (a, b) map f.tupled), source)
}
/**
@ -206,10 +203,12 @@ object Scoped {
def ??[T >: S](or: => T): Initialize[Task[T]] = Def.optional(scopedKey)(_ getOrElse mktask(or))
def or[T >: S](i: Initialize[Task[T]]): Initialize[Task[T]] = (this.? zipWith i)((x, y) => (x, y) map { case (a, b) => a getOrElse b })
}
final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f
def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.dependsOn(deps: _*) }
def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] =
(i, Initialize.joinAny[Task](tasks))((thisTask, deps) => thisTask.dependsOn(deps: _*))
def failure: Initialize[Task[Incomplete]] = i(_.failure)
def result: Initialize[Task[Result[S]]] = i(_.result)
@ -217,12 +216,16 @@ object Scoped {
def xtriggeredBy[T](tasks: Initialize[Task[T]]*): Initialize[Task[S]] = nonLocal(tasks, Def.triggeredBy)
def triggeredBy[T](tasks: Initialize[Task[T]]*): Initialize[Task[S]] = nonLocal(tasks, Def.triggeredBy)
def runBefore[T](tasks: Initialize[Task[T]]*): Initialize[Task[S]] = nonLocal(tasks, Def.runBefore)
private[this] def nonLocal(tasks: Seq[AnyInitTask], key: AttributeKey[Seq[Task[_]]]): Initialize[Task[S]] =
(Initialize.joinAny[Task](tasks), i) { (ts, i) => i.copy(info = i.info.set(key, ts)) }
(Initialize.joinAny[Task](tasks), i)((ts, i) => i.copy(info = i.info.set(key, ts)))
}
final class RichInitializeInputTask[S](i: Initialize[InputTask[S]]) extends RichInitTaskBase[S, InputTask] {
protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f)
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)) }
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] =
(i, Initialize.joinAny[Task](tasks))((thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)))
}
sealed abstract class RichInitTaskBase[S, R[_]] {
@ -264,29 +267,6 @@ object Scoped {
def dependOn: Initialize[Task[Unit]] = Initialize.joinAny[Task](keys).apply(deps => nop.dependsOn(deps: _*))
}
implicit def richFileSetting(s: SettingKey[File]): RichFileSetting = new RichFileSetting(s)
implicit def richFilesSetting(s: SettingKey[Seq[File]]): RichFilesSetting = new RichFilesSetting(s)
final class RichFileSetting(s: SettingKey[File]) extends RichFileBase {
@deprecated("Use a standard setting definition.", "0.13.0")
def /(c: String): Initialize[File] = s { _ / c }
protected[this] def map0(f: PathFinder => PathFinder) = s(file => finder(f)(file :: Nil))
}
final class RichFilesSetting(s: SettingKey[Seq[File]]) extends RichFileBase {
@deprecated("Use a standard setting definition.", "0.13.0")
def /(s: String): Initialize[Seq[File]] = map0 { _ / s }
protected[this] def map0(f: PathFinder => PathFinder) = s(finder(f))
}
sealed abstract class RichFileBase {
@deprecated("Use a standard setting definition.", "0.13.0")
def *(filter: FileFilter): Initialize[Seq[File]] = map0 { _ * filter }
@deprecated("Use a standard setting definition.", "0.13.0")
def **(filter: FileFilter): Initialize[Seq[File]] = map0 { _ ** filter }
protected[this] def map0(f: PathFinder => PathFinder): Initialize[Seq[File]]
protected[this] def finder(f: PathFinder => PathFinder): Seq[File] => Seq[File] =
in => f(in).get
}
// this is the least painful arrangement I came up with
@deprecated("The sbt 0.10 style DSL is deprecated: '(k1, k2) map { (x, y) => ... }' should now be '{ val x = k1.value; val y = k2.value }'.\nSee http://www.scala-sbt.org/0.13/docs/Migrating-from-sbt-012x.html", "0.13.13") implicit def t2ToTable2[A, B](t2: (ScopedTaskable[A], ScopedTaskable[B])): RichTaskable2[A, B] = new RichTaskable2(t2)
@deprecated("The sbt 0.10 style DSL is deprecated: '(k1, k2) map { (x, y) => ... }' should now be '{ val x = k1.value; val y = k2.value }'.\nSee http://www.scala-sbt.org/0.13/docs/Migrating-from-sbt-012x.html", "0.13.13") implicit def t3ToTable3[A, B, C](t3: (ScopedTaskable[A], ScopedTaskable[B], ScopedTaskable[C])): RichTaskable3[A, B, C] = new RichTaskable3(t3)
@ -359,7 +339,6 @@ object Scoped {
def identityMap = map(mkTuple10)
protected def convert[M[_], R](z: Fun[M, R]) = z.tupled
}
final class RichTaskable11[A, B, C, D, E, F, G, H, I, J, K](t11: ((ST[A], ST[B], ST[C], ST[D], ST[E], ST[F], ST[G], ST[H], ST[I], ST[J], ST[K]))) extends RichTaskables[AList.T11K[A, B, C, D, E, F, G, H, I, J, K]#l](t11)(AList.tuple11[A, B, C, D, E, F, G, H, I, J, K]) {
type Fun[M[_], Ret] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) => Ret
def identityMap = map(mkTuple11)

View File

@ -42,23 +42,21 @@ object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstan
type SS = sbt.internal.util.Settings[Scope]
val settingsData = TaskKey[SS]("settings-data", "Provides access to the project data for the build.", KeyRanks.DTask)
def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] =
{
import Scoped._
(in, settingsData, Def.capturedTransformations) apply {
(a: Task[Initialize[Task[T]]], data: Task[SS], f) =>
import TaskExtra.multT2Task
(a, data) flatMap { case (a, d) => f(a) evaluate d }
}
def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = {
import Scoped._
(in, settingsData, Def.capturedTransformations) { (a: Task[Initialize[Task[T]]], data: Task[SS], f) =>
import TaskExtra.multT2Task
(a, data) flatMap { case (a, d) => f(a) evaluate d }
}
def flattenFun[S, T](in: Initialize[Task[S => Initialize[Task[T]]]]): Initialize[S => Task[T]] =
{
import Scoped._
(in, settingsData, Def.capturedTransformations) apply { (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => (s: S) =>
import TaskExtra.multT2Task
(a, data) flatMap { case (af, d) => f(af(s)) evaluate d }
}
}
def flattenFun[S, T](in: Initialize[Task[S => Initialize[Task[T]]]]): Initialize[S => Task[T]] = {
import Scoped._
(in, settingsData, Def.capturedTransformations) { (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => (s: S) =>
import TaskExtra.multT2Task
(a, data) flatMap { case (af, d) => f(af(s)) evaluate d }
}
}
}
object TaskMacro {
@ -129,18 +127,25 @@ object TaskMacro {
def itaskAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[Initialize[Task[T]]]): c.Expr[Setting[Task[T]]] =
settingAssignPosition(c)(app)
def taskAssignPositionT[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[Task[T]]): c.Expr[Setting[Task[T]]] =
itaskAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def taskAssignPositionPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[Task[T]]] =
taskAssignPositionT(c)(c.universe.reify { TaskExtra.constant(app.splice) })
def taskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(f: c.Expr[S => S]): c.Expr[Setting[Task[S]]] =
c.Expr[Setting[Task[S]]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def itaskTransformPosition[S: c.WeakTypeTag](c: blackbox.Context)(f: c.Expr[S => S]): c.Expr[Setting[S]] =
c.Expr[Setting[S]](transformMacroImpl(c)(f.tree)(TransformInitName))
def settingAssignPure[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[T]): c.Expr[Setting[T]] =
settingAssignPosition(c)(c.universe.reify { Def.valueStrict(app.splice) })
def settingAssignPosition[T: c.WeakTypeTag](c: blackbox.Context)(app: c.Expr[Initialize[T]]): c.Expr[Setting[T]] =
c.Expr[Setting[T]](transformMacroImpl(c)(app.tree)(AssignInitName))
@ -151,6 +156,7 @@ object TaskMacro {
val assign = transformMacroImpl(c)(init.tree)(AssignInitName)
c.Expr[Setting[InputTask[T]]](assign)
}
/** Implementation of += macro for tasks. */
def taskAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[Task[T]]] =
{
@ -158,6 +164,7 @@ object TaskMacro {
val append = appendMacroImpl(c)(init.tree, a.tree)(Append1InitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of += macro for settings. */
def settingAppend1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(a: c.Expr[Append.Value[T, U]]): c.Expr[Setting[T]] =
{
@ -179,6 +186,7 @@ object TaskMacro {
c.Expr[Setting[T]](append)
}
}
/** Implementation of ++= macro for tasks. */
def taskAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[Task[T]]] =
{
@ -186,6 +194,7 @@ object TaskMacro {
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[Task[T]]](append)
}
/** Implementation of ++= macro for settings. */
def settingAppendNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(a: c.Expr[Append.Values[T, U]]): c.Expr[Setting[T]] =
{
@ -193,6 +202,7 @@ object TaskMacro {
val append = appendMacroImpl(c)(init.tree, a.tree)(AppendNInitName)
c.Expr[Setting[T]](append)
}
/** Implementation of -= macro for tasks. */
def taskRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[Task[T]]] =
{
@ -200,6 +210,7 @@ object TaskMacro {
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of -= macro for settings. */
def settingRemove1Impl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(v: c.Expr[U])(r: c.Expr[Remove.Value[T, U]]): c.Expr[Setting[T]] =
{
@ -207,6 +218,7 @@ object TaskMacro {
val remove = removeMacroImpl(c)(init.tree, r.tree)(Remove1InitName)
c.Expr[Setting[T]](remove)
}
/** Implementation of --= macro for tasks. */
def taskRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[Task[T]]] =
{
@ -214,6 +226,7 @@ object TaskMacro {
val remove = removeMacroImpl(c)(init.tree, r.tree)(RemoveNInitName)
c.Expr[Setting[Task[T]]](remove)
}
/** Implementation of --= macro for settings. */
def settingRemoveNImpl[T: c.WeakTypeTag, U: c.WeakTypeTag](c: blackbox.Context)(vs: c.Expr[U])(r: c.Expr[Remove.Values[T, U]]): c.Expr[Setting[T]] =
{
@ -231,6 +244,7 @@ object TaskMacro {
case x => ContextUtil.unexpectedTree(x)
}
}
private[this] def removeMacroImpl(c: blackbox.Context)(init: c.Tree, remove: c.Tree)(newName: String): c.Tree =
{
import c.universe._
@ -240,6 +254,7 @@ object TaskMacro {
case x => ContextUtil.unexpectedTree(x)
}
}
private[this] def transformMacroImpl(c: blackbox.Context)(init: c.Tree)(newName: String): c.Tree =
{
import c.universe._
@ -250,6 +265,7 @@ object TaskMacro {
}
Apply.apply(Select(target, TermName(newName).encodedName), init :: sourcePosition(c).tree :: Nil)
}
private[this] def sourcePosition(c: blackbox.Context): c.Expr[SourcePosition] =
{
import c.universe.reify
@ -262,6 +278,7 @@ object TaskMacro {
} else
reify { NoPosition }
}
private[this] def settingSource(c: blackbox.Context, path: String, name: String): String =
{
@tailrec def inEmptyPackage(s: c.Symbol): Boolean = s != c.universe.NoSymbol && (
@ -281,6 +298,7 @@ object TaskMacro {
def inputTaskMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[T]): c.Expr[Initialize[InputTask[T]]] =
inputTaskMacro0[T](c)(t)
def inputTaskDynMacroImpl[T: c.WeakTypeTag](c: blackbox.Context)(t: c.Expr[Initialize[Task[T]]]): c.Expr[Initialize[InputTask[T]]] =
inputTaskDynMacro0[T](c)(t)
@ -298,6 +316,7 @@ object TaskMacro {
val cond = c.Expr[T](conditionInputTaskTree(c)(t.tree))
Instance.contImpl[T, M](c, InitializeInstance, InputInitConvert, MixedBuilder)(Left(cond), inner)
}
private[this] def conditionInputTaskTree(c: blackbox.Context)(t: c.Tree): c.Tree =
{
import c.universe._

View File

@ -8,7 +8,6 @@ import KeyRanks.DSetting
import sbt.io.{ GlobFilter, Path }
import sbt.internal.util.AttributeKey
import sbt.util.Logger
object BuildPaths {
val globalBaseDirectory = AttributeKey[File]("global-base-directory", "The base directory for global sbt configuration and staging.", DSetting)
@ -73,11 +72,6 @@ object BuildPaths {
def projectStandard(base: File) = base / "project"
@deprecated("Use projectStandard. The alternative project directory location has been removed.", "0.13.0")
def projectHidden(base: File) = projectStandard(base)
@deprecated("Use projectStandard. The alternative project directory location has been removed.", "0.13.0")
def selectProjectDir(base: File, log: Logger) = projectStandard(base)
final val PluginsDirectoryName = "plugins"
final val DefaultTargetName = "target"
final val ConfigDirectoryName = ".sbt"

View File

@ -382,14 +382,6 @@ object Defaults extends BuildCommon {
def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] = generators { _.join.map(_.flatten) }
@deprecated("Use the new <key>.all(<ScopeFilter>) API", "0.13.0")
def inAllConfigurations[T](key: TaskKey[T]): Initialize[Task[Seq[T]]] = (state, thisProjectRef) flatMap { (state, ref) =>
val structure = Project structure state
val configurations = Project.getProject(ref, structure).toList.flatMap(_.configurations)
configurations.flatMap { conf =>
key in (ref, conf) get structure.data
} join
}
def watchTransitiveSourcesTask: Initialize[Task[Seq[File]]] = {
import ScopeFilter.Make.{ inDependencies => inDeps, _ }
val selectDeps = ScopeFilter(inAggregates(ThisProject) || inDeps(ThisProject))
@ -425,8 +417,6 @@ object Defaults extends BuildCommon {
}
}
@deprecated("Use scalaInstanceTask.", "0.13.0")
def scalaInstanceSetting = scalaInstanceTask
def scalaInstanceTask: Initialize[Task[ScalaInstance]] = Def.taskDyn {
// if this logic changes, ensure that `unmanagedScalaInstanceOnly` and `update` are changed
// appropriately to avoid cycles
@ -441,6 +431,7 @@ object Defaults extends BuildCommon {
scalaInstanceFromUpdate
}
}
// Returns the ScalaInstance only if it was not constructed via `update`
// This is necessary to prevent cycles between `update` and `scalaInstance`
private[sbt] def unmanagedScalaInstanceOnly: Initialize[Task[Option[ScalaInstance]]] = Def.taskDyn {
@ -527,14 +518,7 @@ object Defaults extends BuildCommon {
)) ++ inScope(GlobalScope)(Seq(
derive(testGrouping := singleTestGroupDefault.value)
))
@deprecated("Doesn't provide for closing the underlying resources.", "0.13.1")
def testLogger(manager: Streams, baseKey: Scoped)(tdef: TestDefinition): Logger =
{
val scope = baseKey.scope
val extra = scope.extra match { case Select(x) => x; case _ => AttributeMap.empty }
val key = ScopedKey(scope.copy(extra = Select(testExtra(extra, tdef))), baseKey.key)
manager(key).log
}
private[this] def closeableTestLogger(manager: Streams, baseKey: Scoped, buffered: Boolean)(tdef: TestDefinition): TestLogger.PerTest =
{
val scope = baseKey.scope
@ -543,12 +527,18 @@ object Defaults extends BuildCommon {
val s = manager(key)
new TestLogger.PerTest(s.log, () => s.close(), buffered)
}
def buffered(log: Logger): Logger = new BufferedLogger(FullLogger(log))
def testExtra(extra: AttributeMap, tdef: TestDefinition): AttributeMap =
{
val mod = tdef.fingerprint match { case f: SubclassFingerprint => f.isModule; case f: AnnotatedFingerprint => f.isModule; case _ => false }
extra.put(name.key, tdef.name).put(isModule, mod)
def testExtra(extra: AttributeMap, tdef: TestDefinition): AttributeMap = {
val mod = tdef.fingerprint match {
case f: SubclassFingerprint => f.isModule
case f: AnnotatedFingerprint => f.isModule
case _ => false
}
extra.put(name.key, tdef.name).put(isModule, mod)
}
def singleTestGroup(key: Scoped): Initialize[Task[Seq[Tests.Group]]] = inTask(key, singleTestGroupDefault)
def singleTestGroupDefault: Initialize[Task[Seq[Tests.Group]]] = Def.task {
val tests = definedTests.value
@ -740,13 +730,6 @@ object Defaults extends BuildCommon {
def packageDocMappings = doc map { Path.allSubpaths(_).toSeq }
def packageSrcMappings = concatMappings(resourceMappings, sourceMappings)
@deprecated("Use `packageBinMappings` instead", "0.12.0")
def packageBinTask = packageBinMappings
@deprecated("Use `packageDocMappings` instead", "0.12.0")
def packageDocTask = packageDocMappings
@deprecated("Use `packageSrcMappings` instead", "0.12.0")
def packageSrcTask = packageSrcMappings
private type Mappings = Initialize[Task[Seq[(File, String)]]]
def concatMappings(as: Mappings, bs: Mappings) = (as zipWith bs)((a, b) => (a, b) map { case (a, b) => a ++ b })
@ -772,6 +755,7 @@ object Defaults extends BuildCommon {
val f = artifactName.value
(crossTarget.value / f(ScalaVersion((scalaVersion in artifactName).value, (scalaBinaryVersion in artifactName).value), projectID.value, art.value)).asFile
}
def artifactSetting: Initialize[Artifact] =
Def.setting {
val a = artifact.value
@ -789,18 +773,14 @@ object Defaults extends BuildCommon {
.withConfigurations(cOpt.toVector)
}
}
@deprecated("The configuration(s) should not be decided based on the classifier.", "1.0")
@deprecated("The configuration(s) should not be decided based on the classifier.", "1.0.0")
def artifactConfigurations(base: Artifact, scope: Configuration, classifier: Option[String]): Iterable[Configuration] =
classifier match {
case Some(c) => Artifact.classifierConf(c) :: Nil
case None => scope :: Nil
}
@deprecated("Use `Util.pairID` instead", "0.12.0")
def pairID = Util.pairID
@deprecated("Use `packageTaskSettings` instead", "0.12.0")
def packageTasks(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) = packageTaskSettings(key, mappingsTask)
def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) =
inTask(key)(Seq(
key in TaskGlobal := packageTask.value,
@ -810,6 +790,7 @@ object Defaults extends BuildCommon {
artifact := artifactSetting.value,
artifactPath := artifactPathSetting(artifact).value
))
def packageTask: Initialize[Task[File]] =
Def.task {
val config = packageConfiguration.value
@ -817,12 +798,16 @@ object Defaults extends BuildCommon {
Package(config, s.cacheStoreFactory, s.log)
config.jar
}
def packageConfigurationTask: Initialize[Task[Package.Configuration]] =
Def.task { new Package.Configuration(mappings.value, artifactPath.value, packageOptions.value) }
def askForMainClass(classes: Seq[String]): Option[String] =
sbt.SelectMainClass(Some(SimpleReader readLine _), classes)
def pickMainClass(classes: Seq[String]): Option[String] =
sbt.SelectMainClass(None, classes)
private def pickMainClassOrWarn(classes: Seq[String], logger: Logger): Option[String] = {
classes match {
case multiple if multiple.size > 1 => logger.warn("Multiple main classes detected. Run 'show discoveredMainClasses' to see the list")
@ -950,8 +935,6 @@ object Defaults extends BuildCommon {
def bgStopTask: Initialize[InputTask[Unit]] = foreachJobTask { (manager, handle) => manager.stop(handle) }
def bgWaitForTask: Initialize[InputTask[Unit]] = foreachJobTask { (manager, handle) => manager.waitFor(handle) }
@deprecated("Use `docTaskSettings` instead", "0.12.0")
def docSetting(key: TaskKey[File]) = docTaskSettings(key)
def docTaskSettings(key: TaskKey[File] = doc): Seq[Setting[_]] = inTask(key)(Seq(
apiMappings ++= { if (autoAPIMappings.value) APIMappings.extract(dependencyClasspath.value, streams.value.log).toMap else Map.empty[File, URL] },
fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"),
@ -1022,9 +1005,6 @@ object Defaults extends BuildCommon {
finally w.close() // workaround for #937
}
@deprecated("Use inTask(compile)(compileInputsSettings)", "0.13.0")
def compileTaskSettings: Seq[Setting[_]] = inTask(compile)(compileInputsSettings)
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
val setup: Setup = compileIncSetup.value
// TODO - expose bytecode manipulation phase.
@ -1166,38 +1146,6 @@ object Defaults extends BuildCommon {
recurse ?? Nil
}
@deprecated("Use the new <key>.all(<ScopeFilter>) API", "0.13.0")
def inDependencies[T](key: SettingKey[T], default: ProjectRef => T, includeRoot: Boolean = true, classpath: Boolean = true, aggregate: Boolean = false): Initialize[Seq[T]] =
forDependencies[T, T](ref => (key in ref) ?? default(ref), includeRoot, classpath, aggregate)
@deprecated("Use the new <key>.all(<ScopeFilter>) API", "0.13.0")
def forDependencies[T, V](init: ProjectRef => Initialize[V], includeRoot: Boolean = true, classpath: Boolean = true, aggregate: Boolean = false): Initialize[Seq[V]] =
Def.bind((loadedBuild, thisProjectRef).identity) {
case (lb, base) =>
transitiveDependencies(base, lb, includeRoot, classpath, aggregate) map init join;
}
def transitiveDependencies(base: ProjectRef, structure: LoadedBuild, includeRoot: Boolean, classpath: Boolean = true, aggregate: Boolean = false): Seq[ProjectRef] =
{
def tdeps(enabled: Boolean, f: ProjectRef => Seq[ProjectRef]): Seq[ProjectRef] =
{
val full = if (enabled) Dag.topologicalSort(base)(f) else Nil
if (includeRoot) full else full dropRight 1
}
def fullCp = tdeps(classpath, getDependencies(structure, classpath = true, aggregate = false))
def fullAgg = tdeps(aggregate, getDependencies(structure, classpath = false, aggregate = true))
(classpath, aggregate) match {
case (true, true) => (fullCp ++ fullAgg).distinct
case (true, false) => fullCp
case _ => fullAgg
}
}
def getDependencies(structure: LoadedBuild, classpath: Boolean = true, aggregate: Boolean = false): ProjectRef => Seq[ProjectRef] =
ref => Project.getProject(ref, structure).toList flatMap { p =>
(if (classpath) p.dependencies.map(_.project) else Nil) ++
(if (aggregate) p.aggregate else Nil)
}
val CompletionsID = "completions"
def noAggregation: Seq[Scoped] = Seq(run, runMain, bgRun, bgRunMain, console, consoleQuick, consoleProject)
@ -1293,12 +1241,14 @@ object Classpaths {
def packaged(pkgTasks: Seq[TaskKey[File]]): Initialize[Task[Map[Artifact, File]]] =
enabledOnly(packagedArtifact.task, pkgTasks) apply (_.join.map(_.toMap))
def artifactDefs(pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[Artifact]] =
enabledOnly(artifact, pkgTasks)
def enabledOnly[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[T]] =
(forallIn(key, pkgTasks) zipWith forallIn(publishArtifact, pkgTasks))(_ zip _ collect { case (a, true) => a })
def forallIn[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] =
def forallIn[T](key: Scoped.ScopingSetting[SettingKey[T]], pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] =
pkgTasks.map(pkg => key in pkg.scope in pkg).join
private[this] def publishGlobalDefaults = Defaults.globalDefaults(Seq(
@ -1913,18 +1863,23 @@ object Classpaths {
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
}*/
def defaultRepositoryFilter = (repo: MavenRepository) => !repo.root.startsWith("file:")
def getPublishTo(repo: Option[Resolver]): Resolver = repo getOrElse sys.error("Repository for publishing is not specified.")
def defaultRepositoryFilter: MavenRepository => Boolean = repo => !repo.root.startsWith("file:")
def getPublishTo(repo: Option[Resolver]): Resolver =
repo getOrElse sys.error("Repository for publishing is not specified.")
def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging = UpdateLogging.DownloadOnly) =
new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging)
@deprecated("Previous semantics allowed overwriting cached files, which was unsafe. Please specify overwrite parameter.", "0.13.2")
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging): PublishConfiguration =
publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = true)
def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging = UpdateLogging.DownloadOnly, overwrite: Boolean = false) =
def publishConfig(
artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String],
resolverName: String = "local", logging: UpdateLogging = UpdateLogging.DownloadOnly,
overwrite: Boolean = false
) =
new PublishConfiguration(ivyFile, resolverName, artifacts, checksums.toVector, logging, overwrite)
def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath
def deliverPattern(outputPath: File): String =
(outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath
def projectDependenciesTask: Initialize[Task[Seq[ModuleID]]] =
Def.task {
@ -1937,10 +1892,12 @@ object Classpaths {
}
}
}
private[sbt] def depMap: Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] =
Def.taskDyn {
depMap(buildDependencies.value classpathTransitiveRefs thisProjectRef.value, settingsData.value, streams.value.log)
}
private[sbt] def depMap(projects: Seq[ProjectRef], data: Settings[Scope], log: Logger): Initialize[Task[Map[ModuleRevisionId, ModuleDescriptor]]] =
Def.value {
projects.flatMap(ivyModule in _ get data).join.map { mod =>
@ -2156,54 +2113,41 @@ object Classpaths {
flatten(defaultConfiguration in p get data) getOrElse Configurations.Default
def flatten[T](o: Option[Option[T]]): Option[T] = o flatMap idFun
val sbtIvySnapshots = Resolver.sbtIvyRepo("snapshots")
lazy val typesafeReleases = Resolver.typesafeIvyRepo("releases")
@deprecated("Use `typesafeReleases` instead", "0.12.0")
lazy val typesafeResolver = typesafeReleases
@deprecated("Use `Resolver.typesafeIvyRepo` instead", "0.12.0")
def typesafeRepo(status: String) = Resolver.typesafeIvyRepo(status)
lazy val sbtPluginReleases = Resolver.sbtPluginRepo("releases")
val sbtIvySnapshots: URLRepository = Resolver.sbtIvyRepo("snapshots")
val typesafeReleases: URLRepository = Resolver.typesafeIvyRepo("releases")
val sbtPluginReleases: URLRepository = Resolver.sbtPluginRepo("releases")
def modifyForPlugin(plugin: Boolean, dep: ModuleID): ModuleID =
if (plugin) dep.withConfigurations(Some(Provided.name)) else dep
@deprecated("Explicitly specify the organization using the other variant.", "0.13.0")
def autoLibraryDependency(auto: Boolean, plugin: Boolean, version: String): Seq[ModuleID] =
if (auto)
modifyForPlugin(plugin, ScalaArtifacts.libraryDependency(version)) :: Nil
else
Nil
def autoLibraryDependency(auto: Boolean, plugin: Boolean, org: String, version: String): Seq[ModuleID] =
if (auto)
modifyForPlugin(plugin, ModuleID(org, ScalaArtifacts.LibraryID, version)) :: Nil
else
Nil
def addUnmanagedLibrary: Seq[Setting[_]] = Seq(
unmanagedJars in Compile ++= unmanagedScalaLibrary.value
)
def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] =
Def.taskDyn {
if (autoScalaLibrary.value && scalaHome.value.isDefined)
Def.task { scalaInstance.value.libraryJar :: Nil }
else
Def.task { Nil }
}
def addUnmanagedLibrary: Seq[Setting[_]] =
Seq(unmanagedJars in Compile ++= unmanagedScalaLibrary.value)
def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] = Def.taskDyn {
if (autoScalaLibrary.value && scalaHome.value.isDefined)
Def.task { scalaInstance.value.libraryJar :: Nil }
else
Def.task { Nil }
}
import DependencyFilter._
def managedJars(config: Configuration, jarTypes: Set[String], up: UpdateReport): Classpath =
up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes)).toSeq.map {
case (conf, module, art, file) =>
Attributed(file)(AttributeMap.empty.put(artifact.key, art).put(moduleID.key, module).put(configuration.key, config))
} distinct;
Attributed(file)(
AttributeMap.empty.put(artifact.key, art).put(moduleID.key, module).put(configuration.key, config)
)
}.distinct
def findUnmanagedJars(config: Configuration, base: File, filter: FileFilter, excl: FileFilter): Classpath =
(base * (filter -- excl) +++ (base / config.name).descendantsExcept(filter, excl)).classpath
@deprecated("Specify the classpath that includes internal dependencies", "0.13.0")
def autoPlugins(report: UpdateReport): Seq[String] = autoPlugins(report, Nil)
def autoPlugins(report: UpdateReport, internalPluginClasspath: Seq[File]): Seq[String] =
{
val pluginClasspath = report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath
@ -2228,14 +2172,6 @@ object Classpaths {
}
)
@deprecated("Doesn't properly handle non-standard Scala organizations.", "0.13.0")
def substituteScalaFiles(scalaInstance: ScalaInstance, report: UpdateReport): UpdateReport =
substituteScalaFiles(scalaInstance, ScalaArtifacts.Organization, report)
@deprecated("Directly provide the jar files per Scala version.", "0.13.0")
def substituteScalaFiles(scalaInstance: ScalaInstance, scalaOrg: String, report: UpdateReport): UpdateReport =
substituteScalaFiles(scalaOrg, report)(const(scalaInstance.allJars))
def substituteScalaFiles(scalaOrg: String, report: UpdateReport)(scalaJars: String => Seq[File]): UpdateReport =
report.substitute { (configuration, module, arts) =>
if (module.organization == scalaOrg) {
@ -2384,14 +2320,12 @@ trait BuildExtra extends BuildCommon with DefExtra {
Seq(artLocal := artifact.value, taskLocal := taskDef.value, art, pkgd)
}
// because this was commonly used, this might need to be kept longer than usual
@deprecated("In build.sbt files, this call can be removed. In other cases, this can usually be replaced by Seq.", "0.13.0")
def seq(settings: Setting[_]*): SettingsDefinition = new Def.SettingList(settings)
def externalIvySettings(file: Initialize[File] = inBase("ivysettings.xml"), addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] =
externalIvySettingsURI(file(_.toURI), addMultiResolver)
def externalIvySettingsURL(url: URL, addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] =
externalIvySettingsURI(Def.value(url.toURI), addMultiResolver)
def externalIvySettingsURI(uri: Initialize[URI], addMultiResolver: Boolean = true): Setting[Task[IvyConfiguration]] =
{
val other = Def.task { (baseDirectory.value, appConfiguration.value, projectResolver.value, updateOptions.value, streams.value) }
@ -2404,10 +2338,12 @@ trait BuildExtra extends BuildCommon with DefExtra {
}
}).value
}
private[this] def inBase(name: String): Initialize[File] = Def.setting { baseDirectory.value / name }
def externalIvyFile(file: Initialize[File] = inBase("ivy.xml"), iScala: Initialize[Option[IvyScala]] = ivyScala): Setting[Task[ModuleSettings]] =
moduleSettings := IvyFileConfiguration(ivyValidate.value, iScala.value, file.value, managedScalaInstance.value)
def externalPom(file: Initialize[File] = inBase("pom.xml"), iScala: Initialize[Option[IvyScala]] = ivyScala): Setting[Task[ModuleSettings]] =
moduleSettings := PomConfiguration(ivyValidate.value, ivyScala.value, file.value, managedScalaInstance.value)
@ -2419,6 +2355,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
val args = spaceDelimited().parsed
r.run(mainClass, data(cp), baseArguments ++ args, streams.value.log).get
}
def runTask(config: Configuration, mainClass: String, arguments: String*): Initialize[Task[Unit]] =
Def.task {
val cp = (fullClasspath in config).value
@ -2426,9 +2363,14 @@ trait BuildExtra extends BuildCommon with DefExtra {
val s = streams.value
r.run(mainClass, data(cp), arguments, s.log).get
}
// public API
/** Returns a vector of settings that create custom run input task. */
def fullRunInputTask(scoped: InputKey[Unit], config: Configuration, mainClass: String, baseArguments: String*): Vector[Setting[_]] =
def fullRunInputTask(scoped: InputKey[Unit], config: Configuration, mainClass: String, baseArguments: String*): Vector[Setting[_]] = {
// Use Def.inputTask with the `Def.spaceDelimited()` parser
def inputTask[T](f: TaskKey[Seq[String]] => Initialize[Task[T]]): Initialize[InputTask[T]] =
InputTask.apply(Def.value((s: State) => Def.spaceDelimited()))(f)
Vector(
scoped := (inputTask { result =>
(initScoped(scoped.scopedKey, runnerInit)
@ -2440,6 +2382,8 @@ trait BuildExtra extends BuildCommon with DefExtra {
}
}).evaluated
) ++ inTask(scoped)(forkOptions := forkOptionsTask.value)
}
// public API
/** Returns a vector of settings that create custom run task. */
def fullRunTask(scoped: TaskKey[Unit], config: Configuration, mainClass: String, arguments: String*): Vector[Setting[_]] =
@ -2467,15 +2411,13 @@ trait BuildExtra extends BuildCommon with DefExtra {
def filterKeys(ss: Seq[Setting[_]], transitive: Boolean = false)(f: ScopedKey[_] => Boolean): Seq[Setting[_]] =
ss filter (s => f(s.key) && (!transitive || s.dependencies.forall(f)))
}
trait DefExtra {
private[this] val ts: TaskSequential = new TaskSequential {}
implicit def toTaskSequential(d: Def.type): TaskSequential = ts
}
trait BuildCommon {
@deprecated("Use Def.inputTask with the `Def.spaceDelimited()` parser.", "0.13.0")
def inputTask[T](f: TaskKey[Seq[String]] => Initialize[Task[T]]): Initialize[InputTask[T]] =
InputTask.apply(Def.value((s: State) => Def.spaceDelimited()))(f)
trait BuildCommon {
/**
* Allows a String to be used where a `NameFilter` is expected.
* Asterisks (`*`) in the string are interpreted as wildcards.

View File

@ -27,9 +27,6 @@ import scala.Console.RED
import std.Transform.DummyTaskMap
import TaskName._
@deprecated("Use EvaluateTaskConfig instead.", "0.13.5")
final case class EvaluateConfig(cancelable: Boolean, restrictions: Seq[Tags.Rule], checkCycles: Boolean = false, progress: ExecuteProgress[Task] = EvaluateTask.defaultProgress)
/**
* An API that allows you to cancel executing tasks upon some signal.
*
@ -40,8 +37,9 @@ trait RunningTaskEngine {
/** Attempts to kill and shutdown the running task engine.*/
def cancelAndShutdown(): Unit
}
/**
* A strategy for being able to cancle tasks.
* A strategy for being able to cancel tasks.
*
* Implementations of this trait determine what will trigger `cancel()` for
* the task engine, providing in the `start` method.
@ -51,6 +49,7 @@ trait RunningTaskEngine {
trait TaskCancellationStrategy {
/** The state used by this task. */
type State
/**
* Called when task evaluation starts.
*
@ -58,9 +57,11 @@ trait TaskCancellationStrategy {
* @return Whatever state you need to cleanup in your finish method.
*/
def onTaskEngineStart(canceller: RunningTaskEngine): State
/** Called when task evaluation completes, either in success or failure. */
def onTaskEngineFinish(state: State): Unit
}
object TaskCancellationStrategy {
/** An empty handler that does not cancel tasks. */
object Null extends TaskCancellationStrategy {
@ -69,14 +70,16 @@ object TaskCancellationStrategy {
def onTaskEngineFinish(state: Unit): Unit = ()
override def toString: String = "Null"
}
/** Cancel handler which registers for SIGINT and cancels tasks when it is received. */
object Signal extends TaskCancellationStrategy {
type State = Signals.Registration
def onTaskEngineStart(canceller: RunningTaskEngine): Signals.Registration = {
def onTaskEngineStart(canceller: RunningTaskEngine): Signals.Registration =
Signals.register(() => canceller.cancelAndShutdown())
}
def onTaskEngineFinish(registration: Signals.Registration): Unit =
registration.remove()
def onTaskEngineFinish(registration: Signals.Registration): Unit = registration.remove()
override def toString: String = "Signal"
}
}
@ -93,28 +96,15 @@ sealed trait EvaluateTaskConfig {
def checkCycles: Boolean
def progressReporter: ExecuteProgress[Task]
def cancelStrategy: TaskCancellationStrategy
/**
* If true, we force a finalizer/gc run (or two) after task execution completes when needed.
*/
/** If true, we force a finalizer/gc run (or two) after task execution completes when needed. */
def forceGarbageCollection: Boolean
/**
* Interval to force GC.
*/
/** Interval to force GC. */
def minForcegcInterval: Duration
}
object EvaluateTaskConfig {
@deprecated("Use the alternative that specifies minForcegcInterval", "0.13.9")
def apply(
restrictions: Seq[Tags.Rule],
checkCycles: Boolean,
progressReporter: ExecuteProgress[Task],
cancelStrategy: TaskCancellationStrategy,
forceGarbageCollection: Boolean
): EvaluateTaskConfig =
apply(restrictions, checkCycles, progressReporter, cancelStrategy, forceGarbageCollection,
GCUtil.defaultMinForcegcInterval)
object EvaluateTaskConfig {
/** Raw constructor for EvaluateTaskConfig. */
def apply(
restrictions: Seq[Tags.Rule],
@ -123,26 +113,28 @@ object EvaluateTaskConfig {
cancelStrategy: TaskCancellationStrategy,
forceGarbageCollection: Boolean,
minForcegcInterval: Duration
): EvaluateTaskConfig = {
val r = restrictions
val check = checkCycles
val cs = cancelStrategy
val pr = progressReporter
val fgc = forceGarbageCollection
val mfi = minForcegcInterval
object SimpleEvaluateTaskConfig extends EvaluateTaskConfig {
def restrictions = r
def checkCycles = check
def progressReporter = pr
def cancelStrategy = cs
def forceGarbageCollection = fgc
def minForcegcInterval = mfi
}
SimpleEvaluateTaskConfig
}
): EvaluateTaskConfig =
DefaultEvaluateTaskConfig(
restrictions, checkCycles, progressReporter, cancelStrategy, forceGarbageCollection, minForcegcInterval
)
private[this] case class DefaultEvaluateTaskConfig(
restrictions: Seq[Tags.Rule],
checkCycles: Boolean,
progressReporter: ExecuteProgress[Task],
cancelStrategy: TaskCancellationStrategy,
forceGarbageCollection: Boolean,
minForcegcInterval: Duration
) extends EvaluateTaskConfig
}
final case class PluginData(dependencyClasspath: Seq[Attributed[File]], definitionClasspath: Seq[Attributed[File]], resolvers: Option[Seq[Resolver]], report: Option[UpdateReport], scalacOptions: Seq[String]) {
final case class PluginData(
dependencyClasspath: Seq[Attributed[File]],
definitionClasspath: Seq[Attributed[File]],
resolvers: Option[Seq[Resolver]],
report: Option[UpdateReport],
scalacOptions: Seq[String]
) {
val classpath: Seq[Attributed[File]] = definitionClasspath ++ dependencyClasspath
}
@ -162,32 +154,6 @@ object EvaluateTask {
val SystemProcessors = Runtime.getRuntime.availableProcessors
@deprecated("Use extractedTaskConfig.", "0.13.0")
def defaultConfig(state: State): EvaluateConfig =
{
val extracted = Project.extract(state)
extractedConfig(extracted, extracted.structure, state)
}
@deprecated("Use extractedTaskConfig.", "0.13.0")
def defaultConfig(extracted: Extracted, structure: BuildStructure) =
EvaluateConfig(false, restrictions(extracted, structure), progress = defaultProgress)
@deprecated("Use other extractedTaskConfig", "0.13.2")
def extractedConfig(extracted: Extracted, structure: BuildStructure): EvaluateConfig =
{
val workers = restrictions(extracted, structure)
val canCancel = cancelable(extracted, structure)
EvaluateConfig(cancelable = canCancel, restrictions = workers, progress = defaultProgress)
}
@deprecated("Use other extractedTaskConfig", "0.13.5")
def extractedConfig(extracted: Extracted, structure: BuildStructure, state: State): EvaluateConfig =
{
val workers = restrictions(extracted, structure)
val canCancel = cancelable(extracted, structure)
val progress = executeProgress(extracted, structure, state)
EvaluateConfig(cancelable = canCancel, restrictions = workers, progress = progress)
}
def extractedTaskConfig(extracted: Extracted, structure: BuildStructure, state: State): EvaluateTaskConfig =
{
val rs = restrictions(extracted, structure)
@ -207,15 +173,19 @@ object EvaluateTask {
val extracted = Project.extract(state)
restrictions(extracted, extracted.structure)
}
def restrictions(extracted: Extracted, structure: BuildStructure): Seq[Tags.Rule] =
getSetting(Keys.concurrentRestrictions, defaultRestrictions(extracted, structure), extracted, structure)
def maxWorkers(extracted: Extracted, structure: BuildStructure): Int =
if (getSetting(Keys.parallelExecution, true, extracted, structure))
SystemProcessors
else
1
def cancelable(extracted: Extracted, structure: BuildStructure): Boolean =
getSetting(Keys.cancelable, false, extracted, structure)
def cancelStrategy(extracted: Extracted, structure: BuildStructure, state: State): TaskCancellationStrategy =
getSetting(Keys.taskCancelStrategy, { (_: State) => TaskCancellationStrategy.Null }, extracted, structure)(state)

View File

@ -439,13 +439,6 @@ object Keys {
val state = Def.stateKey
val streamsManager = Def.streamsManagerKey
@deprecated("Implementation detail.", "0.13.1")
val isDummyTask = Def.isDummyTask
@deprecated("Implementation detail.", "0.13.1")
val dummyState = Def.dummyState
@deprecated("Implementation detail.", "0.13.2")
val dummyStreamsManager = Def.dummyStreamsManager
val stateStreams = AttributeKey[Streams]("streams-manager", "Streams manager, which provides streams for different contexts. Setting this on State will override the default Streams implementation.")
val resolvedScoped = Def.resolvedScoped
val pluginData = TaskKey[PluginData]("plugin-data", "Information from the plugin build needed in the main build definition.", DTask)
@ -464,11 +457,4 @@ object Keys {
type Streams = std.Streams[ScopedKey[_]]
type TaskStreams = std.TaskStreams[ScopedKey[_]]
@deprecated("Implementation detail.", "0.13.1")
def dummy[T: Manifest](name: String, description: String): (TaskKey[T], Task[T]) = Def.dummy(name, description)
@deprecated("Implementation detail.", "0.13.1")
def dummyTask[T](name: String): Task[T] = Def.dummyTask(name)
@deprecated("Implementation detail.", "0.13.1")
def isDummy(t: Task[_]): Boolean = Def.isDummy(t)
}

View File

@ -20,6 +20,7 @@ import sbt.internal.{
ProjectNavigation,
Script,
SessionSettings,
SetResult,
SettingCompletions,
LogManager,
DefaultBackgroundJobService
@ -323,7 +324,9 @@ object BuiltinCommands {
val result = Load.mkEval(classpath, s.baseDir, Nil).eval(arg, srcName = "<eval>", imports = new EvalImports(Nil, ""))
s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}")
}
def sessionCommand = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command)
def sessionCommand: Command = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command)
def reapply(newSession: SessionSettings, structure: BuildStructure, s: State): State =
{
s.log.info("Reapplying settings...")
@ -333,7 +336,8 @@ object BuiltinCommands {
val newStructure = Load.reapply(withLogger.mergeSettings, structure)(Project.showContextKey(newSession, structure))
Project.setProject(newSession, newStructure, s)
}
def set = Command(SetCommand, setBrief, setDetailed)(setParser) {
def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) {
case (s, (all, arg)) =>
val extracted = Project extract s
import extracted._
@ -355,18 +359,16 @@ object BuiltinCommands {
s.log.debug(setResult.verboseSummary)
reapply(setResult.session, structure, s)
}
// @deprecated("Use SettingCompletions.setThis", "0.13.0")
def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String) =
def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String): SetResult =
SettingCompletions.setThis(s, extracted, settings, arg)
def inspect = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
def inspect: Command = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
case (s, (option, sk)) =>
s.log.info(Inspect.output(s, option, sk))
s
}
@deprecated("Use Inspect.output", "0.13.0")
def inspectOutput(s: State, option: Inspect.Mode, sk: Def.ScopedKey[_]): String = Inspect.output(s, option, sk)
def lastGrep = Command(LastGrepCommand, lastGrepBrief, lastGrepDetailed)(lastGrepParser) {
case (s, (pattern, Some(sks))) =>
val (str, _, display) = extractLast(s)
@ -388,18 +390,6 @@ object BuiltinCommands {
SettingCompletions.settingParser(structure.data, structure.index.keyMap, currentProject)
}
@deprecated("Use Inspect.parser", "0.13.0")
def inspectParser: State => Parser[(Inspect.Mode, Def.ScopedKey[_])] = Inspect.parser
@deprecated("Use Inspect.spacedModeParser", "0.13.0")
val spacedModeParser: State => Parser[Inspect.Mode] = Inspect.spacedModeParser
@deprecated("Use Inspect.allKeyParser", "0.13.0")
def allKeyParser(s: State): Parser[AttributeKey[_]] = Inspect.allKeyParser(s)
@deprecated("Use Inspect.spacedKeyParser", "0.13.0")
val spacedKeyParser: State => Parser[Def.ScopedKey[_]] = Inspect.spacedKeyParser
val spacedAggregatedParser = (s: State) => Act.requireSession(s, token(Space) ~> Act.aggregatedKeyParser(s))
val aggregatedKeyValueParser: State => Parser[Option[AnyKeys]] = (s: State) => spacedAggregatedParser(s).map(x => Act.keyValues(s)(x)).?
@ -532,12 +522,9 @@ object BuiltinCommands {
removeBase.map(toRemove => (xs: List[URI]) => xs.filterNot(toRemove.toSet))
}
def project = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
def project: Command = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
def loadFailed = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
@deprecated("No longer used.", "0.13.2")
def handleLoadFailed(s: State): State = doLoadFailed(s, "")
def loadFailed: Command = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
@tailrec
private[this] def doLoadFailed(s: State, loadArg: String): State =
@ -548,28 +535,33 @@ object BuiltinCommands {
def ignoreMsg = if (Project.isProjectLoaded(s)) "using previously loaded project" else "no project loaded"
result match {
case "" => retry
case _ if matches("retry") => retry
case _ if matches(Quit) => s.exit(ok = false)
case _ if matches("ignore") =>
s.log.warn(s"Ignoring load failure: $ignoreMsg."); s
case _ if matches("last") => LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s
case _ => println("Invalid response."); doLoadFailed(s, loadArg)
case "" => retry
case _ if matches("retry") => retry
case _ if matches(Quit) => s.exit(ok = false)
case _ if matches("ignore") => s.log.warn(s"Ignoring load failure: $ignoreMsg."); s
case _ if matches("last") => LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s
case _ => println("Invalid response."); doLoadFailed(s, loadArg)
}
}
def loadProjectCommands(arg: String) =
def loadProjectCommands(arg: String): List[String] =
StashOnFailure ::
(OnFailure + " " + loadProjectCommand(LoadFailed, arg)) ::
loadProjectCommand(LoadProjectImpl, arg) ::
PopOnFailure ::
State.FailureWall ::
Nil
def loadProject = Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) => loadProjectCommands(arg) ::: s }
def loadProject: Command =
Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) =>
loadProjectCommands(arg) ::: s
}
private[this] def loadProjectParser = (s: State) => matched(Project.loadActionParser)
private[this] def loadProjectCommand(command: String, arg: String): String = s"$command $arg".trim
def loadProjectImpl = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
def loadProjectImpl: Command = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
def doLoadProject(s0: State, action: LoadAction.Value): State =
{
val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
@ -590,6 +582,7 @@ object BuiltinCommands {
SessionSettings.checkSession(session, s)
Project.setProject(session, structure, s)
}
def registerCompilerCache(s: State): State =
{
val maxCompilers = System.getProperty("sbt.resident.limit")
@ -605,7 +598,7 @@ object BuiltinCommands {
s.put(Keys.stateCompilerCache, cache)
}
def server = Command.command(Server, Help.more(Server, ServerDetailed)) { s0 =>
def server: Command = Command.command(Server, Help.more(Server, ServerDetailed)) { s0 =>
import sbt.internal.{ ConsolePromptEvent, ConsoleUnpromptEvent }
val exchange = StandardMain.exchange
val s1 = exchange run s0

View File

@ -122,11 +122,5 @@ object MainLoop {
newState
}
@deprecated("Use State.handleError", "0.13.0")
def handleException(e: Throwable, s: State): State = s.handleError(e)
@deprecated("Use State.handleError", "0.13.0")
def handleException(t: Throwable, s: State, log: Logger): State = State.handleException(t, s, log)
def logFullException(e: Throwable, log: Logger): Unit = State.logFullException(e, log)
}

View File

@ -62,8 +62,6 @@ object DefaultOptions {
def addResolvers: Setting[_] = Keys.resolvers ++= { resolvers(Keys.isSnapshot.value) }
def addPluginResolvers: Setting[_] = Keys.resolvers ++= pluginResolvers(Keys.sbtPlugin.value, Keys.isSnapshot.value)
@deprecated("Use `credentials(State)` instead to make use of configuration path dynamically configured via `Keys.globalSettingsDirectory`; relying on ~/.ivy2 is not recommended anymore.", "0.12.0")
def credentials: Credentials = Credentials(userHome / ".ivy2" / ".credentials")
def credentials(state: State): Credentials = Credentials(getGlobalSettingsDirectory(state, getGlobalBase(state)) / ".credentials")
def addCredentials: Setting[_] = Keys.credentials += { credentials(Keys.state.value) }

View File

@ -69,10 +69,12 @@ sealed trait ProjectDefinition[PR <: ProjectReference] {
private[sbt] def autoPlugins: Seq[AutoPlugin]
override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode
override final def equals(o: Any) = o match {
case p: ProjectDefinition[_] => p.getClass == this.getClass && p.id == id && p.base == base
case _ => false
}
override def toString =
{
val agg = ifNonEmpty("aggregate", aggregate)
@ -82,8 +84,10 @@ sealed trait ProjectDefinition[PR <: ProjectReference] {
val fields = s"id $id" :: s"base: $base" :: agg ::: dep ::: conf ::: (s"plugins: List($plugins)" :: autos)
s"Project(${fields.mkString(", ")})"
}
private[this] def ifNonEmpty[T](label: String, ts: Iterable[T]): List[String] = if (ts.isEmpty) Nil else s"$label: $ts" :: Nil
}
sealed trait Project extends ProjectDefinition[ProjectReference] {
private[sbt] def settingsEval: Eval[Seq[Def.Setting[_]]]
private[sbt] def aggregateEval: Eval[Seq[ProjectReference]]
@ -119,6 +123,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] {
settingsEval,
configurations, plugins, autoPlugins, projectOrigin)
}
def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project =
{
def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef
@ -224,6 +229,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] {
unresolved(id, base, aggregateEval = aggregateEval, dependenciesEval = dependenciesEval, delegatesEval = delegatesEval, settingsEval, configurations, plugins, autoPlugins, origin)
}
}
sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {
/** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]].*/
def autoPlugins: Seq[AutoPlugin]
@ -546,7 +552,6 @@ object Project extends ProjectExtra {
def reverseDependencies(cMap: Map[ScopedKey[_], Flattened], scoped: ScopedKey[_]): Iterable[ScopedKey[_]] =
for ((key, compiled) <- cMap; dep <- compiled.dependencies if dep == scoped) yield key
//@deprecated("Use SettingCompletions.setAll when available.", "0.13.0")
def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings =
SettingCompletions.setAll(extracted, settings).session
@ -609,14 +614,19 @@ object Project extends ProjectExtra {
import SessionVar.{ persistAndSet, resolveContext, set, transform => tx }
def updateState(f: (State, S) => State): Def.Initialize[Task[S]] = i(t => tx(t, f))
def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] = (Keys.resolvedScoped, i) { (scoped, task) =>
tx(task, (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))
}
def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] =
(Keys.resolvedScoped, i)((scoped, task) =>
tx(task, (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))
)
def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] =
(i, Keys.resolvedScoped)((t, scoped) => tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)))
(i, Keys.resolvedScoped)((t, scoped) =>
tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))
)
}
import reflect.macros._
import scala.reflect.macros._
def projectMacroImpl(c: blackbox.Context): c.Expr[Project] =
{
@ -631,44 +641,62 @@ private[sbt] trait GeneratedRootProject
trait ProjectExtra0 {
implicit def wrapProjectReferenceSeqEval[T](rs: => Seq[T])(implicit ev: T => ProjectReference): Seq[Eval[ProjectReference]] =
rs map { r => Eval.later(r: ProjectReference) }
rs map (r => Eval.later(r: ProjectReference))
}
trait ProjectExtra extends ProjectExtra0 {
implicit def classpathDependencyEval[T](p: => T)(implicit ev: T => ClasspathDep[ProjectReference]): Eval[ClasspathDep[ProjectReference]] =
Eval.later(p: ClasspathDep[ProjectReference])
implicit def wrapProjectReferenceEval[T](ref: => T)(implicit ev: T => ProjectReference): Eval[ProjectReference] =
Eval.later(ref: ProjectReference)
implicit def wrapSettingDefinitionEval[T](d: => T)(implicit ev: T => Def.SettingsDefinition): Eval[Def.SettingsDefinition] = Eval.later(d)
implicit def wrapSettingSeqEval(ss: => Seq[Setting[_]]): Eval[Def.SettingsDefinition] = Eval.later(new Def.SettingList(ss))
implicit def wrapSettingDefinitionEval[T](d: => T)(implicit ev: T => Def.SettingsDefinition): Eval[Def.SettingsDefinition] =
Eval.later(d)
implicit def configDependencyConstructor[T](p: T)(implicit ev: T => ProjectReference): Constructor = new Constructor(p)
implicit def classpathDependency[T](p: T)(implicit ev: T => ProjectReference): ClasspathDep[ProjectReference] = new ClasspathDependency(p, None)
implicit def wrapSettingSeqEval(ss: => Seq[Setting[_]]): Eval[Def.SettingsDefinition] =
Eval.later(new Def.SettingList(ss))
implicit def configDependencyConstructor[T](p: T)(implicit ev: T => ProjectReference): Constructor =
new Constructor(p)
implicit def classpathDependency[T](p: T)(implicit ev: T => ProjectReference): ClasspathDep[ProjectReference] =
ClasspathDependency(p, None)
// These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project).
// Moving Initialize and other settings types to Def and decoupling Project, Def, and Structure means these go here for now
implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] = new Scoped.RichInitializeTask(init)
implicit def richInitializeInputTask[T](init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] = new Scoped.RichInitializeInputTask(init)
implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] =
new Scoped.RichInitializeTask(init)
implicit def richInitializeInputTask[T](init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] =
new Scoped.RichInitializeInputTask(init)
implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] = new Scoped.RichInitialize[T](i)
implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] = new Project.RichTaskSessionVar(init)
implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] =
new Project.RichTaskSessionVar(init)
def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)
def inConfig(conf: Configuration)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(config = Select(conf)))((configuration :== conf) +: ss)
def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
inScope(ThisScope.copy(task = Select(t.key)))(ss)
def inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.replaceThis(scope), ss)
private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] =
inScope(ThisScope.copy(project = Select(ThisBuild)), i)
private[sbt] def inConfig[T](conf: Configuration, i: Initialize[T]): Initialize[T] =
inScope(ThisScope.copy(config = Select(conf)), i)
private[sbt] def inTask[T](t: Scoped, i: Initialize[T]): Initialize[T] =
inScope(ThisScope.copy(task = Select(t.key)), i)
private[sbt] def inScope[T](scope: Scope, i: Initialize[T]): Initialize[T] =
i mapReferenced Project.mapScope(Scope.replaceThis(scope))

View File

@ -280,9 +280,6 @@ object Act {
(MultiTaskCommand ^^^ MultiAction)) <~ Space
) ?? SingleAction
@deprecated("No longer used.", "0.13.2")
def showParser = token((ShowCommand ~ Space) ^^^ true) ?? false
def scopedKeyParser(state: State): Parser[ScopedKey[_]] = scopedKeyParser(Project extract state)
def scopedKeyParser(extracted: Extracted): Parser[ScopedKey[_]] = scopedKeyParser(extracted.structure, extracted.currentRef)
def scopedKeyParser(structure: BuildStructure, currentRef: ProjectRef): Parser[ScopedKey[_]] =

View File

@ -7,9 +7,8 @@ package internal
import Def.ScopedKey
import Keys.{ showSuccess, showTiming, timingFormat }
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ Dag, HList, Relation, Settings, Util }
import sbt.internal.util.{ Dag, HList, Settings, Util }
import sbt.util.{ Logger, Show }
import java.net.URI
import Parser.{ seq, failure, success }
import std.Transform.DummyTaskMap
@ -194,8 +193,4 @@ object Aggregation {
def aggregationEnabled(key: ScopedKey[_], data: Settings[Scope]): Boolean =
Keys.aggregate in Scope.fillTaskAxis(key.scope, key.key) get data getOrElse true
@deprecated("Use BuildUtil.aggregationRelation", "0.13.0")
def relation(units: Map[URI, LoadedBuildUnit]): Relation[ProjectRef, ProjectRef] =
BuildUtil.aggregationRelation(units)
}

View File

@ -147,10 +147,6 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin], val builds
else None
}.partition(nonTopLevelPlugin)
/** A function to select the right [[AutoPlugin]]s from [[autoPlugins]] for a [[Project]]. */
@deprecated("Use deducePluginsFromProject", "0.13.8")
lazy val deducePlugins: (Plugins, Logger) => Seq[AutoPlugin] = Plugins.deducer(autoPlugins.toList map { _.value })
/** Selects the right [[AutoPlugin]]s from a [[Project]]. */
def deducePluginsFromProject(p: Project, log: Logger): Seq[AutoPlugin] =
{
@ -166,7 +162,6 @@ final class DetectedPlugins(val autoPlugins: Seq[DetectedAutoPlugin], val builds
private[this] def autoImports(pluginNames: Seq[String]) = pluginNames.map(_ + ".autoImport")
private[this] def nonTopLevelPlugin(name: String) = name.contains('.')
}
/**
@ -181,6 +176,7 @@ final class LoadedPlugins(val base: File, val pluginData: PluginData, val loader
def fullClasspath: Seq[Attributed[File]] = pluginData.classpath
def classpath = data(fullClasspath)
}
/**
* The loaded, but unresolved build unit.
* @param uri The uniquely identifying URI for the build.

View File

@ -51,9 +51,9 @@ object BuildUtil {
def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies =
{
import collection.mutable.HashMap
val agg = new HashMap[ProjectRef, Seq[ProjectRef]]
val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]]
import scala.collection.mutable
val agg = new mutable.HashMap[ProjectRef, Seq[ProjectRef]]
val cp = new mutable.HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]]
for (lbu <- units.values; rp <- lbu.defined.values) {
val ref = ProjectRef(lbu.unit.uri, rp.id)
cp(ref) = rp.dependencies
@ -76,14 +76,12 @@ object BuildUtil {
def getImports(unit: BuildUnit): Seq[String] = unit.plugins.detected.imports ++ unit.definitions.dslDefinitions.imports
@deprecated("Use getImports(Seq[String]).", "0.13.2")
def getImports(pluginNames: Seq[String], buildNames: Seq[String]): Seq[String] = getImports(pluginNames ++ buildNames)
/** `import sbt._, Keys._`, and wildcard import `._` for all names. */
def getImports(names: Seq[String]): Seq[String] = baseImports ++ importAllRoot(names)
/** Import just the names. */
def importNames(names: Seq[String]): Seq[String] = if (names.isEmpty) Nil else names.mkString("import ", ", ", "") :: Nil
/** Prepend `_root_` and import just the names. */
def importNamesRoot(names: Seq[String]): Seq[String] = importNames(names map rootedName)
@ -96,7 +94,7 @@ object BuildUtil {
{
val depPairs =
for {
(uri, unit) <- units.toIterable
(uri, unit) <- units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail
project <- unit.defined.values
ref = ProjectRef(uri, project.id)
agg <- project.aggregate

View File

@ -24,11 +24,6 @@ object CommandStrings {
Evaluates the given Scala expression and prints the result and type."""
@deprecated("Misnomer: was only for `show`. Use showBrief.", "0.13.2")
def actBrief = showBrief
@deprecated("Misnomer: was only for `show`. Use showDetailed.", "0.13.2")
def actDetailed = showDetailed
def actHelp = showHelp ++ multiTaskHelp
def multiTaskHelp = Help(MultiTaskCommand, (multiTaskSyntax, multiTaskBrief), multiTaskDetailed)

View File

@ -23,8 +23,6 @@ import sbt.io.IO
* 1. Parsing high-level constructs (definitions, settings, imports)
* 2. Compiling scala code into local .class files
* 3. Evaluating the expressions and obtaining in-memory objects of the results (Setting[_] instances, or val references).
*
*
*/
private[sbt] object EvaluateConfigurations {
@ -46,7 +44,6 @@ private[sbt] object EvaluateConfigurations {
* return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has
* raw sbt-types that can be accessed and used.
*/
@deprecated("We no longer merge build.sbt files together unless they are in the same directory.", "0.13.6")
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] =
{
val loadFiles = srcs.sortBy(_.getName) map { src => evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) }
@ -142,13 +139,13 @@ private[sbt] object EvaluateConfigurations {
new LoadedSbtFile(settings, projects, importDefs, manipulations, definitions, allGeneratedFiles)
}
}
/** move a project to be relative to this file after we've evaluated it. */
private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base))
@deprecated("Will no longer be public.", "0.13.6")
def flatten(mksettings: Seq[ClassLoader => Seq[Setting[_]]]): ClassLoader => Seq[Setting[_]] =
loader => mksettings.flatMap(_ apply loader)
def addOffset(offset: Int, lines: Seq[(String, Int)]): Seq[(String, Int)] =
lines.map { case (s, i) => (s, i + offset) }
def addOffsetToRange(offset: Int, ranges: Seq[(String, LineRange)]): Seq[(String, LineRange)] =
ranges.map { case (s, r) => (s, r shift offset) }
@ -203,13 +200,12 @@ private[sbt] object EvaluateConfigurations {
* @return A method that given an sbt classloader, can return the actual Seq[Setting[_]] defined by
* the expression.
*/
@deprecated("Build DSL now includes non-Setting[_] type settings.", "0.13.6") // Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
// Build DSL now includes non-Setting[_] type settings.
// Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
def evaluateSetting(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
{
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case DslEntry.ProjectSettings(values) => values
case _ => Nil
}
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case DslEntry.ProjectSettings(values) => values
case _ => Nil
}
/**
@ -226,19 +222,23 @@ private[sbt] object EvaluateConfigurations {
private[this] def splitSettingsDefinitions(lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines partition { case (line, range) => isDefinition(line) }
private[this] def isDefinition(line: String): Boolean =
{
val trimmed = line.trim
DefinitionKeywords.exists(trimmed startsWith _)
}
private[this] def extractedValTypes: Seq[String] =
Seq(classOf[Project], classOf[InputKey[_]], classOf[TaskKey[_]], classOf[SettingKey[_]]).map(_.getName)
private[this] def evaluateDefinitions(eval: Eval, name: String, imports: Seq[(String, Int)], definitions: Seq[(String, LineRange)], file: Option[File]): compiler.EvalDefinitions =
{
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(convertedRanges, new EvalImports(imports, name), name, file, extractedValTypes)
}
}
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] =
{
@ -247,10 +247,13 @@ object Index {
val pairs = for (scope <- data.scopes; AttributeEntry(key, value: Task[_]) <- data.data(scope).entries) yield (value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]])) // unclear why this cast is needed even with a type test in the above filter
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
def allKeys(settings: Seq[Setting[_]]): Set[ScopedKey[_]] =
settings.flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies).filter(!_.key.isLocal).toSet
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[_]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[_]]
def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] =
stringToKeyMap0(settings)(_.label)
@ -263,7 +266,9 @@ object Index {
else
sys.error(duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", ""))
}
private[this]type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
def triggers(ss: Settings[Scope]): Triggers[Task] =
{
val runBefore = new TriggerMap
@ -276,6 +281,7 @@ object Index {
val onComplete = Keys.onComplete in GlobalScope get ss getOrElse { () => () }
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
}
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =
for (tasks <- tasksOpt; task <- tasks)
map(task) = base +: map.getOrElse(task, Nil)

View File

@ -8,11 +8,11 @@ import sbt.librarymanagement.{ Configuration, Configurations, Resolver, UpdateOp
import sbt.internal.librarymanagement.{ DefaultFileToStore, InlineIvyConfiguration, IvyPaths }
import java.io.File
import java.net.{ URI, URL }
import java.net.URI
import compiler.Eval
import scala.annotation.tailrec
import collection.mutable
import sbt.internal.inc.{ Analysis, ClasspathOptionsUtil, ModuleUtilities }
import sbt.internal.inc.ClasspathOptionsUtil
import sbt.internal.inc.classpath.ClasspathUtilities
import Project.inScope
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
@ -83,14 +83,17 @@ private[sbt] object Load {
LoadBuildConfiguration(stagingDirectory, classpath, loader, compilers, evalPluginDef, delegates,
EvaluateTask.injectStreams, pluginMgmt, inject, None, Nil, log)
}
private def bootIvyHome(app: xsbti.AppConfiguration): Option[File] =
try { Option(app.provider.scalaProvider.launcher.ivyHome) }
catch { case _: NoSuchMethodError => None }
def injectGlobal(state: State): Seq[Setting[_]] =
(appConfiguration in GlobalScope :== state.configuration) +:
LogManager.settingsLogger(state) +:
DefaultBackgroundJobService.backgroundJobServiceSetting +:
EvaluateTask.injectSettings
def defaultWithGlobal(state: State, base: File, rawConfig: LoadBuildConfiguration, globalBase: File, log: Logger): LoadBuildConfiguration =
{
val globalPluginsDir = getGlobalPluginsDirectory(state, globalBase)
@ -105,6 +108,7 @@ private[sbt] object Load {
if (files.isEmpty || base == globalBase) const(Nil) else buildGlobalSettings(globalBase, files, config)
config.copy(injectSettings = config.injectSettings.copy(projectLoaded = compiled))
}
def buildGlobalSettings(base: File, files: Seq[File], config: LoadBuildConfiguration): ClassLoader => Seq[Setting[_]] =
{
val eval = mkEval(data(config.globalPluginClasspath), base, defaultEvalOptions)
@ -141,11 +145,13 @@ private[sbt] object Load {
(project, extra) => Nil
)
}
def configInherit(lb: LoadedBuild, ref: ResolvedReference, config: ConfigKey, rootProject: URI => String): Seq[ConfigKey] =
ref match {
case pr: ProjectRef => configInheritRef(lb, pr, config)
case BuildRef(uri) => configInheritRef(lb, ProjectRef(uri, rootProject(uri)), config)
}
def configInheritRef(lb: LoadedBuild, ref: ProjectRef, config: ConfigKey): Seq[ConfigKey] =
configurationOpt(lb.units, ref.build, ref.project, config).toList.flatMap(_.extendsConfigs).map(c => ConfigKey(c.name))
@ -220,6 +226,7 @@ private[sbt] object Load {
}
ss.map(s => s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining)
}
def setDefinitionKey[T](tk: Task[T], key: ScopedKey[_]): Task[T] =
if (isDummy(tk)) tk else Task(tk.info.set(Keys.taskDefinitionKey, key), tk.work)
@ -245,6 +252,7 @@ private[sbt] object Load {
}
def isProjectThis(s: Setting[_]) = s.key.scope.project match { case This | Select(ThisProject) => true; case _ => false }
def buildConfigurations(loaded: LoadedBuild, rootProject: URI => String, injectSettings: InjectSettings): Seq[Setting[_]] =
{
((loadedBuild in GlobalScope :== loaded) +:
@ -271,8 +279,10 @@ private[sbt] object Load {
def transformProjectOnly(uri: URI, rootProject: URI => String, settings: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.resolveProject(uri, rootProject), settings)
def transformSettings(thisScope: Scope, uri: URI, rootProject: URI => String, settings: Seq[Setting[_]]): Seq[Setting[_]] =
Project.transform(Scope.resolveScope(thisScope, uri, rootProject), settings)
def projectScope(project: Reference): Scope = Scope(Select(project), Global, Global, Global)
def lazyEval(unit: BuildUnit): () => Eval =
@ -280,9 +290,12 @@ private[sbt] object Load {
lazy val eval = mkEval(unit)
() => eval
}
def mkEval(unit: BuildUnit): Eval = mkEval(unit.definitions, unit.plugins, unit.plugins.pluginData.scalacOptions)
def mkEval(defs: LoadedDefinitions, plugs: LoadedPlugins, options: Seq[String]): Eval =
mkEval(defs.target ++ plugs.classpath, defs.base, options)
def mkEval(classpath: Seq[File], base: File, options: Seq[String]): Eval =
new Eval(options, classpath, s => new ConsoleReporter(s), Some(evalOutputDirectory(base)))
@ -309,13 +322,9 @@ private[sbt] object Load {
}
}
@deprecated("This method is no longer used", "0.13.6")
def configurations(srcs: Seq[File], eval: () => Eval, imports: Seq[String]): ClassLoader => LoadedSbtFile =
if (srcs.isEmpty) const(LoadedSbtFile.empty)
else EvaluateConfigurations(eval(), srcs, imports)
def load(file: File, s: State, config: LoadBuildConfiguration): PartBuild =
load(file, builtinLoader(s, config.copy(pluginManagement = config.pluginManagement.shift, extraBuilds = Nil)), config.extraBuilds.toList)
def builtinLoader(s: State, config: LoadBuildConfiguration): BuildLoader =
{
val fail = (uri: URI) => sys.error("Invalid build URI (no handler available): " + uri)
@ -325,7 +334,9 @@ private[sbt] object Load {
val components = BuildLoader.components(resolver, build, full = BuildLoader.componentLoader)
BuildLoader(components, fail, s, config)
}
def load(file: File, loaders: BuildLoader, extra: List[URI]): PartBuild = loadURI(IO.directoryURI(file), loaders, extra)
def loadURI(uri: URI, loaders: BuildLoader, extra: List[URI]): PartBuild =
{
IO.assertAbsolute(uri)
@ -334,6 +345,7 @@ private[sbt] object Load {
val build = new PartBuild(uri, map)
newLoaders transformAll build
}
def addOverrides(unit: BuildUnit, loaders: BuildLoader): BuildLoader =
loaders updatePluginManagement PluginManagement.extractOverrides(unit.plugins.fullClasspath)
@ -363,6 +375,7 @@ private[sbt] object Load {
val rootProjects = if (projectsInRoot.isEmpty) firstDefined :: Nil else projectsInRoot
(new PartBuildUnit(unit, defined.map(d => (d.id, d)).toMap, rootProjects.map(_.id), buildSettings(unit)), externals)
}
def buildSettings(unit: BuildUnit): Seq[Setting[_]] =
{
val buildScope = GlobalScope.copy(project = Select(BuildRef(unit.uri)))
@ -385,11 +398,14 @@ private[sbt] object Load {
}
case Nil => (references, builds, loaders)
}
def checkProjectBase(buildBase: File, projectBase: File): Unit = {
checkDirectory(projectBase)
assert(buildBase == projectBase || IO.relativize(buildBase, projectBase).isDefined, "Directory " + projectBase + " is not contained in build root " + buildBase)
}
def checkBuildBase(base: File) = checkDirectory(base)
def checkDirectory(base: File): Unit = {
assert(base.isAbsolute, "Not absolute: " + base)
if (base.isFile)
@ -397,6 +413,7 @@ private[sbt] object Load {
else if (!base.exists)
IO createDirectory base
}
def resolveAll(builds: Map[URI, PartBuildUnit]): Map[URI, LoadedBuildUnit] =
{
val rootProject = getRootProject(builds)
@ -405,6 +422,7 @@ private[sbt] object Load {
(uri, unit.resolveRefs(ref => Scope.resolveProjectRef(uri, rootProject, ref)))
}
}
def checkAll(referenced: Map[URI, List[ProjectReference]], builds: Map[URI, PartBuildUnit]): Unit = {
val rootProject = getRootProject(builds)
for ((uri, refs) <- referenced; ref <- refs) {
@ -427,6 +445,7 @@ private[sbt] object Load {
}
p => p.copy(base = resolve(p.base))
}
def resolveProjects(loaded: PartBuild): LoadedBuild =
{
val rootProject = getRootProject(loaded.units)
@ -437,12 +456,14 @@ private[sbt] object Load {
}
new LoadedBuild(loaded.root, units)
}
def resolveProjects(uri: URI, unit: PartBuildUnit, rootProject: URI => String): LoadedBuildUnit =
{
IO.assertAbsolute(uri)
val resolve = (_: Project).resolve(ref => Scope.resolveProjectRef(uri, rootProject, ref))
new LoadedBuildUnit(unit.unit, unit.defined mapValues resolve, unit.rootProjects, unit.buildSettings)
}
def projects(unit: BuildUnit): Seq[Project] =
{
// we don't have the complete build graph loaded, so we don't have the rootProject function yet.
@ -451,22 +472,28 @@ private[sbt] object Load {
// although the default loader will resolve the project base directory, other loaders may not, so run resolveBase here as well
unit.definitions.projects.map(resolveBuild compose resolveBase(unit.localBase))
}
def getRootProject(map: Map[URI, BuildUnitBase]): URI => String =
uri => getBuild(map, uri).rootProjects.headOption getOrElse emptyBuild(uri)
def getConfiguration(map: Map[URI, LoadedBuildUnit], uri: URI, id: String, conf: ConfigKey): Configuration =
configurationOpt(map, uri, id, conf) getOrElse noConfiguration(uri, id, conf.name)
def configurationOpt(map: Map[URI, LoadedBuildUnit], uri: URI, id: String, conf: ConfigKey): Option[Configuration] =
getProject(map, uri, id).configurations.find(_.name == conf.name)
def getProject(map: Map[URI, LoadedBuildUnit], uri: URI, id: String): ResolvedProject =
getBuild(map, uri).defined.getOrElse(id, noProject(uri, id))
def getBuild[T](map: Map[URI, T], uri: URI): T =
map.getOrElse(uri, noBuild(uri))
def emptyBuild(uri: URI) = sys.error(s"No root project defined for build unit '$uri'")
def noBuild(uri: URI) = sys.error(s"Build unit '$uri' not defined.")
def noProject(uri: URI, id: String) = sys.error(s"No project '$id' defined in '$uri'.")
def noConfiguration(uri: URI, id: String, conf: String) = sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'")
def noConfiguration(uri: URI, id: String, conf: String) =
sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'")
// Called from builtinLoader
def loadUnit(uri: URI, localBase: File, s: State, config: LoadBuildConfiguration): BuildUnit =
@ -551,6 +578,7 @@ private[sbt] object Load {
// Lame hackery to keep track of our state.
private[this] case class LoadedProjects(projects: Seq[Project], generatedConfigClassFiles: Seq[File])
/**
* Loads a new set of projects, including any transitively defined projects underneath this one.
*
@ -808,6 +836,7 @@ private[sbt] object Load {
case Some(cp) => cp.data.fullClasspath
case None => Nil
}
/** These are the settings defined when loading a project "meta" build. */
val autoPluginSettings: Seq[Setting[_]] = inScope(GlobalScope in LocalRootProject)(Seq(
sbtPlugin :== true,
@ -819,6 +848,7 @@ private[sbt] object Load {
},
onLoadMessage := ("Loading project definition from " + baseDirectory.value)
))
private[this] def removeEntries(cp: Seq[Attributed[File]], remove: Seq[Attributed[File]]): Seq[Attributed[File]] =
{
val files = data(remove).toSet
@ -830,11 +860,13 @@ private[sbt] object Load {
global = autoPluginSettings ++ config.injectSettings.global,
project = config.pluginManagement.inject ++ config.injectSettings.project
))
def activateGlobalPlugin(config: LoadBuildConfiguration): LoadBuildConfiguration =
config.globalPlugin match {
case Some(gp) => config.copy(injectSettings = config.injectSettings.copy(project = gp.inject))
case None => config
}
def plugins(dir: File, s: State, config: LoadBuildConfiguration): LoadedPlugins =
if (hasDefinition(dir))
buildPlugins(dir, s, enableSbtPlugin(activateGlobalPlugin(config)))
@ -846,8 +878,10 @@ private[sbt] object Load {
import sbt.io.syntax._
(dir * -GlobFilter(DefaultTargetName)).get.nonEmpty
}
def noPlugins(dir: File, config: LoadBuildConfiguration): LoadedPlugins =
loadPluginDefinition(dir, config, PluginData(config.globalPluginClasspath, Nil, None, None, Nil))
def buildPlugins(dir: File, s: State, config: LoadBuildConfiguration): LoadedPlugins =
loadPluginDefinition(dir, config, buildPluginDefinition(dir, s, config))
@ -856,10 +890,13 @@ private[sbt] object Load {
val (definitionClasspath, pluginLoader) = pluginDefinitionLoader(config, pluginData)
loadPlugins(dir, pluginData.copy(dependencyClasspath = definitionClasspath), pluginLoader)
}
def pluginDefinitionLoader(config: LoadBuildConfiguration, dependencyClasspath: Seq[Attributed[File]]): (Seq[Attributed[File]], ClassLoader) =
pluginDefinitionLoader(config, dependencyClasspath, Nil)
def pluginDefinitionLoader(config: LoadBuildConfiguration, pluginData: PluginData): (Seq[Attributed[File]], ClassLoader) =
pluginDefinitionLoader(config, pluginData.dependencyClasspath, pluginData.definitionClasspath)
def pluginDefinitionLoader(config: LoadBuildConfiguration, depcp: Seq[Attributed[File]], defcp: Seq[Attributed[File]]): (Seq[Attributed[File]], ClassLoader) =
{
val definitionClasspath =
@ -881,6 +918,7 @@ private[sbt] object Load {
}
(definitionClasspath, pluginLoader)
}
def buildPluginDefinition(dir: File, s: State, config: LoadBuildConfiguration): PluginData =
{
val (eval, pluginDef) = apply(dir, s, config)
@ -888,28 +926,9 @@ private[sbt] object Load {
config.evalPluginDef(Project.structure(pluginState), pluginState)
}
@deprecated("Use ModuleUtilities.getCheckedObjects[BuildDef].", "0.13.2")
def loadDefinitions(loader: ClassLoader, defs: Seq[String]): Seq[BuildDef] =
defs map { definition => loadDefinition(loader, definition) }
@deprecated("Use ModuleUtilities.getCheckedObject[BuildDef].", "0.13.2")
def loadDefinition(loader: ClassLoader, definition: String): BuildDef =
ModuleUtilities.getObject(definition, loader).asInstanceOf[BuildDef]
def loadPlugins(dir: File, data: PluginData, loader: ClassLoader): LoadedPlugins =
new LoadedPlugins(dir, data, loader, PluginDiscovery.discoverAll(data, loader))
@deprecated("Use PluginDiscovery.onClasspath", "0.13.2")
def onClasspath(classpath: Seq[File])(url: URL): Boolean =
PluginDiscovery.onClasspath(classpath)(url)
@deprecated("No longer used.", "0.13.2")
def findDefinitions(analysis: Analysis): Seq[String] = discover(analysis, "sbt.internal.BuildDef")
@deprecated("Use PluginDiscovery.sourceModuleNames", "0.13.2")
def discover(analysis: Analysis, subclasses: String*): Seq[String] =
PluginDiscovery.sourceModuleNames(analysis, subclasses: _*)
def initialSession(structure: BuildStructure, rootEval: () => Eval, s: State): SessionSettings = {
val session = s get Keys.sessionSettings
val currentProject = session map (_.currentProject) getOrElse Map.empty
@ -934,22 +953,10 @@ private[sbt] object Load {
def defaultEvalOptions: Seq[String] = Nil
@deprecated("Use BuildUtil.baseImports", "0.13.0")
def baseImports = BuildUtil.baseImports
@deprecated("Use BuildUtil.checkCycles", "0.13.0")
def checkCycles(units: Map[URI, LoadedBuildUnit]): Unit = BuildUtil.checkCycles(units)
@deprecated("Use BuildUtil.importAll", "0.13.0")
def importAll(values: Seq[String]): Seq[String] = BuildUtil.importAll(values)
@deprecated("Use BuildUtil.importAllRoot", "0.13.0")
def importAllRoot(values: Seq[String]): Seq[String] = BuildUtil.importAllRoot(values)
@deprecated("Use BuildUtil.rootedNames", "0.13.0")
def rootedName(s: String): String = BuildUtil.rootedName(s)
@deprecated("Use BuildUtil.getImports", "0.13.0")
def getImports(unit: BuildUnit): Seq[String] = BuildUtil.getImports(unit)
def referenced[PR <: ProjectReference](definitions: Seq[ProjectDefinition[PR]]): Seq[PR] = definitions flatMap { _.referenced }
final class EvaluatedConfigurations(val eval: Eval, val settings: Seq[Setting[_]])
final case class InjectSettings(global: Seq[Setting[_]], project: Seq[Setting[_]], projectLoaded: ClassLoader => Seq[Setting[_]]) {
import java.net.URLClassLoader
private val cache: mutable.Map[String, Seq[Setting[_]]] = mutable.Map.empty
@ -969,9 +976,6 @@ private[sbt] object Load {
}
}
@deprecated("Use BuildUtil.apply", "0.13.0")
def buildUtil(root: URI, units: Map[URI, LoadedBuildUnit], keyIndex: KeyIndex, data: Settings[Scope]): BuildUtil[ResolvedProject] = BuildUtil(root, units, keyIndex, data)
/** Debugging method to time how long it takes to run various compilation tasks. */
private[sbt] def timed[T](label: String, log: Logger)(t: => T): T = {
val start = System.nanoTime
@ -1009,6 +1013,7 @@ final case class LoadBuildConfiguration(
}
Load.loadPluginDefinition(baseDir, this, pluginData)
}
lazy val detectedGlobalPlugins = globalPluginDefs.detected
}

View File

@ -20,10 +20,6 @@ import sbt.io.IO
object Output {
final val DefaultTail = "> "
@deprecated("Explicitly provide None for the stream ID.", "0.13.0")
def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit)(implicit display: Show[ScopedKey[_]]): Unit =
last(keys, streams, printLines, None)(display)
def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit, sid: Option[String])(implicit display: Show[ScopedKey[_]]): Unit =
printLines(flatLines(lastLines(keys, streams, sid))(idFun))
@ -57,9 +53,6 @@ object Output {
outputs.filterNot(_.value.isEmpty)
}
@deprecated("Explicitly provide None for the stream ID.", "0.13.0")
def lastLines(key: ScopedKey[_], mgr: Streams): Seq[String] = lastLines(key, mgr, None)
def lastLines(key: ScopedKey[_], mgr: Streams, sid: Option[String]): Seq[String] =
mgr.use(key) { s =>
// Workaround for #1155 - Keys.streams are always scoped by the task they're included in

View File

@ -20,7 +20,15 @@ import scala.sys.process.Process
* @param connectInput If true, the standard input of the forked process is connected to the standard input of this process. Otherwise, it is connected to an empty input stream. Connecting input streams can be problematic, especially on versions before Java 7.
* @param envVars The environment variables to provide to the forked process. By default, none are provided.
*/
final case class ForkOptions(javaHome: Option[File] = None, outputStrategy: Option[OutputStrategy] = None, bootJars: Seq[File] = Nil, workingDirectory: Option[File] = None, runJVMOptions: Seq[String] = Nil, connectInput: Boolean = false, envVars: Map[String, String] = Map.empty)
final case class ForkOptions(
javaHome: Option[File] = None,
outputStrategy: Option[OutputStrategy] = None,
bootJars: Seq[File] = Nil,
workingDirectory: Option[File] = None,
runJVMOptions: Seq[String] = Nil,
connectInput: Boolean = false,
envVars: Map[String, String] = Map.empty
)
/** Configures where the standard output and error streams from a forked process go.*/
sealed abstract class OutputStrategy

View File

@ -10,11 +10,11 @@ scalaVersion in update := {
}
}
InputKey[Unit]("check") := (inputTask { argsT =>
(argsT, scalaVersion in ThisBuild, scalaVersion, scalaVersion in update) map { (args, svTB, svP, svU) =>
def check(label: String, i: Int, actual: String) = assert(args(i) == actual, "Expected " + label + "='" + args(i) + "' got '" + actual + "'")
check("scalaVersion in ThisBuild", 0, svTB)
check("scalaVersion", 1, svP)
check("scalaVersion in update", 2, svU)
}
}).evaluated
InputKey[Unit]("check") := {
val args = Def.spaceDelimited().parsed
def check(label: String, i: Int, actual: String) =
assert(args(i) == actual, s"Expected $label='${args(i)}' got '$actual'")
check("scalaVersion in ThisBuild", 0, scalaVersion in ThisBuild value)
check("scalaVersion", 1, scalaVersion.value)
check("scalaVersion in update", 2, scalaVersion in update value)
}

View File

@ -27,12 +27,13 @@ lazy val root = (project in file(".")).
def demoState(s: State, i: Int): State = s put (sample, i + 1)
def checkInit: Initialize[InputTask[Unit]] = InputTask( (_: State) => token(Space ~> IntBasic) ~ token(Space ~> IntBasic).?) { key =>
(key, updateDemo, state) map { case ((curExpected, prevExpected), value, s) =>
val prev = s get sample
assert(value == curExpected, "Expected current value to be " + curExpected + ", got " + value)
assert(prev == prevExpected, "Expected previous value to be " + prevExpected + ", got " + prev)
}
def checkInit: Initialize[InputTask[Unit]] = Def inputTask {
val key = (token(Space ~> IntBasic) ~ token(Space ~> IntBasic).?).parsed
val (curExpected, prevExpected) = key
val value = updateDemo.value
val prev = state.value get sample
assert(value == curExpected, s"Expected current value to be $curExpected, got $value")
assert(prev == prevExpected, s"Expected previous value to be $prevExpected, got $prev")
}
def inMemorySetting = keep := (getPrevious(keep) map { case None => 3; case Some(x) => x + 1} keepAs(keep)).value

View File

@ -1,12 +1,12 @@
import sbt.internal.inc.Analysis
// checks number of compilation iterations performed since last `clean` run
InputKey[Unit]("check-number-of-compiler-iterations") := (inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
val allCompilationsSize = a.compilations.allCompilations.size
assert(allCompilationsSize == expectedIterationsNumber,
"allCompilationsSize == %d (expected %d)".format(allCompilationsSize, expectedIterationsNumber))
}
}).evaluated
InputKey[Unit]("check-number-of-compiler-iterations") := {
val args = Def.spaceDelimited().parsed
val a = (compile in Compile).value.asInstanceOf[Analysis]
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
val allCompilationsSize = a.compilations.allCompilations.size
assert(allCompilationsSize == expectedIterationsNumber,
"allCompilationsSize == %d (expected %d)".format(allCompilationsSize, expectedIterationsNumber))
}

View File

@ -3,10 +3,13 @@ import sbt.internal.inc.Analysis
logLevel := Level.Debug
// dumps analysis into target/analysis-dump.txt file
InputKey[Unit]("check-number-of-compiler-iterations") := (inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))
}
}).evaluated
InputKey[Unit]("check-number-of-compiler-iterations") := {
val args = Def.spaceDelimited().parsed
val a = (compile in Compile).value.asInstanceOf[Analysis]
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(
a.compilations.allCompilations.size == expectedIterationsNumber,
"a.compilations.allCompilations.size = %d (expected %d)".format(
a.compilations.allCompilations.size, expectedIterationsNumber))
}

View File

@ -1,9 +1,11 @@
import sbt.internal.inc.Analysis
InputKey[Unit]("check-number-of-compiler-iterations") := (inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))
}
}).evaluated
InputKey[Unit]("check-number-of-compiler-iterations") := {
val args = Def.spaceDelimited().parsed
val a = (compile in Compile).value.asInstanceOf[Analysis]
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber,
"a.compilations.allCompilations.size = %d (expected %d)".format(
a.compilations.allCompilations.size, expectedIterationsNumber))
}

View File

@ -1,5 +1,5 @@
import Configurations.{CompilerPlugin => CPlugin}
lazy val use = project.dependsOn(file("def") % CPlugin).settings(
lazy val use = project.dependsOn(RootProject(file("def")) % CPlugin).settings(
autoCompilerPlugins := true
)

View File

@ -1,8 +1,8 @@
import sbt.internal.librarymanagement.syntax._
seq(externalIvySettings(), externalIvyFile())
Seq(externalIvySettings(), externalIvyFile())
TaskKey[Unit]("check") := {
val files = update.value.matching( moduleFilter(organization = "org.scalacheck", name = "scalacheck*", revision = "1.11.4") )
assert(files.nonEmpty, "ScalaCheck module not found in update report")
}
}

View File

@ -2,4 +2,4 @@ lazy val a = (project in file(".")).
settings(externalIvySettings()) dependsOn(b)
lazy val b = (project in file("b")).
settings(externalIvySettings( (baseDirectory in ThisBuild) / "ivysettings.xml" ))
settings(externalIvySettings( Def setting ((baseDirectory in ThisBuild).value / "ivysettings.xml") ))

View File

@ -3,14 +3,13 @@ autoScalaLibrary := false
ivyPaths := (baseDirectory, target)( (dir, t) => IvyPaths(dir, Some(t / "ivy-cache"))).value
ivyScala := ((scalaVersion in update, scalaBinaryVersion in update) { (fv, bv) =>
Some(IvyScala(fv, bv, Vector.empty, filterImplicit = false, checkExplicit = false, overrideScalaVersion = false))
Some(IvyScala(fv, bv, Vector.empty, filterImplicit = false, checkExplicit = false, overrideScalaVersion = false))
}).value
InputKey[Unit]("check") := (inputTask { args =>
(update, args) map {
case (report, Seq(expected, _*)) =>
report.allModules.forall(_.revision == expected)
}
}).evaluated
InputKey[Unit]("check") := {
val args = Def.spaceDelimited().parsed
val Seq(expected, _*) = args
update.value.allModules.forall(_.revision == expected)
}
scalaVersion := "2.9.1"

View File

@ -4,8 +4,8 @@ addSbtPlugin("com.typesafe.sbtscalariform" % "sbtscalariform" % "0.3.0", sbtVers
scalaBinaryVersion := "2.9.1"
resolvers += Classpaths.typesafeResolver
resolvers += Classpaths.typesafeReleases
dependencyOverrides := Set("com.typesafe.sbtscalariform" % "sbtscalariform" % "0.3.1")
autoScalaLibrary := false
autoScalaLibrary := false

View File

@ -3,7 +3,10 @@ import complete.DefaultParsers._
lazy val root = (project in file(".")).
settings(
resolvers ++= Seq(local, Resolver.sonatypeRepo("releases"), Resolver.sonatypeRepo("snapshots")),
InputKey[Unit]("checkPom") := (InputTask(_ => spaceDelimited("<args>")) { result => (makePom, result, streams) map checkPomRepositories }).evaluated,
InputKey[Unit]("checkPom") := {
val result = spaceDelimited("<args>").parsed
checkPomRepositories(makePom.value, result, streams.value)
},
makePomConfiguration := ((makePomConfiguration, baseDirectory) { (conf, base) =>
conf.copy(filterRepositories = pomIncludeRepository(base, conf.filterRepositories) )
}).value,

View File

@ -9,17 +9,17 @@ lazy val root = (project in file(".")).
managedClasspath in Provided := ((classpathTypes, update) map { (cpts, report) => Classpaths.managedJars(Provided, cpts, report) }).value
)
def checkTask = InputTask(_ => parser ) { result =>
(result, managedClasspath in Provided, fullClasspath in Compile, fullClasspath in Test, fullClasspath in Runtime) map { case ((conf, names), p, c, t, r) =>
println("Checking: " + conf.name)
checkClasspath(conf match {
case Provided => p
case Compile => c
case Test => t
case Runtime => r
}, names.toSet)
}
}
def checkTask = Def.inputTask {
val result = parser.parsed
val (conf, names) = result
println("Checking: " + conf.name)
checkClasspath(conf match {
case Provided => managedClasspath in Provided value
case Compile => fullClasspath in Compile value
case Test => fullClasspath in Test value
case Runtime => fullClasspath in Runtime value
}, names.toSet)
}
lazy val check = InputKey[Unit]("check")
def parser: Parser[(Configuration,Seq[String])] = (Space ~> token(cp(Compile) | cp(Runtime) | cp(Provided) | cp(Test))) ~ spaceDelimited("<module-names>")

View File

@ -5,26 +5,30 @@ val check = InputKey[Unit]("check")
lazy val root = (project in file(".")).
settings(
provided := baseDirectory(_ / "useProvided" exists).value,
configuration := provided(p => if(p) Provided else Compile).value,
libraryDependencies += configuration(c => "javax.servlet" % "servlet-api" % "2.5" % c.name).value,
managedClasspath in Provided := ((classpathTypes, update) map { (cpts, report) => Classpaths.managedJars(Provided, cpts, report) }).value,
check := (InputTask(_ => Space ~> token(Compile.name.id | Runtime.name | Provided.name | Test.name) ~ token(Space ~> Bool)) { result =>
(result, managedClasspath in Provided, fullClasspath in Runtime, fullClasspath in Compile, fullClasspath in Test) map { case ((conf, expected), p, r, c, t) =>
val cp = if(conf == Compile.name) c else if(conf == Runtime.name) r else if(conf == Provided.name) p else if(conf == Test.name) t else sys.error("Invalid config: " + conf)
checkServletAPI(cp.files, expected, conf)
provided := (baseDirectory.value / "useProvided" exists),
configuration := (if (provided.value) Provided else Compile),
libraryDependencies += "javax.servlet" % "servlet-api" % "2.5" % configuration.value.name,
managedClasspath in Provided := Classpaths.managedJars(Provided, classpathTypes.value, update.value),
check := {
val result = (
Space ~> token(Compile.name.id | Runtime.name | Provided.name | Test.name) ~ token(Space ~> Bool)
).parsed
val (conf, expected) = result
val cp = conf match {
case Compile.name => (fullClasspath in Compile).value
case Runtime.name => (fullClasspath in Runtime).value
case Provided.name => (managedClasspath in Provided).value
case Test.name => (fullClasspath in Test).value
case _ => sys.error(s"Invalid config: $conf")
}
}).evaluated
checkServletAPI(cp.files, expected, conf)
}
)
def checkServletAPI(paths: Seq[File], shouldBeIncluded: Boolean, label: String) =
{
val servletAPI = paths.find(_.getName contains "servlet-api")
if(shouldBeIncluded)
{
if(servletAPI.isEmpty)
sys.error("Servlet API should have been included in " + label + ".")
}
else
servletAPI.foreach(s => sys.error(s + " incorrectly included in " + label + "."))
}
def checkServletAPI(paths: Seq[File], shouldBeIncluded: Boolean, label: String) = {
val servletAPI = paths.find(_.getName contains "servlet-api")
if (shouldBeIncluded) {
if (servletAPI.isEmpty) sys.error(s"Servlet API should have been included in $label.")
} else
servletAPI foreach (s => sys.error(s"$s incorrectly included in $label."))
}

View File

@ -1,5 +1,3 @@
{
val loadCount = AttributeKey[Int]("load-count")
val unloadCount = AttributeKey[Int]("unload-count")
@ -7,20 +5,20 @@
val previous = s get key getOrElse 0
s.put(key, previous + 1)
}
seq(
onLoad in Global ~= (f(loadCount) compose _),
onUnload in Global ~= (f(unloadCount) compose _)
Seq(
onLoad in Global ~= (f(loadCount) compose _),
onUnload in Global ~= (f(unloadCount) compose _)
)
}
InputKey[Unit]("checkCount") := (inputTask { argsTask =>
(argsTask, state) map { (args, s) =>
def get(label: String) = s get AttributeKey[Int](label) getOrElse 0
val loadCount = get("load-count")
val unloadCount = get("unload-count")
val expectedLoad = args(0).toInt
val expectedUnload = args(1).toInt
assert(expectedLoad == loadCount, "Expected load count: " + expectedLoad + ", got: " + loadCount)
assert(expectedUnload == unloadCount, "Expected unload count: " + expectedUnload + ", got: " + unloadCount)
}
}).evaluated
InputKey[Unit]("checkCount") := {
val s = state.value
val args = Def.spaceDelimited().parsed
def get(label: String) = s get AttributeKey[Int](label) getOrElse 0
val loadCount = get("load-count")
val unloadCount = get("unload-count")
val expectedLoad = args(0).toInt
val expectedUnload = args(1).toInt
assert(expectedLoad == loadCount, s"Expected load count: $expectedLoad, got: $loadCount")
assert(expectedUnload == unloadCount, s"Expected unload count: $expectedUnload, got: $unloadCount")
}

View File

@ -1,2 +1,2 @@
lazy val root = (project in file(".")).
dependsOn(file("../plugin"))
dependsOn(RootProject(file("../plugin")))

View File

@ -1,9 +1,12 @@
import sbt.internal.inc.Analysis
InputKey[Unit]("checkNumberOfCompilerIterations") := (inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, compile in Compile) map { case (args: Seq[String], a: Analysis) =>
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber, "a.compilations.allCompilations.size = %d (expected %d)".format(a.compilations.allCompilations.size, expectedIterationsNumber))
}
}).evaluated
InputKey[Unit]("checkNumberOfCompilerIterations") := {
val a = (compile in Compile).value.asInstanceOf[Analysis]
val args = Def.spaceDelimited().parsed
assert(args.size == 1)
val expectedIterationsNumber = args(0).toInt
assert(a.compilations.allCompilations.size == expectedIterationsNumber,
"a.compilations.allCompilations.size = %d (expected %d)".format(
a.compilations.allCompilations.size, expectedIterationsNumber)
)
}

View File

@ -4,6 +4,7 @@ testGrouping := {
new Tests.Group(
name = test.name,
tests = Seq(test),
runPolicy = Tests.SubProcess(javaOptions = Seq.empty[String]))
runPolicy = Tests.SubProcess(ForkOptions(runJVMOptions = Seq.empty[String]))
)
}
}

View File

@ -16,7 +16,11 @@ lazy val root = (project in file(".")).
val tests = (definedTests in Test).value
assert(tests.size == 3)
for (idx <- 0 until groups) yield
new Group(groupId(idx), tests, SubProcess(Seq("-Dgroup.prefix=" + groupPrefix(idx))))
new Group(
groupId(idx),
tests,
SubProcess(ForkOptions(runJVMOptions = Seq("-Dgroup.prefix=" + groupPrefix(idx))))
)
},
check := {
val files =

View File

@ -57,10 +57,10 @@ object ScriptedPlugin extends AutoPlugin {
ModuleUtilities.getObject("sbt.test.ScriptedTests", loader)
}
def scriptedRunTask: Initialize[Task[Method]] = (scriptedTests) map {
(m) =>
m.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]], classOf[File], classOf[Array[String]])
}
def scriptedRunTask: Initialize[Task[Method]] = Def task (
scriptedTests.value.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]],
classOf[File], classOf[Array[String]])
)
import DefaultParsers._
case class ScriptedTestPage(page: Int, total: Int)

View File

@ -153,15 +153,19 @@ class ScriptedRunner {
}
runAll(allTests)
}
def runAll(tests: Seq[() => Option[String]]): Unit = {
val errors = for (test <- tests; err <- test()) yield err
if (errors.nonEmpty)
sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n"))
}
def get(tests: Seq[String], baseDirectory: File, log: Logger): Seq[ScriptedTest] =
if (tests.isEmpty) listTests(baseDirectory, log) else parseTests(tests)
def listTests(baseDirectory: File, log: Logger): Seq[ScriptedTest] =
(new ListTests(baseDirectory, _ => true, log)).listTests
def parseTests(in: Seq[String]): Seq[ScriptedTest] =
for (testString <- in) yield {
val Array(group, name) = testString.split("/").map(_.trim)