diff --git a/main-actions/src/main/scala/sbt/Compiler.scala b/main-actions/src/main/scala/sbt/Compiler.scala
index d8eef8f1f..24b98d77a 100644
--- a/main-actions/src/main/scala/sbt/Compiler.scala
+++ b/main-actions/src/main/scala/sbt/Compiler.scala
@@ -36,130 +36,38 @@ object Compiler {
     ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.12", ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
 
-  /** Inputs necessary to run the incremental compiler. */
-  // final case class Inputs(compilers: Compilers, config: Options, incSetup: IncSetup)
-  // /** The inputs for the compiler *and* the previous analysis of source dependencies. */
-  // final case class InputsWithPrevious(inputs: Inputs, previousAnalysis: PreviousAnalysis)
-  // final case class Options(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String], javacOptions: Seq[String], maxErrors: Int, sourcePositionMapper: Position => Position, order: CompileOrder)
-  // final case class IncSetup(analysisMap: File => Option[Analysis], definesClass: DefinesClass, skip: Boolean, cacheFile: File, cache: GlobalsCache, incOptions: IncOptions)
-
-  // private[sbt] trait JavaToolWithNewInterface extends JavaTool {
-  //   def newJavac: IncrementalCompilerJavaTools
-  // }
-  /** The instances of Scalac/Javac used to compile the current project. */
-  // final case class Compilers(scalac: AnalyzingCompiler, javac: IncrementalCompilerJavaTools)
-
-  /** The previous source dependency analysis result from compilation. */
-  // final case class PreviousAnalysis(analysis: Analysis, setup: Option[MiniSetup])
-
-  // def inputs(classpath: Seq[File], sources: Seq[File], classesDirectory: File, options: Seq[String],
-  //   javacOptions: Seq[String], maxErrors: Int, sourcePositionMappers: Seq[Position => Option[Position]],
-  //   order: CompileOrder)(implicit compilers: Compilers, incSetup: IncSetup, log: Logger): Inputs =
-  //   new Inputs(
-  //     compilers,
-  //     new Options(classpath, sources, classesDirectory, options, javacOptions, maxErrors, foldMappers(sourcePositionMappers), order),
-  //     incSetup
-  //   )
-
-  // @deprecated("Use `compilers(ScalaInstance, ClasspathOptions, Option[File], IvyConfiguration)`.", "0.13.10")
-  // def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File])(implicit app: AppConfiguration, log: Logger): Compilers =
-  //   {
-  //     val javac =
-  //       AggressiveCompile.directOrFork(instance, cpOptions, javaHome)
-  //     val javac2 =
-  //       JavaTools.directOrFork(instance, cpOptions, javaHome)
-  //     // Hackery to enable both the new and deprecated APIs to coexist peacefully.
-  //     case class CheaterJavaTool(newJavac: IncrementalCompilerJavaTools, delegate: JavaTool) extends JavaTool with JavaToolWithNewInterface {
-  //       def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit =
-  //         javac.compile(contract, sources, classpath, outputDirectory, options)(log)
-  //       def onArgs(f: Seq[String] => Unit): JavaTool = CheaterJavaTool(newJavac, delegate.onArgs(f))
-  //     }
-  //     compilers(instance, cpOptions, CheaterJavaTool(javac2, javac))
-  //   }
-  // def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration)(implicit app: AppConfiguration, log: Logger): Compilers =
-  //   {
-  //     val javac =
-  //       AggressiveCompile.directOrFork(instance, cpOptions, javaHome)
-  //     val javac2 =
-  //       JavaTools.directOrFork(instance, cpOptions, javaHome)
-  //     // Hackery to enable both the new and deprecated APIs to coexist peacefully.
-  //     case class CheaterJavaTool(newJavac: IncrementalCompilerJavaTools, delegate: JavaTool) extends JavaTool with JavaToolWithNewInterface {
-  //       def compile(contract: JavacContract, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String])(implicit log: Logger): Unit =
-  //         javac.compile(contract, sources, classpath, outputDirectory, options)(log)
-  //       def onArgs(f: Seq[String] => Unit): JavaTool = CheaterJavaTool(newJavac, delegate.onArgs(f))
-  //     }
-  //     val scalac = scalaCompiler(instance, cpOptions, ivyConfiguration)
-  //     new Compilers(scalac, CheaterJavaTool(javac2, javac))
-  //   }
-  // @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
-  // def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: sbt.compiler.JavaCompiler.Fork)(implicit app: AppConfiguration, log: Logger): Compilers =
-  //   {
-  //     val javaCompiler = sbt.compiler.JavaCompiler.fork(cpOptions, instance)(javac)
-  //     compilers(instance, cpOptions, javaCompiler)
-  //   }
-  // @deprecated("Deprecated in favor of new sbt.compiler.javac package.", "0.13.8")
-  // def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javac: JavaTool)(implicit app: AppConfiguration, log: Logger): Compilers =
-  //   {
-  //     val scalac = scalaCompiler(instance, cpOptions)
-  //     new Compilers(scalac, javac)
-  //   }
-  // @deprecated("Use `scalaCompiler(ScalaInstance, ClasspathOptions, IvyConfiguration)`.", "0.13.10")
-  // def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
-  //   {
-  //     val launcher = app.provider.scalaProvider.launcher
-  //     val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
-  //     val provider = ComponentCompiler.interfaceProvider(componentManager)
-  //     new AnalyzingCompiler(instance, provider, cpOptions)
-  //   }
-
-  def compilers(cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore)(implicit app: AppConfiguration, log: Logger): Compilers =
-    {
-      val scalaProvider = app.provider.scalaProvider
-      val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
-      val sourceModule = scalaCompilerBridgeSource2_12
-      compilers(instance, cpOptions, None, ivyConfiguration, fileToStore, sourceModule)
-    }
-
-  // def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions)(implicit app: AppConfiguration, log: Logger): Compilers =
-  //   compilers(instance, cpOptions, None)
+  def compilers(
+    cpOptions: ClasspathOptions, ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore
+  )(implicit app: AppConfiguration, log: Logger): Compilers = {
+    val scalaProvider = app.provider.scalaProvider
+    val instance = ScalaInstance(scalaProvider.version, scalaProvider.launcher)
+    val sourceModule = scalaCompilerBridgeSource2_12
+    compilers(instance, cpOptions, None, ivyConfiguration, fileToStore, sourceModule)
+  }
 
   // TODO: Get java compiler
-  def compilers(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
-    ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): Compilers = {
+  def compilers(
+    instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
+    ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID
+  )(implicit app: AppConfiguration, log: Logger): Compilers = {
     val scalac = scalaCompiler(instance, cpOptions, javaHome, ivyConfiguration, fileToStore, sourcesModule)
     val javac = JavaTools.directOrFork(instance, cpOptions, javaHome)
     new Compilers(scalac, javac)
   }
 
-  def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
-    {
-      val launcher = app.provider.scalaProvider.launcher
-      val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
-      val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
-      new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
-    }
+
+  def scalaCompiler(
+    instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File],
+    ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID
+  )(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler = {
+    val launcher = app.provider.scalaProvider.launcher
+    val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
+    val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
+    new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
+  }
 
   private val compiler = new IncrementalCompilerImpl
-  def compile(in: Inputs, log: Logger): CompileResult =
-    {
-      compiler.compile(in, log)
-      // import in.inputs.config._
-      // compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
-    }
-  // def compile(in: Inputs, log: Logger, reporter: xsbti.Reporter): CompileResult =
-  //   {
-  //     import in.inputs.compilers._
-  //     import in.inputs.config._
-  //     import in.inputs.incSetup._
-  //     // Here is some trickery to choose the more recent (reporter-using) java compiler rather
-  //     // than the previously defined versions.
-  //     // TODO - Remove this hackery in sbt 1.0.
-  //     val javacChosen: xsbti.compile.JavaCompiler =
-  //       in.inputs.compilers.javac.xsbtiCompiler // ).getOrElse(in.inputs.compilers.javac)
-  //     // TODO - Why are we not using the IC interface???
-  //     val compiler = new IncrementalCompilerImpl
-  //     compiler.incrementalCompile(scalac, javacChosen, sources, classpath, CompileOutput(classesDirectory), cache, None, options, javacOptions,
-  //       in.previousAnalysis.analysis, in.previousAnalysis.setup, analysisMap, definesClass, reporter, order, skip, incOptions)(log)
-  //   }
+
+  def compile(in: Inputs, log: Logger): CompileResult = compiler.compile(in, log)
 
   private[sbt] def foldMappers[A](mappers: Seq[A => Option[A]]) =
     mappers.foldRight({ p: A => p }) { (mapper, mappers) => { p: A => mapper(p).getOrElse(mappers(p)) } }
diff --git a/main-actions/src/main/scala/sbt/RawCompileLike.scala b/main-actions/src/main/scala/sbt/RawCompileLike.scala
index 1fa3198d5..f3a0c7497 100644
--- a/main-actions/src/main/scala/sbt/RawCompileLike.scala
+++ b/main-actions/src/main/scala/sbt/RawCompileLike.scala
@@ -17,7 +17,6 @@ import sbt.internal.util.{ CacheStoreFactory, FilesInfo, HashFileInfo, HNil, Mod
 import sbt.internal.util.FileInfo.{ exists, hash, lastModified }
 import xsbti.compile.ClasspathOptions
-import sbt.util.Logger
 import sbt.internal.util.ManagedLogger
 
 object RawCompileLike {
diff --git a/main-settings/src/main/scala/sbt/Structure.scala b/main-settings/src/main/scala/sbt/Structure.scala
index 690a3f721..3ba91e834 100644
--- a/main-settings/src/main/scala/sbt/Structure.scala
+++ b/main-settings/src/main/scala/sbt/Structure.scala
@@ -54,7 +54,8 @@ sealed abstract class SettingKey[T] extends ScopedTaskable[T] with KeyedInitiali
 
   final def transform(f: T => T, source: SourcePosition): Setting[T] = set(scopedKey(f), source)
 
-  protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] = set((this, other)(f), source)
+  protected[this] def make[S](other: Initialize[S], source: SourcePosition)(f: (T, S) => T): Setting[T] =
+    set((this, other)(f), source)
 }
 
 /**
@@ -83,7 +84,7 @@ sealed abstract class TaskKey[T] extends ScopedTaskable[T] with KeyedInitialize[
   final def removeN[V](vs: Initialize[Task[V]], source: SourcePosition)(implicit r: Remove.Values[T, V]): Setting[Task[T]] = make(vs, source)(r.removeValues)
 
   private[this] def make[S](other: Initialize[Task[S]], source: SourcePosition)(f: (T, S) => T): Setting[Task[T]] =
-    set((this, other) { (a, b) => (a, b) map f.tupled }, source)
+    set((this, other)((a, b) => (a, b) map f.tupled), source)
 }
 
 /**
@@ -210,7 +211,8 @@ object Scoped {
   final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] {
     protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f
 
-    def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.dependsOn(deps: _*) }
+    def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] =
+      (i, Initialize.joinAny[Task](tasks))((thisTask, deps) => thisTask.dependsOn(deps: _*))
 
     def failure: Initialize[Task[Incomplete]] = i(_.failure)
     def result: Initialize[Task[Result[S]]] = i(_.result)
@@ -220,12 +222,14 @@ object Scoped {
     def runBefore[T](tasks: Initialize[Task[T]]*): Initialize[Task[S]] = nonLocal(tasks, Def.runBefore)
 
     private[this] def nonLocal(tasks: Seq[AnyInitTask], key: AttributeKey[Seq[Task[_]]]): Initialize[Task[S]] =
-      (Initialize.joinAny[Task](tasks), i) { (ts, i) => i.copy(info = i.info.set(key, ts)) }
+      (Initialize.joinAny[Task](tasks), i)((ts, i) => i.copy(info = i.info.set(key, ts)))
   }
 
   final class RichInitializeInputTask[S](i: Initialize[InputTask[S]]) extends RichInitTaskBase[S, InputTask] {
     protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f)
-    def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = (i, Initialize.joinAny[Task](tasks)) { (thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)) }
+
+    def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] =
+      (i, Initialize.joinAny[Task](tasks))((thisTask, deps) => thisTask.mapTask(_.dependsOn(deps: _*)))
   }
 
   sealed abstract class RichInitTaskBase[S, R[_]] {
diff --git a/main-settings/src/main/scala/sbt/std/TaskMacro.scala b/main-settings/src/main/scala/sbt/std/TaskMacro.scala
index 44b3a372d..a81507f1b 100644
--- a/main-settings/src/main/scala/sbt/std/TaskMacro.scala
+++ b/main-settings/src/main/scala/sbt/std/TaskMacro.scala
@@ -42,23 +42,21 @@ object FullInstance extends Instance.Composed[Initialize, Task](InitializeInstan
   type SS = sbt.internal.util.Settings[Scope]
   val settingsData = TaskKey[SS]("settings-data", "Provides access to the project data for the build.", KeyRanks.DTask)
 
-  def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] =
-    {
-      import Scoped._
-      (in, settingsData, Def.capturedTransformations) apply {
-        (a: Task[Initialize[Task[T]]], data: Task[SS], f) =>
-          import TaskExtra.multT2Task
-          (a, data) flatMap { case (a, d) => f(a) evaluate d }
-      }
+  def flatten[T](in: Initialize[Task[Initialize[Task[T]]]]): Initialize[Task[T]] = {
+    import Scoped._
+    (in, settingsData, Def.capturedTransformations) { (a: Task[Initialize[Task[T]]], data: Task[SS], f) =>
+      import TaskExtra.multT2Task
+      (a, data) flatMap { case (a, d) => f(a) evaluate d }
     }
-  def flattenFun[S, T](in: Initialize[Task[S => Initialize[Task[T]]]]): Initialize[S => Task[T]] =
-    {
-      import Scoped._
-      (in, settingsData, Def.capturedTransformations) apply { (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => (s: S) =>
-        import TaskExtra.multT2Task
-        (a, data) flatMap { case (af, d) => f(af(s)) evaluate d }
-      }
+  }
+
+  def flattenFun[S, T](in: Initialize[Task[S => Initialize[Task[T]]]]): Initialize[S => Task[T]] = {
+    import Scoped._
+    (in, settingsData, Def.capturedTransformations) { (a: Task[S => Initialize[Task[T]]], data: Task[SS], f) => (s: S) =>
+      import TaskExtra.multT2Task
+      (a, data) flatMap { case (af, d) => f(af(s)) evaluate d }
     }
+  }
 }
 
 object TaskMacro {
diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala
index 38b5c0dc3..606116c91 100755
--- a/main/src/main/scala/sbt/Defaults.scala
+++ b/main/src/main/scala/sbt/Defaults.scala
@@ -548,11 +548,16 @@ object Defaults extends BuildCommon {
   }
   def buffered(log: Logger): Logger = new BufferedLogger(FullLogger(log))
-  def testExtra(extra: AttributeMap, tdef: TestDefinition): AttributeMap =
-    {
-      val mod = tdef.fingerprint match { case f: SubclassFingerprint => f.isModule; case f: AnnotatedFingerprint => f.isModule; case _ => false }
-      extra.put(name.key, tdef.name).put(isModule, mod)
+
+  def testExtra(extra: AttributeMap, tdef: TestDefinition): AttributeMap = {
+    val mod = tdef.fingerprint match {
+      case f: SubclassFingerprint => f.isModule
+      case f: AnnotatedFingerprint => f.isModule
+      case _ => false
     }
+    extra.put(name.key, tdef.name).put(isModule, mod)
+  }
+
   def singleTestGroup(key: Scoped): Initialize[Task[Seq[Tests.Group]]] = inTask(key, singleTestGroupDefault)
   def singleTestGroupDefault: Initialize[Task[Seq[Tests.Group]]] = Def.task {
     val tests = definedTests.value
@@ -794,7 +799,8 @@ object Defaults extends BuildCommon {
         .withConfigurations(cOpt.toVector)
     }
   }
-  @deprecated("The configuration(s) should not be decided based on the classifier.", "1.0")
+
+  @deprecated("The configuration(s) should not be decided based on the classifier.", "1.0.0")
   def artifactConfigurations(base: Artifact, scope: Configuration, classifier: Option[String]): Iterable[Configuration] =
     classifier match {
       case Some(c) => Artifact.classifierConf(c) :: Nil
@@ -805,7 +811,9 @@ object Defaults extends BuildCommon {
   def pairID = Util.pairID
 
   @deprecated("Use `packageTaskSettings` instead", "0.12.0")
-  def packageTasks(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) = packageTaskSettings(key, mappingsTask)
+  def packageTasks(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) =
+    packageTaskSettings(key, mappingsTask)
+
   def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) =
     inTask(key)(Seq(
       key in TaskGlobal := packageTask.value,
@@ -1311,7 +1319,8 @@ object Classpaths {
   def enabledOnly[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[T]] =
     (forallIn(key, pkgTasks) zipWith forallIn(publishArtifact, pkgTasks))(_ zip _ collect { case (a, true) => a })
-  def forallIn[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] =
+
+  def forallIn[T](key: Scoped.ScopingSetting[SettingKey[T]], pkgTasks: Seq[TaskKey[_]]): Initialize[Seq[T]] =
     pkgTasks.map(pkg => key in pkg.scope in pkg).join
 
   private[this] def publishGlobalDefaults = Defaults.globalDefaults(Seq(
@@ -1926,19 +1935,30 @@ object Classpaths {
           f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
   }*/
-  def defaultRepositoryFilter = (repo: MavenRepository) => !repo.root.startsWith("file:")
-  def getPublishTo(repo: Option[Resolver]): Resolver = repo getOrElse sys.error("Repository for publishing is not specified.")
+  def defaultRepositoryFilter: MavenRepository => Boolean = repo => !repo.root.startsWith("file:")
+
+  def getPublishTo(repo: Option[Resolver]): Resolver =
+    repo getOrElse sys.error("Repository for publishing is not specified.")
 
   def deliverConfig(outputDirectory: File, status: String = "release", logging: UpdateLogging = UpdateLogging.DownloadOnly) =
     new DeliverConfiguration(deliverPattern(outputDirectory), status, None, logging)
   @deprecated("Previous semantics allowed overwriting cached files, which was unsafe. Please specify overwrite parameter.", "0.13.2")
-  def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String, logging: UpdateLogging): PublishConfiguration =
+  def publishConfig(
+    artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String],
+    resolverName: String, logging: UpdateLogging
+  ): PublishConfiguration =
     publishConfig(artifacts, ivyFile, checksums, resolverName, logging, overwrite = true)
-  def publishConfig(artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String], resolverName: String = "local", logging: UpdateLogging = UpdateLogging.DownloadOnly, overwrite: Boolean = false) =
+
+  def publishConfig(
+    artifacts: Map[Artifact, File], ivyFile: Option[File], checksums: Seq[String],
+    resolverName: String = "local", logging: UpdateLogging = UpdateLogging.DownloadOnly,
+    overwrite: Boolean = false
+  ) =
     new PublishConfiguration(ivyFile, resolverName, artifacts, checksums.toVector, logging, overwrite)
-  def deliverPattern(outputPath: File): String = (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath
+
+  def deliverPattern(outputPath: File): String =
+    (outputPath / "[artifact]-[revision](-[classifier]).[ext]").absolutePath
 
   def projectDependenciesTask: Initialize[Task[Seq[ModuleID]]] =
     Def.task {
@@ -2197,23 +2217,25 @@ object Classpaths {
       modifyForPlugin(plugin, ModuleID(org, ScalaArtifacts.LibraryID, version)) :: Nil
     else Nil
-  def addUnmanagedLibrary: Seq[Setting[_]] = Seq(
-    unmanagedJars in Compile ++= unmanagedScalaLibrary.value
-  )
-  def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] =
-    Def.taskDyn {
-      if (autoScalaLibrary.value && scalaHome.value.isDefined)
-        Def.task { scalaInstance.value.libraryJar :: Nil }
-      else
-        Def.task { Nil }
-    }
+
+  def addUnmanagedLibrary: Seq[Setting[_]] =
+    Seq(unmanagedJars in Compile ++= unmanagedScalaLibrary.value)
+
+  def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] = Def.taskDyn {
+    if (autoScalaLibrary.value && scalaHome.value.isDefined)
+      Def.task { scalaInstance.value.libraryJar :: Nil }
+    else
+      Def.task { Nil }
+  }
 
   import DependencyFilter._
   def managedJars(config: Configuration, jarTypes: Set[String], up: UpdateReport): Classpath =
     up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes)).toSeq.map {
       case (conf, module, art, file) =>
-        Attributed(file)(AttributeMap.empty.put(artifact.key, art).put(moduleID.key, module).put(configuration.key, config))
-    } distinct;
+        Attributed(file)(
+          AttributeMap.empty.put(artifact.key, art).put(moduleID.key, module).put(configuration.key, config)
+        )
+    }.distinct
 
   def findUnmanagedJars(config: Configuration, base: File, filter: FileFilter, excl: FileFilter): Classpath =
     (base * (filter -- excl) +++ (base / config.name).descendantsExcept(filter, excl)).classpath
diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala
index 63045fca9..dd61280cf 100644
--- a/main/src/main/scala/sbt/EvaluateTask.scala
+++ b/main/src/main/scala/sbt/EvaluateTask.scala
@@ -77,11 +77,12 @@ object TaskCancellationStrategy {
   /** Cancel handler which registers for SIGINT and cancels tasks when it is received. */
   object Signal extends TaskCancellationStrategy {
     type State = Signals.Registration
-    def onTaskEngineStart(canceller: RunningTaskEngine): Signals.Registration = {
+
+    def onTaskEngineStart(canceller: RunningTaskEngine): Signals.Registration =
       Signals.register(() => canceller.cancelAndShutdown())
-    }
-    def onTaskEngineFinish(registration: Signals.Registration): Unit =
-      registration.remove()
+
+    def onTaskEngineFinish(registration: Signals.Registration): Unit = registration.remove()
+
     override def toString: String = "Signal"
   }
 }
@@ -98,14 +99,11 @@ sealed trait EvaluateTaskConfig {
   def checkCycles: Boolean
   def progressReporter: ExecuteProgress[Task]
   def cancelStrategy: TaskCancellationStrategy
-  /**
-   * If true, we force a finalizer/gc run (or two) after task execution completes when needed.
-   */
+
+  /** If true, we force a finalizer/gc run (or two) after task execution completes when needed. */
   def forceGarbageCollection: Boolean
-  /**
-   * Interval to force GC.
-   */
+  /** Interval to force GC. */
   def minForcegcInterval: Duration
 }
 
@@ -148,7 +146,13 @@ object EvaluateTaskConfig {
   }
 }
 
-final case class PluginData(dependencyClasspath: Seq[Attributed[File]], definitionClasspath: Seq[Attributed[File]], resolvers: Option[Seq[Resolver]], report: Option[UpdateReport], scalacOptions: Seq[String]) {
+final case class PluginData(
+  dependencyClasspath: Seq[Attributed[File]],
+  definitionClasspath: Seq[Attributed[File]],
+  resolvers: Option[Seq[Resolver]],
+  report: Option[UpdateReport],
+  scalacOptions: Seq[String]
+) {
   val classpath: Seq[Attributed[File]] = definitionClasspath ++ dependencyClasspath
 }
diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala
index 0a6d0f3cb..e1cdbf2b0 100644
--- a/main/src/main/scala/sbt/Main.scala
+++ b/main/src/main/scala/sbt/Main.scala
@@ -20,6 +20,7 @@ import sbt.internal.{
   ProjectNavigation,
   Script,
   SessionSettings,
+  SetResult,
   SettingCompletions,
   LogManager,
   DefaultBackgroundJobService
@@ -303,7 +304,9 @@ object BuiltinCommands {
     val result = Load.mkEval(classpath, s.baseDir, Nil).eval(arg, srcName = "", imports = new EvalImports(Nil, ""))
     s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}")
   }
-  def sessionCommand = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command)
+
+  def sessionCommand: Command = Command.make(SessionCommand, sessionBrief, SessionSettings.Help)(SessionSettings.command)
+
   def reapply(newSession: SessionSettings, structure: BuildStructure, s: State): State = {
     s.log.info("Reapplying settings...")
@@ -313,7 +316,8 @@ object BuiltinCommands {
     val newStructure = Load.reapply(withLogger.mergeSettings, structure)(Project.showContextKey(newSession, structure))
     Project.setProject(newSession, newStructure, s)
   }
-  def set = Command(SetCommand, setBrief, setDetailed)(setParser) {
+
+  def set: Command = Command(SetCommand, setBrief, setDetailed)(setParser) {
     case (s, (all, arg)) =>
       val extracted = Project extract s
       import extracted._
@@ -335,10 +339,11 @@ object BuiltinCommands {
       s.log.debug(setResult.verboseSummary)
       reapply(setResult.session, structure, s)
   }
-  // @deprecated("Use SettingCompletions.setThis", "0.13.0")
-  def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String) =
+
+  def setThis(s: State, extracted: Extracted, settings: Seq[Def.Setting[_]], arg: String): SetResult =
     SettingCompletions.setThis(s, extracted, settings, arg)
-  def inspect = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
+
+  def inspect: Command = Command(InspectCommand, inspectBrief, inspectDetailed)(Inspect.parser) {
     case (s, (option, sk)) =>
       s.log.info(Inspect.output(s, option, sk))
       s
@@ -512,9 +517,9 @@ object BuiltinCommands {
     removeBase.map(toRemove => (xs: List[URI]) => xs.filterNot(toRemove.toSet))
   }
 
-  def project = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
+  def project: Command = Command.make(ProjectCommand, projectBrief, projectDetailed)(ProjectNavigation.command)
 
-  def loadFailed = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
+  def loadFailed: Command = Command(LoadFailed)(loadProjectParser)(doLoadFailed)
 
   @deprecated("No longer used.", "0.13.2")
   def handleLoadFailed(s: State): State = doLoadFailed(s, "")
@@ -528,28 +533,33 @@ object BuiltinCommands {
     def ignoreMsg = if (Project.isProjectLoaded(s)) "using previously loaded project" else "no project loaded"
 
     result match {
-      case "" => retry
-      case _ if matches("retry") => retry
-      case _ if matches(Quit) => s.exit(ok = false)
-      case _ if matches("ignore") =>
-        s.log.warn(s"Ignoring load failure: $ignoreMsg."); s
-      case _ if matches("last") => LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s
-      case _ => println("Invalid response."); doLoadFailed(s, loadArg)
+      case ""                     => retry
+      case _ if matches("retry")  => retry
+      case _ if matches(Quit)     => s.exit(ok = false)
+      case _ if matches("ignore") => s.log.warn(s"Ignoring load failure: $ignoreMsg."); s
+      case _ if matches("last")   => LastCommand :: loadProjectCommand(LoadFailed, loadArg) :: s
+      case _                      => println("Invalid response."); doLoadFailed(s, loadArg)
     }
   }
 
-  def loadProjectCommands(arg: String) =
+  def loadProjectCommands(arg: String): List[String] =
     StashOnFailure ::
       (OnFailure + " " + loadProjectCommand(LoadFailed, arg)) ::
       loadProjectCommand(LoadProjectImpl, arg) ::
      PopOnFailure ::
      State.FailureWall ::
      Nil
 
-  def loadProject = Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) => loadProjectCommands(arg) ::: s }
+
+  def loadProject: Command =
+    Command(LoadProject, LoadProjectBrief, LoadProjectDetailed)(loadProjectParser) { (s, arg) =>
+      loadProjectCommands(arg) ::: s
+    }
+
   private[this] def loadProjectParser = (s: State) => matched(Project.loadActionParser)
 
   private[this] def loadProjectCommand(command: String, arg: String): String = s"$command $arg".trim
 
-  def loadProjectImpl = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
+  def loadProjectImpl: Command = Command(LoadProjectImpl)(_ => Project.loadActionParser)(doLoadProject)
+
   def doLoadProject(s0: State, action: LoadAction.Value): State = {
     val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
@@ -586,7 +596,7 @@ object BuiltinCommands {
     s.put(Keys.stateCompilerCache, cache)
   }
 
-  def server = Command.command(Server, Help.more(Server, ServerDetailed)) { s0 =>
+  def server: Command = Command.command(Server, Help.more(Server, ServerDetailed)) { s0 =>
     import sbt.internal.{ ConsolePromptEvent, ConsoleUnpromptEvent }
     val exchange = StandardMain.exchange
     val s1 = exchange run s0
diff --git a/main/src/main/scala/sbt/Project.scala b/main/src/main/scala/sbt/Project.scala
index 6306e0ff7..5489b6eb1 100755
--- a/main/src/main/scala/sbt/Project.scala
+++ b/main/src/main/scala/sbt/Project.scala
@@ -614,14 +614,19 @@ object Project extends ProjectExtra {
     import SessionVar.{ persistAndSet, resolveContext, set, transform => tx }
 
     def updateState(f: (State, S) => State): Def.Initialize[Task[S]] = i(t => tx(t, f))
-    def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] = (Keys.resolvedScoped, i) { (scoped, task) =>
-      tx(task, (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))
-    }
+
+    def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] =
+      (Keys.resolvedScoped, i)((scoped, task) =>
+        tx(task, (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f))
+      )
+
     def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] =
-      (i, Keys.resolvedScoped)((t, scoped) => tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value)))
+      (i, Keys.resolvedScoped)((t, scoped) =>
+        tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))
+      )
   }
 
-  import reflect.macros._
+  import scala.reflect.macros._
 
   def projectMacroImpl(c: blackbox.Context): c.Expr[Project] =
     {
@@ -636,7 +641,7 @@ private[sbt] trait GeneratedRootProject
 
 trait ProjectExtra0 {
   implicit def wrapProjectReferenceSeqEval[T](rs: => Seq[T])(implicit ev: T => ProjectReference): Seq[Eval[ProjectReference]] =
-    rs map { r => Eval.later(r: ProjectReference) }
+    rs map (r => Eval.later(r: ProjectReference))
 }
 
 trait ProjectExtra extends ProjectExtra0 {
@@ -646,19 +651,30 @@ trait ProjectExtra extends ProjectExtra0 {
   implicit def wrapProjectReferenceEval[T](ref: => T)(implicit ev: T => ProjectReference): Eval[ProjectReference] =
     Eval.later(ref: ProjectReference)
 
-  implicit def wrapSettingDefinitionEval[T](d: => T)(implicit ev: T => Def.SettingsDefinition): Eval[Def.SettingsDefinition] = Eval.later(d)
-  implicit def wrapSettingSeqEval(ss: => Seq[Setting[_]]): Eval[Def.SettingsDefinition] = Eval.later(new Def.SettingList(ss))
+  implicit def wrapSettingDefinitionEval[T](d: => T)(implicit ev: T => Def.SettingsDefinition): Eval[Def.SettingsDefinition] =
+    Eval.later(d)
 
-  implicit def configDependencyConstructor[T](p: T)(implicit ev: T => ProjectReference): Constructor = new Constructor(p)
-  implicit def classpathDependency[T](p: T)(implicit ev: T => ProjectReference): ClasspathDep[ProjectReference] = new ClasspathDependency(p, None)
+  implicit def wrapSettingSeqEval(ss: => Seq[Setting[_]]): Eval[Def.SettingsDefinition] =
+    Eval.later(new Def.SettingList(ss))
+
+  implicit def configDependencyConstructor[T](p: T)(implicit ev: T => ProjectReference): Constructor =
+    new Constructor(p)
+
+  implicit def classpathDependency[T](p: T)(implicit ev: T => ProjectReference): ClasspathDep[ProjectReference] =
+    ClasspathDependency(p, None)
 
   // These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project).
   // Moving Initialize and other settings types to Def and decoupling Project, Def, and Structure means these go here for now
-  implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] = new Scoped.RichInitializeTask(init)
-  implicit def richInitializeInputTask[T](init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] = new Scoped.RichInitializeInputTask(init)
+  implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] =
+    new Scoped.RichInitializeTask(init)
+
+  implicit def richInitializeInputTask[T](init: Initialize[InputTask[T]]): Scoped.RichInitializeInputTask[T] =
+    new Scoped.RichInitializeInputTask(init)
+
   implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] = new Scoped.RichInitialize[T](i)
-  implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] = new Project.RichTaskSessionVar(init)
+
+  implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] =
+    new Project.RichTaskSessionVar(init)
 
   def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
     inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)
diff --git a/main/src/main/scala/sbt/internal/BuildUtil.scala b/main/src/main/scala/sbt/internal/BuildUtil.scala
index 62f949cc9..e6f293520 100644
--- a/main/src/main/scala/sbt/internal/BuildUtil.scala
+++ b/main/src/main/scala/sbt/internal/BuildUtil.scala
@@ -51,9 +51,9 @@ object BuildUtil {
   def dependencies(units: Map[URI, LoadedBuildUnit]): BuildDependencies =
     {
-      import collection.mutable.HashMap
-      val agg = new HashMap[ProjectRef, Seq[ProjectRef]]
-      val cp = new HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]]
+      import scala.collection.mutable
+      val agg = new mutable.HashMap[ProjectRef, Seq[ProjectRef]]
+      val cp = new mutable.HashMap[ProjectRef, Seq[ClasspathDep[ProjectRef]]]
       for (lbu <- units.values; rp <- lbu.defined.values) {
         val ref = ProjectRef(lbu.unit.uri, rp.id)
         cp(ref) = rp.dependencies
@@ -97,7 +97,7 @@ object BuildUtil {
     {
       val depPairs =
         for {
-          (uri, unit) <- units.toIterable
+          (uri, unit) <- units.toIterable // don't lose this toIterable, doing so breaks actions/cross-multiproject & actions/update-state-fail
          project <- unit.defined.values
          ref = ProjectRef(uri, project.id)
          agg <- project.aggregate
diff --git a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala b/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala
index c0a444ce0..3341dc179 100644
--- a/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala
+++ b/main/src/main/scala/sbt/internal/EvaluateConfigurations.scala
@@ -204,13 +204,12 @@ private[sbt] object EvaluateConfigurations {
    * @return A method that given an sbt classloader, can return the actual Seq[Setting[_]] defined by
    *         the expression.
    */
-  @deprecated("Build DSL now includes non-Setting[_] type settings.", "0.13.6") // Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
+  // Build DSL now includes non-Setting[_] type settings.
+  // Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
   def evaluateSetting(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
-    {
-      evaluateDslEntry(eval, name, imports, expression, range).result andThen {
-        case DslEntry.ProjectSettings(values) => values
-        case _ => Nil
-      }
+    evaluateDslEntry(eval, name, imports, expression, range).result andThen {
+      case DslEntry.ProjectSettings(values) => values
+      case _ => Nil
     }
 
   /**
diff --git a/main/src/main/scala/sbt/internal/Load.scala b/main/src/main/scala/sbt/internal/Load.scala
index ea07c0d7b..829c9732e 100755
--- a/main/src/main/scala/sbt/internal/Load.scala
+++ b/main/src/main/scala/sbt/internal/Load.scala
@@ -496,7 +496,9 @@ private[sbt] object Load {
   def emptyBuild(uri: URI) = sys.error(s"No root project defined for build unit '$uri'")
   def noBuild(uri: URI) = sys.error(s"Build unit '$uri' not defined.")
   def noProject(uri: URI, id: String) = sys.error(s"No project '$id' defined in '$uri'.")
-  def noConfiguration(uri: URI, id: String, conf: String) = sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'")
+
+  def noConfiguration(uri: URI, id: String, conf: String) =
+    sys.error(s"No configuration '$conf' defined in project '$id' in '$uri'")
 
   // Called from builtinLoader
   def loadUnit(uri: URI, localBase: File, s: State, config: LoadBuildConfiguration): BuildUnit =
diff --git a/run/src/main/scala/sbt/Fork.scala b/run/src/main/scala/sbt/Fork.scala
index a6e33f602..860de613e 100644
--- a/run/src/main/scala/sbt/Fork.scala
+++ b/run/src/main/scala/sbt/Fork.scala
@@ -20,7 +20,15 @@ import scala.sys.process.Process
  * @param connectInput If true, the standard input of the forked process is connected to the standard input of this process. Otherwise, it is connected to an empty input stream. Connecting input streams can be problematic, especially on versions before Java 7.
  * @param envVars The environment variables to provide to the forked process. By default, none are provided.
  */
-final case class ForkOptions(javaHome: Option[File] = None, outputStrategy: Option[OutputStrategy] = None, bootJars: Seq[File] = Nil, workingDirectory: Option[File] = None, runJVMOptions: Seq[String] = Nil, connectInput: Boolean = false, envVars: Map[String, String] = Map.empty)
+final case class ForkOptions(
+  javaHome: Option[File] = None,
+  outputStrategy: Option[OutputStrategy] = None,
+  bootJars: Seq[File] = Nil,
+  workingDirectory: Option[File] = None,
+  runJVMOptions: Seq[String] = Nil,
+  connectInput: Boolean = false,
+  envVars: Map[String, String] = Map.empty
+)
 
 /** Configures where the standard output and error streams from a forked process go.*/
 sealed abstract class OutputStrategy
diff --git a/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala b/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala
index 896277515..90bba1002 100644
--- a/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala
+++ b/scripted/plugin/src/main/scala/sbt/ScriptedPlugin.scala
@@ -57,10 +57,10 @@ object ScriptedPlugin extends AutoPlugin {
     ModuleUtilities.getObject("sbt.test.ScriptedTests", loader)
   }
 
-  def scriptedRunTask: Initialize[Task[Method]] = (scriptedTests) map {
-    (m) =>
-      m.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]], classOf[File], classOf[Array[String]])
-  }
+  def scriptedRunTask: Initialize[Task[Method]] = Def task (
+    scriptedTests.value.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]],
+      classOf[File], classOf[Array[String]])
+  )
 
   import DefaultParsers._
   case class ScriptedTestPage(page: Int, total: Int)