Merge pull request #3031 from eed3si9n/wip/bump_modules

Bump underlying modules to latest
eugene yokota 2017-03-23 22:19:53 -07:00 committed by GitHub
commit 317085a458
61 changed files with 137 additions and 129 deletions

View File

@ -6,10 +6,11 @@ Migration notes
- `Project(...)` constructor is restricted down to two parameters. Use `project` instead.
- `sbt.Plugin` is also gone. Use auto plugins.
- The incremental compiler, called Zinc, uses class-based name hashing.
- Zinc drops support for Scala 2.8.x and 2.9.x.
- Zinc drops support for Scala 2.8.x, 2.9.x, 2.11.1 and below.
- Removed the pre-0.13.7 *.sbt file parser (previously available under `-Dsbt.parser.simple=true`)
- Removed old, hyphen-separated key names (use `publishLocal` instead of `publish-local`)
- Removes no-longer-documented old operators `<<=`, `<+=`, and `<++=`.
- Renames early command feature from `--<command>` to `early(<command>)`.
- Log options `-error`, `-warn`, `-info`, `-debug` are added as shorthand for `"early(error)"` etc.
- `sbt.Process` and `sbt.ProcessExtra` are gone. Use `scala.sys.process` instead.
- `incOptions.value.withNameHashing(...)` option is removed.
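
As an illustration of a few of the notes above, here is a minimal, hypothetical build.sbt sketch (the project, key, and command names are invented for this example, not taken from this diff):

```scala
// Hypothetical build.sbt fragment illustrating some of the migration notes above.
import scala.sys.process._ // replaces the removed sbt.Process / sbt.ProcessExtra

// Prefer the `project` macro over the now-restricted Project(...) constructor.
lazy val core = (project in file("core"))
  .settings(
    name := "core",
    // Only camelCase key names remain, e.g. publishLocal rather than publish-local.
    publishArtifact in Test := false
  )

// Shelling out goes through the standard library instead of sbt.Process:
lazy val gitHead = settingKey[String]("current git commit")
gitHead in ThisBuild := Process("git rev-parse HEAD").!!.trim
```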

View File

@ -4,12 +4,12 @@
package sbt
import sbt.internal.inc.javac.JavaTools
import sbt.internal.inc.{ AnalyzingCompiler, ComponentCompiler, ScalaInstance }
import sbt.internal.inc.{ AnalyzingCompiler, ComponentCompiler, ScalaInstance, ZincComponentManager, IncrementalCompilerImpl }
import xsbti.{ Logger => _, _ }
import xsbti.compile.{ ClasspathOptions, Compilers, CompileResult, Inputs }
import java.io.File
import sbt.internal.librarymanagement.{ ComponentManager, IvyConfiguration }
import sbt.internal.librarymanagement.IvyConfiguration
import sbt.librarymanagement.{ ModuleID, VersionNumber }
import sbt.util.Logger
import sbt.internal.util.CacheStore
@ -18,12 +18,18 @@ object Compiler {
val DefaultMaxErrors = 100
private[sbt] def defaultCompilerBridgeSource(sv: String): ModuleID =
VersionNumber(sv) match {
// 2.10 and before
case VersionNumber(ns, _, _) if (ns.size == 3) && (ns(0) == 2) && (ns(1) <= 10) => scalaCompilerBridgeSource2_10
// 2.11
case VersionNumber(ns, _, _) if (ns.size == 3) && (ns(0) == 2) && (ns(1) == 11) => scalaCompilerBridgeSource2_11
case _ => scalaCompilerBridgeSource2_12
}
private[sbt] def scalaCompilerBridgeSource2_10: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.10",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
private[sbt] def scalaCompilerBridgeSource2_11: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.11",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
private[sbt] def scalaCompilerBridgeSource2_12: ModuleID =
ModuleID(xsbti.ArtifactInfo.SbtOrganization, "compiler-bridge_2.12",
ComponentCompiler.incrementalVersion).withConfigurations(Some("component")).sources()
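
The hunk above selects a compiler bridge module from the numeric segments of the Scala version. As a standalone sketch of that mapping (simplified to return just the artifact name; `compilerBridgeArtifact` and the ad-hoc version parsing are illustrative, not sbt API):

```scala
// Simplified sketch of the bridge selection above: not sbt's API, just the
// shape of the match. `ns` stands in for VersionNumber's numeric segments.
def compilerBridgeArtifact(scalaVersion: String): String = {
  val ns = scalaVersion.split("[.-]").take(3).flatMap(s => scala.util.Try(s.toInt).toOption)
  ns match {
    case Array(2, minor, _) if minor <= 10 => "compiler-bridge_2.10" // 2.10 and before
    case Array(2, 11, _)                   => "compiler-bridge_2.11" // 2.11
    case _                                 => "compiler-bridge_2.12" // 2.12 and later
  }
}

// compilerBridgeArtifact("2.10.6") == "compiler-bridge_2.10"
// compilerBridgeArtifact("2.11.8") == "compiler-bridge_2.11"
// compilerBridgeArtifact("2.12.1") == "compiler-bridge_2.12"
```
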
@ -125,14 +131,15 @@ object Compiler {
def scalaCompiler(instance: ScalaInstance, cpOptions: ClasspathOptions, javaHome: Option[File], ivyConfiguration: IvyConfiguration, fileToStore: File => CacheStore, sourcesModule: ModuleID)(implicit app: AppConfiguration, log: Logger): AnalyzingCompiler =
{
val launcher = app.provider.scalaProvider.launcher
val componentManager = new ComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val componentManager = new ZincComponentManager(launcher.globalLock, app.provider.components, Option(launcher.ivyHome), log)
val provider = ComponentCompiler.interfaceProvider(componentManager, ivyConfiguration, fileToStore, sourcesModule)
new AnalyzingCompiler(instance, provider, cpOptions, _ => (), None)
}
private val compiler = new IncrementalCompilerImpl
def compile(in: Inputs, log: Logger): CompileResult =
{
sbt.inc.IncrementalCompilerUtil.defaultIncrementalCompiler.compile(in, log)
compiler.compile(in, log)
// import in.inputs.config._
// compile(in, log, new LoggerReporter(maxErrors, log, sourcePositionMapper))
}

View File

@ -15,6 +15,7 @@ import xsbti.Reporter
import xsbti.compile.JavaTools
import sbt.util.Logger
import sbt.internal.util.ManagedLogger
object Doc {
import RawCompileLike._
@ -36,16 +37,16 @@ object Doc {
val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java")
private[sbt] final class Scaladoc(maximumErrors: Int, compiler: AnalyzingCompiler) extends Doc {
def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: Logger): Unit = {
def apply(label: String, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], log: ManagedLogger): Unit = {
generate("Scala", label, compiler.doc, sources, classpath, outputDirectory, options, maximumErrors, log)
}
}
}
sealed trait Doc {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private[sbt] final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: Logger): Unit = {
private[sbt] final def generate(variant: String, label: String, docf: Gen, sources: Seq[File], classpath: Seq[File], outputDirectory: File, options: Seq[String], maxErrors: Int, log: ManagedLogger): Unit = {
val logSnip = variant + " API documentation"
if (sources.isEmpty)
log.info("No sources available, skipping " + logSnip + "...")

View File

@ -18,9 +18,10 @@ import sbt.internal.util.FileInfo.{ exists, hash, lastModified }
import xsbti.compile.ClasspathOptions
import sbt.util.Logger
import sbt.internal.util.ManagedLogger
object RawCompileLike {
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, Logger) => Unit
type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
private def optionFiles(options: Seq[String], fileInputOpts: Seq[String]): List[File] =
{

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -11,7 +11,7 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal
import sbt.protocol._
import sbt.internal.util.{ JLine, ChannelLogEntry, ConsoleAppender }
import sbt.internal.util.{ JLine, StringEvent, ConsoleAppender }
import sbt.util.Level
class NetworkClient(arguments: List[String]) { self =>
@ -47,7 +47,7 @@ class NetworkClient(arguments: List[String]) { self =>
val socket = new Socket(InetAddress.getByName(host), port)
new ServerConnection(socket) {
override def onEvent(event: EventMessage): Unit = self.onEvent(event)
override def onLogEntry(event: ChannelLogEntry): Unit = self.onLogEntry(event)
override def onLogEntry(event: StringEvent): Unit = self.onLogEntry(event)
override def onShutdown(): Unit =
{
running.set(false)
@ -55,7 +55,7 @@ class NetworkClient(arguments: List[String]) { self =>
}
}
def onLogEntry(event: ChannelLogEntry): Unit =
def onLogEntry(event: StringEvent): Unit =
{
val level = event.level match {
case "debug" => Level.Debug

View File

@ -8,7 +8,7 @@ package client
import java.net.{ SocketTimeoutException, Socket }
import java.util.concurrent.atomic.AtomicBoolean
import sbt.protocol._
import sbt.internal.util.ChannelLogEntry
import sbt.internal.util.StringEvent
abstract class ServerConnection(connection: Socket) {
@ -41,8 +41,8 @@ abstract class ServerConnection(connection: Socket) {
println(s"Got invalid chunk from server: $s \n" + errorDesc)
},
_ match {
case event: EventMessage => onEvent(event)
case event: ChannelLogEntry => onLogEntry(event)
case event: EventMessage => onEvent(event)
case event: StringEvent => onLogEntry(event)
}
)
}
@ -65,7 +65,7 @@ abstract class ServerConnection(connection: Socket) {
}
def onEvent(event: EventMessage): Unit
def onLogEntry(event: ChannelLogEntry): Unit
def onLogEntry(event: StringEvent): Unit
def onShutdown(): Unit

View File

@ -1,7 +1,8 @@
package sbt
import sbt.internal.util.Types.const
import sbt.internal.util.{ Attributed, AttributeKey, Init, Show }
import sbt.internal.util.{ Attributed, AttributeKey, Init }
import sbt.util.Show
import sbt.internal.util.complete.Parser
import java.io.File
import java.net.URI
@ -21,16 +22,16 @@ object Def extends Init[Scope] with TaskMacroExtra {
lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None)
def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] =
new Show[ScopedKey[_]] { def apply(key: ScopedKey[_]) = displayFull(key, keyNameColor) }
Show[ScopedKey[_]]((key: ScopedKey[_]) => displayFull(key, keyNameColor))
def showRelativeKey(current: ProjectRef, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] =
Show[ScopedKey[_]]((key: ScopedKey[_]) =>
Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayRelative(current, multi, ref)))
def showBuildRelativeKey(currentBuild: URI, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] =
Show[ScopedKey[_]]((key: ScopedKey[_]) =>
Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayBuildRelative(currentBuild, multi, ref)))
def showRelativeKey(current: ProjectRef, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] = new Show[ScopedKey[_]] {
def apply(key: ScopedKey[_]) =
Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayRelative(current, multi, ref))
}
def showBuildRelativeKey(currentBuild: URI, multi: Boolean, keyNameColor: Option[String] = None): Show[ScopedKey[_]] = new Show[ScopedKey[_]] {
def apply(key: ScopedKey[_]) =
Scope.display(key.scope, colored(key.key.label, keyNameColor), ref => displayBuildRelative(currentBuild, multi, ref))
}
def displayRelative(current: ProjectRef, multi: Boolean, project: Reference): String = project match {
case BuildRef(current.build) => "{.}/"
case `current` => if (multi) current.project + "/" else ""
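
Many hunks in this commit change call sites from `display(key)` to `display.show(key)` and construct instances with `Show[ScopedKey[_]](...)`. A minimal sketch of the `Show` shape those edits imply (an assumption based on these hunks, not the actual sbt.util source):

```scala
// Minimal sketch of the sbt.util.Show shape implied by these hunks; not the
// actual source, just enough to show why call sites change to `.show(...)`.
trait Show[A] {
  def show(a: A): String
}

object Show {
  // Factory used in the hunk above, e.g. Show[ScopedKey[_]](key => displayFull(key, keyNameColor))
  def apply[A](f: A => String): Show[A] = new Show[A] {
    def show(a: A): String = f(a)
  }
}

object ShowExample extends App {
  // Call sites therefore move from `display(key)` to `display.show(key)`.
  val display: Show[Int] = Show[Int](i => s"key#$i")
  println(display.show(42)) // prints "key#42"
}
```
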

View File

@ -25,7 +25,7 @@ import sbt.librarymanagement.{ `package` => _, _ }
import sbt.internal.librarymanagement._
import sbt.internal.librarymanagement.syntax._
import sbt.internal.util._
import sbt.util.{ Level, Logger }
import sbt.util.{ Level, Logger, ShowLines }
import scala.xml.NodeSeq
import scala.util.control.NonFatal
@ -510,7 +510,7 @@ object Defaults extends BuildCommon {
testResultLogger in (Test, test) :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185
test := {
val trl = (testResultLogger in (Test, test)).value
val taskName = Project.showContextKey(state.value)(resolvedScoped.value)
val taskName = Project.showContextKey(state.value).show(resolvedScoped.value)
trl.run(streams.value.log, executeTests.value, taskName)
},
testOnly := inputTests(testOnly).evaluated,
@ -595,7 +595,7 @@ object Defaults extends BuildCommon {
rel.internalClassDeps(c).map(intlStamp(_, analysis, s + c)) ++
rel.externalDeps(c).map(stamp) +
(apis.internal.get(c) match {
case Some(x) => x.compilation.startTime
case Some(x) => x.compilationTimestamp
case _ => Long.MinValue
})
}.max
@ -627,7 +627,7 @@ object Defaults extends BuildCommon {
val modifiedOpts = Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions: _*) +: config.options
val newConfig = config.copy(options = modifiedOpts)
val output = allTestGroupsTask(s, loadedTestFrameworks.value, testLoader.value, testGrouping.value, newConfig, fullClasspath.value, javaHome.value, testForkedParallel.value, javaOptions.value)
val taskName = display(resolvedScoped.value)
val taskName = display.show(resolvedScoped.value)
val trl = testResultLogger.value
val processed = output.map(out => trl.run(s.log, out, taskName))
processed
@ -1121,7 +1121,7 @@ object Defaults extends BuildCommon {
val spms = sourcePositionMappers.value
val problems = analysis.infos.allInfos.values.flatMap(i => i.reportedProblems ++ i.unreportedProblems)
val reporter = new LoggerReporter(max, streams.value.log, Compiler.foldMappers(spms))
problems foreach { p => reporter.display(p.position, p.message, p.severity) }
problems foreach { p => reporter.display(p) }
}
def sbtPluginExtra(m: ModuleID, sbtV: String, scalaV: String): ModuleID =
@ -2005,7 +2005,7 @@ object Classpaths {
val analysisOpt = previousCompile.value.analysis
dirs map { x =>
(x, if (analysisOpt.isDefined) analysisOpt.get
else Analysis.empty(true))
else Analysis.empty)
}
}
}
@ -2027,7 +2027,7 @@ object Classpaths {
val analysisOpt = previousCompile.value.analysis
Seq(jar) map { x =>
(x, if (analysisOpt.isDefined) analysisOpt.get
else Analysis.empty(true))
else Analysis.empty)
}
}
}

View File

@ -4,8 +4,8 @@
package sbt
import sbt.internal.{ Load, BuildStructure, TaskTimings, TaskName, GCUtil }
import sbt.internal.util.{ Attributed, ErrorHandling, HList, RMap, Show, Signals, Types }
import sbt.util.Logger
import sbt.internal.util.{ Attributed, ErrorHandling, HList, RMap, Signals, Types }
import sbt.util.{ Logger, Show }
import sbt.librarymanagement.{ Resolver, UpdateReport }
import scala.concurrent.duration.Duration
@ -288,12 +288,12 @@ object EvaluateTask {
val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t")
val log = getStreams(key, streams).log
val display = contextDisplay(state, log.ansiCodesSupported)
log.error("(" + display(key) + ") " + msgString)
log.error("(" + display.show(key) + ") " + msgString)
}
}
private[this] def contextDisplay(state: State, highlight: Boolean) = Project.showContextKey(state, if (highlight) Some(RED) else None)
def suppressedMessage(key: ScopedKey[_])(implicit display: Show[ScopedKey[_]]): String =
"Stack trace suppressed. Run 'last %s' for the full log.".format(display(key))
"Stack trace suppressed. Run 'last %s' for the full log.".format(display.show(key))
def getStreams(key: ScopedKey[_], streams: Streams): TaskStreams =
streams(ScopedKey(Project.fillTaskAxis(key).scope, Keys.streams.key))

View File

@ -5,7 +5,8 @@ import Project._
import Scope.GlobalScope
import Def.{ ScopedKey, Setting }
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ AttributeKey, Show }
import sbt.internal.util.AttributeKey
import sbt.util.Show
import std.Transform.DummyTaskMap
final case class Extracted(structure: BuildStructure, session: SessionSettings, currentRef: ProjectRef)(implicit val showKey: Show[ScopedKey[_]]) {
@ -96,9 +97,9 @@ final case class Extracted(structure: BuildStructure, session: SessionSettings,
private[this] def resolve[T](key: ScopedKey[T]): ScopedKey[T] =
Project.mapScope(Scope.resolveScope(GlobalScope, currentRef.build, rootProject))(key.scopedKey)
private def getOrError[T](scope: Scope, key: AttributeKey[_], value: Option[T])(implicit display: Show[ScopedKey[_]]): T =
value getOrElse sys.error(display(ScopedKey(scope, key)) + " is undefined.")
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
private def getOrError[T](scope: Scope, key: AttributeKey[T])(implicit display: Show[ScopedKey[_]]): T =
structure.data.get(scope, key) getOrElse sys.error(display(ScopedKey(scope, key)) + " is undefined.")
structure.data.get(scope, key) getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
def append(settings: Seq[Setting[_]], state: State): State =
{

View File

@ -11,11 +11,11 @@ import Keys.{ stateBuildStructure, commands, configuration, historyPath, project
import Scope.{ GlobalScope, ThisScope }
import Def.{ Flattened, Initialize, ScopedKey, Setting }
import sbt.internal.{ Load, BuildStructure, LoadedBuild, LoadedBuildUnit, SettingGraph, SettingCompletions, AddSettings, SessionSettings }
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, Show, ~> }
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, ~> }
import sbt.internal.util.Types.{ const, idFun }
import sbt.internal.util.complete.DefaultParsers
import sbt.librarymanagement.Configuration
import sbt.util.Eval
import sbt.util.{ Eval, Show }
import sjsonnew.JsonFormat
import language.experimental.macros
@ -522,7 +522,7 @@ object Project extends ProjectExtra {
if (scopes.isEmpty) ""
else {
val (limited, more) = if (scopes.size <= max) (scopes, "\n") else (scopes.take(max), "\n...\n")
limited.map(sk => prefix(sk) + display(sk)).mkString(label + ":\n\t", "\n\t", more)
limited.map(sk => prefix(sk) + display.show(sk)).mkString(label + ":\n\t", "\n\t", more)
}
data + "\n" +
@ -543,7 +543,7 @@ object Project extends ProjectExtra {
}
def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)(implicit display: Show[ScopedKey[_]]): Unit = {
val rel = relation(structure, actual)
val keyToString = display.apply _
val keyToString = display.show _
DotGraph.generateGraph(file, graphName, rel, keyToString, keyToString)
}
def relation(structure: BuildStructure, actual: Boolean)(implicit display: Show[ScopedKey[_]]): Relation[ScopedKey[_], ScopedKey[_]] =
@ -564,7 +564,7 @@ object Project extends ProjectExtra {
def showUses(defs: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
showKeys(defs)
private[this] def showKeys(s: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
s.map(display.apply).sorted.mkString("\n\t", "\n\t", "\n\n")
s.map(display.show).sorted.mkString("\n\t", "\n\t", "\n\n")
def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(implicit display: Show[ScopedKey[_]]): Seq[Scope] =
relation(structure, actual)(display)._1s.toSeq flatMap { sk => if (sk.key == key) sk.scope :: Nil else Nil }

View File

@ -12,7 +12,8 @@ import DefaultParsers._
import sbt.internal.util.Types.idFun
import java.net.URI
import sbt.internal.CommandStrings.{ MultiTaskCommand, ShowCommand }
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, IMap, Settings, Show, Util }
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, IMap, Settings, Util }
import sbt.util.Show
final class ParsedKey(val key: ScopedKey[_], val mask: ScopeMask)
@ -100,7 +101,7 @@ object Act {
def noValidKeys = failure("No such key.")
def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String =
keys.take(3).map(x => show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean =
{
@ -256,7 +257,7 @@ object Act {
val preparedPairs = anyKeyValues(structure, kvs)
val showConfig = Aggregation.defaultShow(state, showTasks = action == ShowAction)
evaluatingParser(state, structure, showConfig)(preparedPairs) map { evaluate => () => {
val keyStrings = preparedPairs.map(pp => showKey(pp.key)).mkString(", ")
val keyStrings = preparedPairs.map(pp => showKey.show(pp.key)).mkString(", ")
state.log.debug("Evaluating tasks: " + keyStrings)
evaluate()
}

View File

@ -7,8 +7,8 @@ package internal
import Def.ScopedKey
import Keys.{ showSuccess, showTiming, timingFormat }
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ Dag, HList, Relation, Settings, Show, Util }
import sbt.util.Logger
import sbt.internal.util.{ Dag, HList, Relation, Settings, Util }
import sbt.util.{ Logger, Show }
import java.net.URI
import Parser.{ seq, failure, success }
import std.Transform.DummyTaskMap
@ -24,7 +24,7 @@ object Aggregation {
xs match {
case KeyValue(_, x: Seq[_]) :: Nil => print(x.mkString("* ", "\n* ", ""))
case KeyValue(_, x) :: Nil => print(x.toString)
case _ => xs foreach { case KeyValue(key, value) => print(display(key) + "\n\t" + value.toString) }
case _ => xs foreach { case KeyValue(key, value) => print(display.show(key) + "\n\t" + value.toString) }
}
type Values[T] = Seq[KeyValue[T]]
type AnyKeys = Values[_]

View File

@ -5,7 +5,7 @@ import java.net.SocketException
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic.AtomicInteger
import sbt.internal.server._
import sbt.internal.util.ChannelLogEntry
import sbt.internal.util.StringEvent
import sbt.protocol.{ EventMessage, Serialization, ChannelAcceptedEvent }
import scala.collection.mutable.ListBuffer
import scala.annotation.tailrec
@ -101,7 +101,7 @@ private[sbt] final class CommandExchange {
val toDel: ListBuffer[CommandChannel] = ListBuffer.empty
val bytes = Serialization.serializeEvent(event)
event match {
case entry: ChannelLogEntry =>
case entry: StringEvent =>
channels.foreach {
case c: ConsoleChannel =>
if (entry.channelName.isEmpty || entry.channelName == Some(c.name)) {

View File

@ -30,7 +30,7 @@ private[sbt] abstract class BackgroundJob {
}
private[sbt] abstract class AbstractJobHandle extends JobHandle {
override def toString = s"JobHandle(${id}, ${humanReadableName}, ${Def.showFullKey(spawningTask)})"
override def toString = s"JobHandle(${id}, ${humanReadableName}, ${Def.showFullKey.show(spawningTask)})"
}
private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobService {

View File

@ -35,8 +35,8 @@ import Keys.{
update
}
import tools.nsc.reporters.ConsoleReporter
import sbt.internal.util.{ Attributed, Settings, Show, ~> }
import sbt.util.{ Eval => Ev }
import sbt.internal.util.{ Attributed, Settings, ~> }
import sbt.util.{ Eval => Ev, Show }
import sbt.internal.util.Attributed.data
import Scope.GlobalScope
import sbt.internal.util.Types.const

View File

@ -16,7 +16,7 @@ import sbt.internal.util.ManagedLogger
import org.apache.logging.log4j.core.Appender
sealed abstract class LogManager {
def apply(data: Settings[Scope], state: State, task: ScopedKey[_], writer: PrintWriter): Logger
def apply(data: Settings[Scope], state: State, task: ScopedKey[_], writer: PrintWriter): ManagedLogger
def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[_]): ManagedLogger
}
@ -25,7 +25,7 @@ object LogManager {
private val generateId: AtomicInteger = new AtomicInteger
// This is called by mkStreams
def construct(data: Settings[Scope], state: State): (ScopedKey[_], PrintWriter) => Logger = (task: ScopedKey[_], to: PrintWriter) =>
def construct(data: Settings[Scope], state: State): (ScopedKey[_], PrintWriter) => ManagedLogger = (task: ScopedKey[_], to: PrintWriter) =>
{
val manager: LogManager = (logManager in task.scope).get(data) getOrElse { defaultManager(state.globalLogging.console) }
manager(data, state, task, to)
@ -47,7 +47,7 @@ object LogManager {
def withLoggers(
screen: (ScopedKey[_], State) => Appender = (sk, s) => defaultScreen(s.globalLogging.console),
backed: PrintWriter => Appender = defaultBacked(),
backed: PrintWriter => Appender = defaultBacked,
relay: Unit => Appender = defaultRelay,
extra: ScopedKey[_] => Seq[Appender] = _ => Nil
): LogManager = new DefaultLogManager(screen, backed, relay, extra)
@ -58,7 +58,7 @@ object LogManager {
relay: Unit => Appender,
extra: ScopedKey[_] => Seq[Appender]
) extends LogManager {
def apply(data: Settings[Scope], state: State, task: ScopedKey[_], to: PrintWriter): Logger =
def apply(data: Settings[Scope], state: State, task: ScopedKey[_], to: PrintWriter): ManagedLogger =
defaultLogger(data, state, task, screen(task, state), backed(to), relay(()), extra(task).toList)
def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[_]): ManagedLogger =
@ -67,7 +67,7 @@ object LogManager {
// This is the main function that is used to generate the logger for tasks.
def defaultLogger(data: Settings[Scope], state: State, task: ScopedKey[_],
console: Appender, backed: Appender, relay: Appender, extra: List[Appender]): Logger =
console: Appender, backed: Appender, relay: Appender, extra: List[Appender]): ManagedLogger =
{
val execOpt = state.currentCommand
val loggerName: String = s"${task.key.label}-${generateId.incrementAndGet}"
@ -103,7 +103,7 @@ object LogManager {
def suppressedMessage(key: ScopedKey[_], state: State): SuppressedTraceContext => Option[String] =
{
lazy val display = Project.showContextKey(state)
def commandBase = "last " + display(unwrapStreamsKey(key))
def commandBase = "last " + display.show(unwrapStreamsKey(key))
def command(useColor: Boolean) = if (useColor) BLUE + commandBase + RESET else "'" + commandBase + "'"
context => Some("Stack trace suppressed: run %s for the full output.".format(command(context.useColor)))
}

View File

@ -4,8 +4,8 @@
package sbt
package internal
import sbt.internal.util.{ Show, Types }
import sbt.internal.util.Types
import sbt.util.Show
import java.util.regex.Pattern
import java.io.File
import Keys.Streams
@ -47,7 +47,7 @@ object Output {
outputs flatMap {
case KeyValue(key, lines) =>
val flines = f(lines)
if (!single) bold(display(key)) +: flines else flines
if (!single) bold(display.show(key)) +: flines else flines
}
}

View File

@ -30,6 +30,6 @@ class RelayAppender(name: String) extends AbstractAppender(name, null, PatternLa
}
def appendEvent(level: Level.Value, event: AnyRef): Unit =
event match {
case x: ChannelLogEntry => StandardMain.exchange.publishEvent(x: AbstractEntry)
case x: StringEvent => StandardMain.exchange.publishEvent(x: AbstractEntry)
}
}

View File

@ -1,8 +1,8 @@
package sbt
package internal
import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Show, Types, Util }
import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Types, Util }
import sbt.util.Show
import sbt.librarymanagement.Configuration
import Project._
@ -71,7 +71,7 @@ private[sbt] object SettingCompletions {
private[this] def setSummary(redefined: Set[ScopedKey[_]], affected: Set[ScopedKey[_]], verbose: Boolean)(implicit display: Show[ScopedKey[_]]): String =
{
val QuietLimit = 3
def strings(in: Set[ScopedKey[_]]): Seq[String] = in.toSeq.map(sk => display(sk)).sorted
def strings(in: Set[ScopedKey[_]]): Seq[String] = in.toSeq.map(sk => display.show(sk)).sorted
def lines(in: Seq[String]): (String, Boolean) =
if (in.isEmpty)
("no settings or tasks.", false)

View File

@ -4,7 +4,8 @@
package sbt
package internal
import sbt.internal.util.{ Show, JLine }
import sbt.internal.util.{ JLine }
import sbt.util.Show
import java.io.File
import Def.{ compiled, flattenLocals, ScopedKey }
@ -20,12 +21,12 @@ object SettingGraph {
{
val key = scoped.key
val scope = scoped.scope
val definedIn = structure.data.definingScope(scope, key) map { sc => display(ScopedKey(sc, key)) }
val definedIn = structure.data.definingScope(scope, key) map { sc => display.show(ScopedKey(sc, key)) }
val depends = cMap.get(scoped) match { case Some(c) => c.dependencies.toSet; case None => Set.empty }
// val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
// val reverse = reverseDependencies(cMap, scoped)
SettingGraph(display(scoped), definedIn,
SettingGraph(display.show(scoped), definedIn,
Project.scopedKeyData(structure, scope, key),
key.description, basedir,
depends map { (x: ScopedKey[_]) => loop(x, generation + 1) })

View File

@ -12,9 +12,9 @@ object Dependencies {
// sbt modules
private val ioVersion = "1.0.0-M9"
private val utilVersion = "1.0.0-M18"
private val lmVersion = "1.0.0-X5"
private val zincVersion = "1.0.0-X8"
private val utilVersion = "1.0.0-M20"
private val lmVersion = "1.0.0-X7"
private val zincVersion = "1.0.0-X11"
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

View File

@ -1 +0,0 @@
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.0-M3")

View File

@ -8,3 +8,4 @@ addSbtPlugin("com.typesafe.sbt" % "sbt-javaversioncheck" % "0.1.0")
addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.6.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.2")
addSbtPlugin("me.lessis" % "bintray-sbt" % "0.3.0")
addSbtPlugin("org.scala-sbt" % "sbt-contraband" % "0.3.0-M4")

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -1,5 +1,5 @@
/**
* This code is generated using sbt-datatype.
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY

View File

@ -9,7 +9,7 @@ import sjsonnew.support.scalajson.unsafe.{ Parser, Converter, CompactPrinter }
import scala.json.ast.unsafe.{ JValue, JObject, JString }
import java.nio.ByteBuffer
import scala.util.{ Success, Failure }
import sbt.internal.util.ChannelLogEntry
import sbt.internal.util.StringEvent
object Serialization {
def serializeEvent[A: JsonFormat](event: A): Array[Byte] =
@ -59,9 +59,9 @@ object Serialization {
Parser.parseFromByteBuffer(buffer) match {
case Success(json) =>
detectType(json) match {
case Some("ChannelLogEntry") =>
case Some("StringEvent") =>
import sbt.internal.util.codec.JsonProtocol._
Converter.fromJson[ChannelLogEntry](json) match {
Converter.fromJson[StringEvent](json) match {
case Success(event) => Right(event)
case Failure(e) => Left(e.getMessage)
}

sbt-allsources.sh (new executable file)
View File

@ -0,0 +1,3 @@
#!/usr/bin/env bash
sbt -Dsbtio.path=../io -Dsbtutil.path=../util -Dsbtlm.path=../librarymanagement -Dsbtzinc.path=../zinc "$@"

View File

@ -43,6 +43,10 @@ trait Import {
type SetLevel = sbt.util.SetLevel
type SetSuccess = sbt.util.SetSuccess
type SetTrace = sbt.util.SetTrace
val Show = sbt.util.Show
type Show[T] = sbt.util.Show[T]
val ShowLines = sbt.util.ShowLines
type ShowLines[A] = sbt.util.ShowLines[A]
type Success = sbt.util.Success
type Trace = sbt.util.Trace
@ -141,10 +145,6 @@ trait Import {
val ScalaKeywords = sbt.internal.util.ScalaKeywords
type Settings[S] = sbt.internal.util.Settings[S]
type SharedAttributeKey[T] = sbt.internal.util.SharedAttributeKey[T]
val Show = sbt.internal.util.Show
type Show[T] = sbt.internal.util.Show[T]
val ShowLines = sbt.internal.util.ShowLines
type ShowLines[A] = sbt.internal.util.ShowLines[A]
val Signals = sbt.internal.util.Signals
val SimpleReader = sbt.internal.util.SimpleReader
type SimpleReader = sbt.internal.util.SimpleReader

View File

@ -1,8 +1,8 @@
name := "foo"
scalaVersion := "2.10.4"
scalaVersion := "2.10.6"
crossScalaVersions := List("2.10.4", "2.11.0")
crossScalaVersions := List("2.10.6", "2.11.8")
incOptions := incOptions.value.withClassfileManagerType(
xsbti.Maybe.just(new xsbti.compile.TransactionalManagerType(

View File

@ -1 +0,0 @@
incOptions := incOptions.value.withNameHashing(true)

View File

@ -5,8 +5,7 @@ val commonSettings = Seq(
scalaVersion := "2.11.4",
resolvers += Resolver.sonatypeRepo("snapshots"),
resolvers += Resolver.sonatypeRepo("releases"),
addCompilerPlugin("org.scalamacros" % "paradise" % paradiseVersion cross CrossVersion.full),
incOptions := incOptions.value.withNameHashing(true)
addCompilerPlugin("org.scalamacros" % "paradise" % paradiseVersion cross CrossVersion.full)
)
lazy val root = (project in file(".")).

View File

@ -1,7 +1,6 @@
val defaultSettings = Seq(
scalaVersion := "2.10.6",
libraryDependencies += scalaVersion("org.scala-lang" % "scala-reflect" % _ ).value //,
//incOptions := incOptions.value.withNameHashing(true)
libraryDependencies += scalaVersion("org.scala-lang" % "scala-reflect" % _ ).value
)
lazy val root = (project in file(".")).

View File

@ -1 +0,0 @@
incOptions := incOptions.value.withNameHashing(true)

View File

@ -1,6 +1,3 @@
# Marked as pending because name hashing doesn't support structural types
# in some cases. See: https://github.com/sbt/sbt/issues/1545
> compile
# modify A.scala so that it does not conform to the structural type in B.scala

View File

@ -14,7 +14,7 @@ TaskKey[Unit]("checkCompilations") := {
val allCompilations = analysis.compilations.allCompilations
val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
val recompiledFiles = analysis.apis.internal.collect {
case (cn, api) if api.compilation.startTime == c.startTime => findFile(cn)
case (cn, api) if api.compilationTimestamp == c.startTime => findFile(cn)
}
recompiledFiles.toSet
}

View File

@ -22,7 +22,7 @@ TaskKey[Unit]("checkCompilations") := {
val allCompilations = analysis.compilations.allCompilations
val recompiledFiles: Seq[Set[java.io.File]] = allCompilations map { c =>
val recompiledFiles = analysis.apis.internal.collect {
case (cn, api) if api.compilation.startTime == c.startTime => findFile(cn)
case (cn, api) if api.compilationTimestamp == c.startTime => findFile(cn)
}
recompiledFiles.toSet
}

View File

@ -1,3 +1 @@
logLevel in compile := Level.Debug
incOptions := incOptions.value.withNameHashing(true)

View File

@ -4,7 +4,7 @@ lazy val OtherScala = config("other-scala").hide
configs(OtherScala)
libraryDependencies += "org.scala-lang" % "scala-compiler" % "2.11.1" % OtherScala.name
libraryDependencies += "org.scala-lang" % "scala-compiler" % "2.11.8" % OtherScala.name
managedClasspath in OtherScala := Classpaths.managedJars(OtherScala, classpathTypes.value, update.value)
@ -13,7 +13,7 @@ scalaInstance := {
val rawJars = (managedClasspath in OtherScala).value.map(_.data)
val scalaHome = (target.value / "scala-home")
def removeVersion(name: String): String =
name.replaceAll("\\-2.11.1", "")
name.replaceAll("\\-2.11.8", "")
for(jar <- rawJars) {
val tjar = scalaHome / s"lib/${removeVersion(jar.getName)}"
IO.copyFile(jar, tjar)
@ -27,6 +27,6 @@ libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test"
libraryDependencies += "com.typesafe.akka" %% "akka-actor" % "2.3.3" % "test"
scalaVersion := "2.11.0"
scalaVersion := "2.11.8"
ivyScala := ivyScala.value map (_.withOverrideScalaVersion(sbtPlugin.value))

View File

@ -23,7 +23,7 @@ import sbt.internal.io.DeferredWriter
import sbt.io.IO
import sbt.io.syntax._
import sbt.util.Logger
import sbt.internal.util.ManagedLogger
import sjsonnew.{ IsoString, SupportConverter }
import sbt.internal.util.{ CacheStoreFactory, DirectoryStoreFactory, Input, Output, PlainInput, PlainOutput }
@ -76,10 +76,10 @@ sealed trait TaskStreams[Key] {
// default logger
/** Obtains the default logger. */
final lazy val log: Logger = log(default)
final lazy val log: ManagedLogger = log(default)
/** Creates a Logger that logs to stream with ID `sid`.*/
def log(sid: String): Logger
def log(sid: String): ManagedLogger
private[this] def getID(s: Option[String]) = s getOrElse default
}
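
Downstream, this is what makes `streams.value.log` (used in several hunks above) a `ManagedLogger` rather than a plain `Logger`. For task authors the familiar API is unchanged; a hypothetical build.sbt task for illustration (`greet` is an invented key):

```scala
// Hypothetical build.sbt task; streams.value.log is now a ManagedLogger,
// but the usual info/warn/error calls still apply.
lazy val greet = taskKey[Unit]("logs a greeting via the task's managed logger")

greet := {
  val log = streams.value.log // a ManagedLogger after this change
  log.info("hello from a managed task logger")
}
```
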
@ -120,7 +120,7 @@ object Streams {
synchronized { streams.values.foreach(_.close()); streams.clear() }
}
def apply[Key, J: IsoString](taskDirectory: Key => File, name: Key => String, mkLogger: (Key, PrintWriter) => Logger, converter: SupportConverter[J]): Streams[Key] = new Streams[Key] {
def apply[Key, J: IsoString](taskDirectory: Key => File, name: Key => String, mkLogger: (Key, PrintWriter) => ManagedLogger, converter: SupportConverter[J]): Streams[Key] = new Streams[Key] {
def apply(a: Key): ManagedStreams[Key] = new ManagedStreams[Key] {
private[this] var opened: List[Closeable] = Nil
@ -153,7 +153,7 @@ object Streams {
lazy val cacheStoreFactory: CacheStoreFactory =
new DirectoryStoreFactory(cacheDirectory, converter)
def log(sid: String): Logger = mkLogger(a, text(sid))
def log(sid: String): ManagedLogger = mkLogger(a, text(sid))
def make[T <: Closeable](a: Key, sid: String)(f: File => T): T = synchronized {
checkOpen()