From b656d599e17f4b681fb5440422a311d70e414f60 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Mon, 3 Aug 2020 15:46:03 -0700
Subject: [PATCH 1/9] Allow sbt to force flush of remote output

In eb688c9ecdb942dbf0fa985c0103f6f95f6341c1, we started buffering output to the
remote client to reduce flickering. This was causing problems with the output
for the thin client in batch mode: with the delay, it was possible for the
client to exit before all of its output had been displayed.

Bonus: only display the aggregation error message if the terminal has success
enabled (the thin client displays its own timing message, so the message in
aggregation ended up being a duplicate).
---
 .../scala/sbt/internal/util/Terminal.scala    |  2 +-
 main/src/main/scala/sbt/MainLoop.scala        |  4 +++
 .../main/scala/sbt/internal/Aggregation.scala |  2 +-
 .../sbt/internal/server/NetworkChannel.scala  | 29 ++++++++++---------
 4 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
index 019e0bb46..52cad90de 100644
--- a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
+++ b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala
@@ -163,7 +163,7 @@ trait Terminal extends AutoCloseable {
     if (lines.nonEmpty) lines.tail.foldLeft(lines.headOption.fold(0)(count))(_ + count(_))
     else 0
   }
-
+  private[sbt] def flush(): Unit = printStream.flush()
 }
 object Terminal {
diff --git a/main/src/main/scala/sbt/MainLoop.scala b/main/src/main/scala/sbt/MainLoop.scala
index 09f26e96e..a2c4efd7d 100644
--- a/main/src/main/scala/sbt/MainLoop.scala
+++ b/main/src/main/scala/sbt/MainLoop.scala
@@ -202,6 +202,10 @@ object MainLoop {
     StandardMain.exchange.setExec(Some(exec))
     StandardMain.exchange.unprompt(ConsoleUnpromptEvent(exec.source))
     val newState = Command.process(exec.commandLine, progressState)
+    // Flush the terminal output after command evaluation to ensure that all output
+    // is displayed in the thin client before we report the command status.
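+    // (Only a network channel's terminal does real work here: NetworkChannel's
+    // terminal overrides flush() to push its buffered remote output right away,
+    // while the console terminal's flush is the plain PrintStream flush above.)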
+ val terminal = channelName.flatMap(exchange.channelForName(_).map(_.terminal)) + terminal.foreach(_.flush()) if (exec.execId.fold(true)(!_.startsWith(networkExecPrefix)) && !exec.commandLine.startsWith(networkExecPrefix)) { val doneEvent = ExecStatusEvent( diff --git a/main/src/main/scala/sbt/internal/Aggregation.scala b/main/src/main/scala/sbt/internal/Aggregation.scala index df7e43ee3..62b6fa7d9 100644 --- a/main/src/main/scala/sbt/internal/Aggregation.scala +++ b/main/src/main/scala/sbt/internal/Aggregation.scala @@ -131,7 +131,7 @@ object Aggregation { if (get(showSuccess)) { if (get(showTiming)) { val msg = timingString(start, stop, structure.data, currentRef) - if (success) log.success(msg) else log.error(msg) + if (success) log.success(msg) else if (Terminal.get.isSuccessEnabled) log.error(msg) } else if (success) log.success("") } diff --git a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala index 4aa3c1f81..ddb42df1c 100644 --- a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala +++ b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala @@ -653,6 +653,13 @@ final class NetworkChannel( import sjsonnew.BasicJsonProtocol._ import scala.collection.JavaConverters._ + private[this] val outputBuffer = new LinkedBlockingQueue[Byte] + private[this] val flushFuture = new AtomicReference[java.util.concurrent.Future[_]] + private[this] def doFlush()() = { + val list = new java.util.ArrayList[Byte] + outputBuffer.synchronized(outputBuffer.drainTo(list)) + if (!list.isEmpty) jsonRpcNotify(Serialization.systemOut, list.asScala.toSeq) + } private[this] lazy val outputStream: OutputStream with AutoCloseable = new OutputStream with AutoCloseable { /* @@ -670,28 +677,21 @@ final class NetworkChannel( Executors.newSingleThreadScheduledExecutor( r => new Thread(r, s"$name-output-buffer-timer-thread") ) - private[this] val buffer = new LinkedBlockingQueue[Byte] - private[this] val future = new AtomicReference[java.util.concurrent.Future[_]] - private[this] def doFlush()() = { - val list = new java.util.ArrayList[Byte] - buffer.synchronized(buffer.drainTo(list)) - if (!list.isEmpty) jsonRpcNotify(Serialization.systemOut, list.asScala.toSeq) - } override def close(): Unit = { Util.ignoreResult(executor.shutdownNow()) doFlush() } - override def write(b: Int): Unit = buffer.synchronized { - buffer.put(b.toByte) + override def write(b: Int): Unit = outputBuffer.synchronized { + outputBuffer.put(b.toByte) } override def flush(): Unit = { - future.get match { + flushFuture.get match { case null => try { - future.set( + flushFuture.set( executor.schedule( (() => { - future.set(null) + flushFuture.set(null) doFlush() }): Runnable, 20, @@ -702,8 +702,8 @@ final class NetworkChannel( case f => } } - override def write(b: Array[Byte]): Unit = buffer.synchronized { - b.foreach(buffer.put) + override def write(b: Array[Byte]): Unit = outputBuffer.synchronized { + b.foreach(outputBuffer.put) } override def write(b: Array[Byte], off: Int, len: Int): Unit = { write(java.util.Arrays.copyOfRange(b, off, off + len)) @@ -880,6 +880,7 @@ final class NetworkChannel( catch { case _: InterruptedException => } } + override def flush(): Unit = doFlush() override def toString: String = s"NetworkTerminal($name)" override def close(): Unit = if (closed.compareAndSet(false, true)) { val threads = blockedThreads.synchronized { From edf43a473b18f5c185475aa39d227eeb94e3ea8e Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Mon, 3 Aug 2020 
16:50:31 -0700
Subject: [PATCH 2/9] Set complete flag in completions

JLine 3 automatically appends a space character to the completion candidate
unless you tell it not to by setting its 'complete' parameter. This behavior is
generally nice because it will automatically complete something like
'foo/testO' to 'foo/testOnly ', which allows the user to start typing the test
name without having to type a space. It does, however, break scripted
completions because it will complete 'scripted wat' to 'scripted watch/ '.

This commit updates the custom completer to append a " " to the initial
completions and check if there are any additional completions available. If so,
we set the complete flag to true and jline will append a space to the input
when the user presses <tab> or <enter>. Otherwise the old jline2 behavior,
where no spaces are ever appended, is preserved.
---
 .../main/scala/sbt/internal/util/LineReader.scala | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala b/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala
index 34526a948..52a36ca8d 100644
--- a/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala
+++ b/internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala
@@ -57,11 +57,14 @@ object LineReader {
        * `testOnly testOnly\ com.foo.FooSpec` instead of `testOnly com.foo.FooSpec`.
        */
       if (c.append.nonEmpty) {
-        if (!pl.line().endsWith(" ")) {
-          candidates.add(new Candidate(pl.line().split(" ").last + c.append))
-        } else {
-          candidates.add(new Candidate(c.append))
-        }
+        val comp =
+          if (!pl.line().endsWith(" ")) pl.line().split(" ").last + c.append else c.append
+        // tell jline to append a " " if the completion would be valid with a " " appended,
+        // which can be the case for input tasks and some commands. We need to exclude
+        // the empty string and ";", which always seem to be present.
+        val complete = (Parser.completions(parser, comp + " ", 10).get.map(_.display) --
+          Set(";", "")).nonEmpty
+        candidates.add(new Candidate(comp, comp, null, null, null, null, complete))
       }
     }
   }

From 775cdd598af2d6f244e450855773de79b95f7e30 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Tue, 4 Aug 2020 11:53:19 -0700
Subject: [PATCH 3/9] Catch IOExceptions in consoleLog

A ClosedChannelException was thrown here during CI.
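For illustration, a minimal sketch of the failure mode being guarded against
(the throwing stream is a hypothetical stand-in; the relevant detail is that
ClosedChannelException is a subclass of IOException):

```
import java.io.{ IOException, OutputStream, PrintStream }
import java.nio.channels.ClosedChannelException

// Stand-in for a terminal PrintStream whose remote channel has gone away.
// (Plain java.io.PrintStream swallows write errors internally; sbt's
// terminal streams can propagate them, which is what happened in CI.)
val closedBacked: PrintStream = new PrintStream(new OutputStream {
  def write(b: Int): Unit = ()
}) {
  override def println(s: String): Unit = throw new ClosedChannelException
}

// Before this patch the exception escaped consoleLog; with the catch added
// below, a failed console write becomes a no-op.
try closedBacked.println("[info] hello")
catch { case _: IOException => }
```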
--- .../src/main/scala/sbt/internal/util/Terminal.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala index 019e0bb46..6b3c135db 100644 --- a/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala +++ b/internal/util-logging/src/main/scala/sbt/internal/util/Terminal.scala @@ -7,7 +7,7 @@ package sbt.internal.util -import java.io.{ InputStream, InterruptedIOException, OutputStream, PrintStream } +import java.io.{ InputStream, InterruptedIOException, IOException, OutputStream, PrintStream } import java.nio.channels.ClosedChannelException import java.util.{ Arrays, Locale } import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference } @@ -171,7 +171,8 @@ object Terminal { if (System.getProperty("sbt.jline.verbose", "false") != "true") jline.internal.Log.setOutput(new PrintStream(_ => {}, false)) def consoleLog(string: String): Unit = { - Terminal.console.printStream.println(s"[info] $string") + try Terminal.console.printStream.println(s"[info] $string") + catch { case _: IOException => } } private[sbt] def set(terminal: Terminal) = { activeTerminal.set(terminal) From 284ed4de5f3010cd76e41d4ada906109dc078efb Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Wed, 29 Jul 2020 09:05:30 -0700 Subject: [PATCH 4/9] Apply miscellaneous whitespace changes The EventsTest changes kept appearing. I'm not sure why scalafmt check was allowing it before. My vim status bar warns me about trailing spaces and I noticed the two in Keys.scala and removed them. --- main/src/main/scala/sbt/Keys.scala | 4 ++-- server-test/src/test/scala/testpkg/EventsTest.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index 12a28d56c..8b41aa7a0 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -387,7 +387,7 @@ object Keys { val csrExtraCredentials = taskKey[Seq[lmcoursier.credentials.Credentials]]("") val csrPublications = taskKey[Seq[(lmcoursier.definitions.Configuration, lmcoursier.definitions.Publication)]]("") val csrReconciliations = settingKey[Seq[(ModuleMatchers, Reconciliation)]]("Strategy to reconcile version conflicts.") - + val internalConfigurationMap = settingKey[Configuration => Configuration]("Maps configurations to the actual configuration used to define the classpath.").withRank(CSetting) val classpathConfiguration = taskKey[Configuration]("The configuration used to define the classpath.").withRank(CTask) val ivyConfiguration = taskKey[IvyConfiguration]("General dependency management (Ivy) settings, such as the resolvers and paths to use.").withRank(DTask) @@ -457,7 +457,7 @@ object Keys { val fullResolvers = taskKey[Seq[Resolver]]("Combines the project resolver, default resolvers, and user-defined resolvers.").withRank(CTask) val otherResolvers = taskKey[Seq[Resolver]]("Resolvers not included in the main resolver chain, such as those in module configurations.").withRank(CSetting) val scalaCompilerBridgeResolvers = taskKey[Seq[Resolver]]("Resolvers used to resolve compiler bridges.").withRank(CSetting) - val includePluginResolvers = settingKey[Boolean]("Include the resolvers from the metabuild.").withRank(CSetting) + val includePluginResolvers = settingKey[Boolean]("Include the resolvers from the metabuild.").withRank(CSetting) val useJCenter = settingKey[Boolean]("Use JCenter as the 
default repository.").withRank(CSetting) val moduleConfigurations = settingKey[Seq[ModuleConfiguration]]("Defines module configurations, which override resolvers on a per-module basis.").withRank(BMinusSetting) val retrievePattern = settingKey[String]("Pattern used to retrieve managed dependencies to the current build.").withRank(DSetting) diff --git a/server-test/src/test/scala/testpkg/EventsTest.scala b/server-test/src/test/scala/testpkg/EventsTest.scala index 9e5f30424..e3ae360b4 100644 --- a/server-test/src/test/scala/testpkg/EventsTest.scala +++ b/server-test/src/test/scala/testpkg/EventsTest.scala @@ -66,7 +66,7 @@ object EventsTest extends AbstractServerTest { }) } -/* This test is timing out. + /* This test is timing out. test("cancel on-going task with string id") { _ => import sbt.Exec val id = Exec.newExecId @@ -84,5 +84,5 @@ object EventsTest extends AbstractServerTest { s contains """"result":{"status":"Task cancelled"""" }) } -*/ + */ } From eb48f24f3a097cdc5365129a9a9950d778dd5693 Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Sun, 2 Aug 2020 09:35:02 -0700 Subject: [PATCH 5/9] Make watch implementation more sbt idiomatic The 1.4.0 implementation of watch uses a concurrent hash map to maintain the global watch state which manages the state for an arbitrary number of clients. Using a mutable map is not idiomatic sbt and I found it difficult to reason about when the map was updated. This commit reworks the feature so that the global state is instead stored in an immutable map that is only modified during the internal watch commands, which is easier to reason about. --- main/src/main/scala/sbt/Main.scala | 31 +++-- .../scala/sbt/internal/CommandExchange.scala | 15 +- .../main/scala/sbt/internal/Continuous.scala | 129 +++++++++++------- .../sbt/internal/server/NetworkChannel.scala | 5 +- .../sbt-test/watch/legacy-sources/build.sbt | 14 +- .../watch/legacy-sources/project/Build.scala | 17 --- 6 files changed, 113 insertions(+), 98 deletions(-) delete mode 100644 sbt/src/sbt-test/watch/legacy-sources/project/Build.scala diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 763d6cef6..9184670bf 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -971,22 +971,23 @@ object BuiltinCommands { } private[sbt] def waitCmd: Command = - Command.arb(_ => (ContinuousCommands.waitWatch: Parser[String]).examples()) { (s0, _) => + Command.arb( + _ => ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples() + ) { (s0, channel) => val exchange = StandardMain.exchange - if (exchange.channels.exists(ContinuousCommands.isInWatch)) { - val s1 = exchange.run(s0) - exchange.channels.foreach { - case c if ContinuousCommands.isPending(c) => - case c => c.prompt(ConsolePromptEvent(s1)) - } - val exec: Exec = getExec(s1, Duration.Inf) - val remaining: List[Exec] = - Exec(ContinuousCommands.waitWatch, None) :: - Exec(FailureWall, None) :: s1.remainingCommands - val newState = s1.copy(remainingCommands = exec +: remaining) - if (exec.commandLine.trim.isEmpty) newState - else newState.clearGlobalLog - } else s0 + exchange.channelForName(channel) match { + case Some(c) if ContinuousCommands.isInWatch(s0, c) => + c.prompt(ConsolePromptEvent(s0)) + val s1 = exchange.run(s0) + val exec: Exec = getExec(s1, Duration.Inf) + val remaining: List[Exec] = + Exec(s"${ContinuousCommands.waitWatch} $channel", None) :: + Exec(FailureWall, None) :: s1.remainingCommands + val newState = s1.copy(remainingCommands = exec 
+: remaining) + if (exec.commandLine.trim.isEmpty) newState + else newState.clearGlobalLog + case _ => s0 + } } private[sbt] def promptChannel = Command.arb(_ => reportParser(PromptChannel)) { diff --git a/main/src/main/scala/sbt/internal/CommandExchange.scala b/main/src/main/scala/sbt/internal/CommandExchange.scala index d4c6bbf27..3a5eaea79 100644 --- a/main/src/main/scala/sbt/internal/CommandExchange.scala +++ b/main/src/main/scala/sbt/internal/CommandExchange.scala @@ -170,17 +170,15 @@ private[sbt] final class CommandExchange { currentExec.filter(_.source.map(_.channelName) == Some(c.name)).foreach { e => Util.ignoreResult(NetworkChannel.cancel(e.execId, e.execId.getOrElse("0"))) } - if (ContinuousCommands.isInWatch(c)) { - try commandQueue.put(Exec(s"${ContinuousCommands.stopWatch} ${c.name}", None)) - catch { case _: InterruptedException => } - } + try commandQueue.put(Exec(s"${ContinuousCommands.stopWatch} ${c.name}", None)) + catch { case _: InterruptedException => } } private[this] def mkAskUser( name: String, ): (State, CommandChannel) => UITask = { (state, channel) => ContinuousCommands - .watchUITaskFor(channel) + .watchUITaskFor(state, channel) .getOrElse(new UITask.AskUserTask(state, channel)) } @@ -353,8 +351,8 @@ private[sbt] final class CommandExchange { def prompt(event: ConsolePromptEvent): Unit = { currentExecRef.set(null) channels.foreach { - case c if ContinuousCommands.isInWatch(c) => - case c => c.prompt(event) + case c if ContinuousCommands.isInWatch(lastState.get, c) => + case c => c.prompt(event) } } def unprompt(event: ConsoleUnpromptEvent): Unit = channels.foreach(_.unprompt(event)) @@ -459,10 +457,9 @@ private[sbt] final class CommandExchange { Option(currentExecRef.get).foreach(cancel) mt.channel.prompt(ConsolePromptEvent(lastState.get)) case t if t.startsWith(ContinuousCommands.stopWatch) => - ContinuousCommands.stopWatchImpl(mt.channel.name) mt.channel match { case c: NetworkChannel if !c.isInteractive => exit(mt) - case _ => mt.channel.prompt(ConsolePromptEvent(lastState.get)) + case _ => } commandQueue.add(Exec(t, None, None)) case `TerminateAction` => exit(mt) diff --git a/main/src/main/scala/sbt/internal/Continuous.scala b/main/src/main/scala/sbt/internal/Continuous.scala index 86618ceb2..557fa3300 100644 --- a/main/src/main/scala/sbt/internal/Continuous.scala +++ b/main/src/main/scala/sbt/internal/Continuous.scala @@ -108,8 +108,8 @@ private[sbt] object Continuous extends DeprecatedContinuous { case Some(c) => s -> c case None => StandardMain.exchange.run(s) -> ConsoleChannel.defaultName } - ContinuousCommands.setupWatchState(channel, initialCount, commands, s1) - s"${ContinuousCommands.runWatch} $channel" :: s1 + val ws = ContinuousCommands.setupWatchState(channel, initialCount, commands, s1) + s"${ContinuousCommands.runWatch} $channel" :: ws } @deprecated("The input task version of watch is no longer available", "1.4.0") @@ -1056,7 +1056,7 @@ private[sbt] object Continuous extends DeprecatedContinuous { val commands: Seq[String], beforeCommandImpl: (State, mutable.Set[DynamicInput]) => State, val afterCommand: State => State, - val afterWatch: () => Unit, + val afterWatch: State => State, val callbacks: Callbacks, val dynamicInputs: mutable.Set[DynamicInput], val pending: Boolean, @@ -1102,7 +1102,8 @@ private[sbt] object ContinuousCommands { "", Int.MaxValue ) - private[this] val watchStates = new ConcurrentHashMap[String, ContinuousState] + private[this] val watchStates = + AttributeKey[Map[String, ContinuousState]]("sbt-watch-states", 
Int.MaxValue) private[sbt] val runWatch = networkExecPrefix + "runWatch" private[sbt] val preWatch = networkExecPrefix + "preWatch" private[sbt] val postWatch = networkExecPrefix + "postWatch" @@ -1120,10 +1121,10 @@ private[sbt] object ContinuousCommands { "", Int.MaxValue ) - private[sbt] val setupWatchState: (String, Int, Seq[String], State) => Unit = + private[sbt] val setupWatchState: (String, Int, Seq[String], State) => State = (channelName, count, commands, state) => { - watchStates.get(channelName) match { - case null => + state.get(watchStates).flatMap(_.get(channelName)) match { + case None => val extracted = Project.extract(state) val repo = state.get(globalFileTreeRepository) match { case Some(r) => localRepo(r) @@ -1161,27 +1162,37 @@ private[sbt] object ContinuousCommands { stateWithCache.put(Continuous.DynamicInputs, dynamicInputs) }, afterCommand = state => { - watchStates.get(channelName) match { - case null => - case ws => watchStates.put(channelName, ws.incremented) + val newWatchState = state.get(watchStates) match { + case None => state + case Some(ws) => + ws.get(channelName) match { + case None => state + case Some(cs) => state.put(watchStates, ws + (channelName -> cs.incremented)) + } } - val restoredState = state.get(stashedRepo) match { + val restoredState = newWatchState.get(stashedRepo) match { case None => throw new IllegalStateException(s"No stashed repository for $state") - case Some(r) => state.put(globalFileTreeRepository, r) + case Some(r) => newWatchState.put(globalFileTreeRepository, r) } restoredState.remove(persistentFileStampCache).remove(Continuous.DynamicInputs) }, - afterWatch = () => { - watchStates.remove(channelName) + afterWatch = state => { LogExchange.unbindLoggerAppenders(channelName + "-watch") repo.close() + state.get(watchStates) match { + case None => state + case Some(ws) => state.put(watchStates, ws - channelName) + } }, callbacks = cb, dynamicInputs = dynamicInputs, pending = false, ) - Util.ignoreResult(watchStates.put(channelName, s)) - case cs => + state.get(watchStates) match { + case None => state.put(watchStates, Map(channelName -> s)) + case Some(ws) => state.put(watchStates, ws + (channelName -> s)) + } + case Some(cs) => val cmd = cs.commands.mkString("; ") val msg = s"Tried to start new watch while channel, '$channelName', was already watching '$cmd'" @@ -1194,28 +1205,26 @@ private[sbt] object ContinuousCommands { Command.arb { state => (cmdParser(name) ~> channelParser).map(channel => () => updateState(channel, state)) } { case (_, newState) => newState() } - private[this] val runWatchCommand = watchCommand(runWatch) { (channel, state) => - watchStates.get(channel) match { - case null => state - case cs => + private[sbt] val runWatchCommand = watchCommand(runWatch) { (channel, state) => + state.get(watchStates).flatMap(_.get(channel)) match { + case None => state + case Some(cs) => val pre = StashOnFailure :: s"$SetTerminal $channel" :: s"$preWatch $channel" :: Nil val post = FailureWall :: PopOnFailure :: s"$SetTerminal ${ConsoleChannel.defaultName}" :: - s"$postWatch $channel" :: waitWatch :: Nil + s"$postWatch $channel" :: s"$waitWatch $channel" :: Nil pre ::: cs.commands.toList ::: post ::: state } } - private[sbt] def watchUITaskFor(channel: CommandChannel): Option[UITask] = - watchStates.get(channel.name) match { - case null => None - case cs => Some(new WatchUITask(channel, cs)) - } - private[sbt] def isInWatch(channel: CommandChannel): Boolean = - watchStates.get(channel.name) != null - private[sbt] def 
isPending(channel: CommandChannel): Boolean = - Option(watchStates.get(channel.name)).fold(false)(_.pending) + private[sbt] def watchUITaskFor(state: State, channel: CommandChannel): Option[UITask] = + state.get(watchStates).flatMap(_.get(channel.name)).map(new WatchUITask(channel, _, state)) + private[sbt] def isInWatch(state: State, channel: CommandChannel): Boolean = + state.get(watchStates).exists(_.contains(channel.name)) + private[sbt] def isPending(state: State, channel: CommandChannel): Boolean = + state.get(watchStates).exists(_.get(channel.name).exists(_.pending)) private[this] class WatchUITask( override private[sbt] val channel: CommandChannel, cs: ContinuousState, + state: State ) extends Thread(s"sbt-${channel.name}-watch-ui-thread") with UITask { override private[sbt] def reader: UITask.Reader = () => { @@ -1229,8 +1238,12 @@ private[sbt] object ContinuousCommands { recursive = false ) } - val ws = watchState(channel.name) - watchStates.put(channel.name, ws.withPending(true)) + val ws = state.get(watchStates) match { + case None => throw new IllegalStateException("no watch states") + case Some(ws) => + ws.get(channel.name) + .getOrElse(throw new IllegalStateException(s"no watch state for ${channel.name}")) + } exitAction match { // Use a Left so that the client can immediately exit watch via case Watch.CancelWatch => Left(s"$stopWatch ${channel.name}") @@ -1248,30 +1261,40 @@ private[sbt] object ContinuousCommands { } } @inline - private[this] def watchState(channel: String): ContinuousState = watchStates.get(channel) match { - case null => throw new IllegalStateException(s"No watch state for $channel") - case s => s - } + private[this] def watchState(state: State, channel: String): ContinuousState = + state.get(watchStates).flatMap(_.get(channel)) match { + case None => throw new IllegalStateException(s"no watch state for $channel") + case Some(s) => s + } - private[this] val preWatchCommand = watchCommand(preWatch) { (channel, state) => - StandardMain.exchange.channelForName(channel).foreach(_.terminal.setPrompt(Prompt.Watch)) - watchState(channel).beforeCommand(state) + private[sbt] val preWatchCommand = watchCommand(preWatch) { (channel, state) => + watchState(state, channel).beforeCommand(state) } - private[this] val postWatchCommand = watchCommand(postWatch) { (channel, state) => - StandardMain.exchange.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel)))) - val ws = watchState(channel) - watchStates.put(channel, ws.withPending(false)) - ws.afterCommand(state) + private[sbt] val postWatchCommand = watchCommand(postWatch) { (channel, state) => + val cs = watchState(state, channel) + StandardMain.exchange.channelForName(channel).foreach { c => + c.terminal.setPrompt(Prompt.Watch) + c.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel)))) + } + val postState = state.get(watchStates) match { + case None => state + case Some(ws) => state.put(watchStates, ws + (channel -> cs.withPending(false))) + } + cs.afterCommand(postState) } - private[this] val stopWatchCommand = watchCommand(stopWatch) { (channel, state) => - stopWatchImpl(channel) - state - } - private[sbt] def stopWatchImpl(channelName: String): Unit = { - StandardMain.exchange.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channelName)))) - Option(watchStates.get(channelName)).foreach { ws => - ws.afterWatch() - ws.callbacks.onExit() + private[sbt] val stopWatchCommand = watchCommand(stopWatch) { (channel, state) => + state.get(watchStates).flatMap(_.get(channel)) match { + case Some(cs) => + val 
afterWatchState = cs.afterWatch(state) + cs.callbacks.onExit() + StandardMain.exchange + .channelForName(channel) + .foreach(_.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel))))) + afterWatchState.get(watchStates) match { + case None => afterWatchState + case Some(w) => afterWatchState.put(watchStates, w - channel) + } + case _ => state } } private[this] val failWatchCommand = watchCommand(failWatch) { (channel, state) => diff --git a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala index 4aa3c1f81..9d0dec068 100644 --- a/main/src/main/scala/sbt/internal/server/NetworkChannel.scala +++ b/main/src/main/scala/sbt/internal/server/NetworkChannel.scala @@ -149,7 +149,7 @@ final class NetworkChannel( protected def authOptions: Set[ServerAuthentication] = auth override def mkUIThread: (State, CommandChannel) => UITask = (state, command) => { - if (interactive.get || ContinuousCommands.isInWatch(this)) mkUIThreadImpl(state, command) + if (interactive.get || ContinuousCommands.isInWatch(state, this)) mkUIThreadImpl(state, command) else new UITask { override private[sbt] def channel = NetworkChannel.this @@ -789,7 +789,8 @@ final class NetworkChannel( override def isAnsiSupported: Boolean = getProperty(_.isAnsiSupported, false).getOrElse(false) override def isEchoEnabled: Boolean = waitForPending(_.isEchoEnabled) override def isSuccessEnabled: Boolean = - prompt != Prompt.Batch || ContinuousCommands.isInWatch(NetworkChannel.this) + prompt != Prompt.Batch || + StandardMain.exchange.withState(ContinuousCommands.isInWatch(_, NetworkChannel.this)) override lazy val isColorEnabled: Boolean = waitForPending(_.isColorEnabled) override lazy val isSupershellEnabled: Boolean = waitForPending(_.isSupershellEnabled) getProperties(false) diff --git a/sbt/src/sbt-test/watch/legacy-sources/build.sbt b/sbt/src/sbt-test/watch/legacy-sources/build.sbt index 3ee3097d9..9c20c4496 100644 --- a/sbt/src/sbt-test/watch/legacy-sources/build.sbt +++ b/sbt/src/sbt-test/watch/legacy-sources/build.sbt @@ -1,7 +1,17 @@ -import sbt.legacy.sources.Build._ - Global / watchSources += new sbt.internal.io.Source(baseDirectory.value, "global.txt", NothingFilter, false) +val setStringValue = inputKey[Unit]("set a global string to a value") +val checkStringValue = inputKey[Unit]("check the value of a global") + +def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask { + val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim) + IO.write(file(stringFile), string) +} +def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask { + val Seq(stringFile, string) = Def.spaceDelimited().parsed + assert(IO.read(file(stringFile)) == string) +} + watchSources in setStringValue += new sbt.internal.io.Source(baseDirectory.value, "foo.txt", NothingFilter, false) setStringValue := setStringValueImpl.evaluated diff --git a/sbt/src/sbt-test/watch/legacy-sources/project/Build.scala b/sbt/src/sbt-test/watch/legacy-sources/project/Build.scala deleted file mode 100644 index 17643092a..000000000 --- a/sbt/src/sbt-test/watch/legacy-sources/project/Build.scala +++ /dev/null @@ -1,17 +0,0 @@ -package sbt.legacy.sources - -import sbt._ -import Keys._ - -object Build { - val setStringValue = inputKey[Unit]("set a global string to a value") - val checkStringValue = inputKey[Unit]("check the value of a global") - def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask { - val Seq(stringFile, string) = 
Def.spaceDelimited().parsed.map(_.trim)
-    IO.write(file(stringFile), string)
-  }
-  def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
-    val Seq(stringFile, string) = Def.spaceDelimited().parsed
-    assert(IO.read(file(stringFile)) == string)
-  }
-}
\ No newline at end of file

From caccba711206c0ef773bd67a63a96d24a7d49a8b Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Mon, 27 Jul 2020 10:59:57 -0700
Subject: [PATCH 6/9] Add fast path for parsing commands

It can easily take 2ms or more to parse a command, depending on state's
combined parser. There are some commands that sbt requires to work that we can
handle in microseconds instead of milliseconds by special-casing them.

After this change, I saw the performance of
https://github.com/eatkins/scala-build-watch-performance improve by a
consistent 4-5ms in the 3 source file example, which was a drop from 120ms to
115ms. While not necessarily earth-shattering, the parsing overhead could
theoretically be much larger in other projects that have a lot of plugins and
custom tasks/commands. I think it's worth the modest maintenance cost.
---
 main/src/main/scala/sbt/MainLoop.scala           | 10 +++-
 .../main/scala/sbt/internal/Continuous.scala     |  2 +-
 .../sbt/internal/FastTrackCommands.scala         | 53 +++++++++++++++++++
 3 files changed, 63 insertions(+), 2 deletions(-)
 create mode 100644 main/src/main/scala/sbt/internal/FastTrackCommands.scala

diff --git a/main/src/main/scala/sbt/MainLoop.scala b/main/src/main/scala/sbt/MainLoop.scala
index a2c4efd7d..99256ed79 100644
--- a/main/src/main/scala/sbt/MainLoop.scala
+++ b/main/src/main/scala/sbt/MainLoop.scala
@@ -23,6 +23,7 @@ import sbt.util.Logger

 import scala.annotation.tailrec
 import scala.util.control.NonFatal
+import sbt.internal.FastTrackCommands

 object MainLoop {

@@ -201,7 +202,14 @@ object MainLoop {
    StandardMain.exchange.setState(progressState)
    StandardMain.exchange.setExec(Some(exec))
    StandardMain.exchange.unprompt(ConsoleUnpromptEvent(exec.source))
-    val newState = Command.process(exec.commandLine, progressState)
+    /*
+     * FastTrackCommands.evaluate can be significantly faster than Command.process because
+     * it avoids an expensive parsing step for internal commands that are easy to parse.
+     * Dropping (FastTrackCommands.evaluate ... getOrElse) should be functionally identical
+     * but slower.
+     */
+    val newState = FastTrackCommands.evaluate(progressState, exec.commandLine) getOrElse
+      Command.process(exec.commandLine, progressState)
     // Flush the terminal output after command evaluation to ensure that all output
     // is displayed in the thin client before we report the command status.
val terminal = channelName.flatMap(exchange.channelForName(_).map(_.terminal)) diff --git a/main/src/main/scala/sbt/internal/Continuous.scala b/main/src/main/scala/sbt/internal/Continuous.scala index 557fa3300..cd6682433 100644 --- a/main/src/main/scala/sbt/internal/Continuous.scala +++ b/main/src/main/scala/sbt/internal/Continuous.scala @@ -1297,7 +1297,7 @@ private[sbt] object ContinuousCommands { case _ => state } } - private[this] val failWatchCommand = watchCommand(failWatch) { (channel, state) => + private[sbt] val failWatchCommand = watchCommand(failWatch) { (channel, state) => state.fail } /* diff --git a/main/src/main/scala/sbt/internal/FastTrackCommands.scala b/main/src/main/scala/sbt/internal/FastTrackCommands.scala new file mode 100644 index 000000000..8c53f81de --- /dev/null +++ b/main/src/main/scala/sbt/internal/FastTrackCommands.scala @@ -0,0 +1,53 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt +package internal + +import BasicCommandStrings._ +import BasicCommands._ +import BuiltinCommands.{ setTerminalCommand, shell, waitCmd } +import ContinuousCommands._ + +import sbt.internal.util.complete.Parser + +/** This is used to speed up command parsing. */ +private[sbt] object FastTrackCommands { + private def fromCommand( + cmd: String, + command: Command, + arguments: Boolean = true, + ): (State, String) => Option[State] = + (s, c) => + Parser.parse(if (arguments) c else "", command.parser(s)) match { + case Right(newState) => Some(newState()) + case l => None + } + private val commands = Map[String, (State, String) => Option[State]]( + FailureWall -> { case (s, c) => if (c == FailureWall) Some(s) else None }, + StashOnFailure -> fromCommand(StashOnFailure, stashOnFailure, arguments = false), + PopOnFailure -> fromCommand(PopOnFailure, popOnFailure, arguments = false), + Shell -> fromCommand(Shell, shell), + SetTerminal -> fromCommand(SetTerminal, setTerminalCommand), + failWatch -> fromCommand(failWatch, failWatchCommand), + preWatch -> fromCommand(preWatch, preWatchCommand), + postWatch -> fromCommand(postWatch, postWatchCommand), + runWatch -> fromCommand(runWatch, runWatchCommand), + stopWatch -> fromCommand(stopWatch, stopWatchCommand), + waitWatch -> fromCommand(waitWatch, waitCmd), + ) + private[sbt] def evaluate(state: State, cmd: String): Option[State] = { + cmd.trim.split(" ") match { + case Array(h, _*) => + commands.get(h) match { + case Some(command) => command(state, cmd) + case _ => None + } + case _ => None + } + } +} From 5e2fe77434299a061b3ea31de9e65aaae62e08a9 Mon Sep 17 00:00:00 2001 From: Ethan Atkins Date: Tue, 4 Aug 2020 13:34:20 -0700 Subject: [PATCH 7/9] Disable server tests on windows ci The server tests fail often which makes CI very painful. 
---
 .appveyor.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.appveyor.yml b/.appveyor.yml
index 0c3f595a1..bad54a126 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -155,4 +155,6 @@ for:
       - '%USERPROFILE%\.sbt'
   test_script:
-    - sbt "scripted actions/* classloader-cache/* nio/* watch/*" "serverTestProj/test"
+    # The server tests often fail in CI when run together so just run a single test to ensure
+    # that the thin client works on windows
+    - sbt "scripted actions/* classloader-cache/* nio/* watch/*" "serverTestProj/testOnly testpkg.ClientTest"

From 3a9d3490657dd7db33cb62f916d0ebef4d7cb549 Mon Sep 17 00:00:00 2001
From: Ethan Atkins
Date: Tue, 4 Aug 2020 17:08:30 -0700
Subject: [PATCH 8/9] Check server test project scalafmt on travis

---
 .travis.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.travis.yml b/.travis.yml
index 31d026e20..dcf88b527 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,7 +11,7 @@ env:
     # WHITESOURCE_PASSWORD=
     - secure: d3bu2KNwsVHwfhbGgO+gmRfDKBJhfICdCJFGWKf2w3Gv86AJZX9nuTYRxz0KtdvEHO5Xw8WTBZLPb2thSJqhw9OCm4J8TBAVqCP0ruUj4+aqBUFy4bVexQ6WKE6nWHs4JPzPk8c6uC1LG3hMuzlC8RGETXtL/n81Ef1u7NjyXjs=
   matrix:
-    - SBT_CMD="mimaReportBinaryIssues ; javafmtCheck ; Test / javafmtCheck; scalafmtCheckAll ; scalafmtSbtCheck; headerCheck ;test:headerCheck ;whitesourceOnPush ;test:compile; publishLocal; test; serverTestProj/test; doc; $UTIL_TESTS; ++$SCALA_213; $UTIL_TESTS"
+    - SBT_CMD="mimaReportBinaryIssues ; javafmtCheck ; Test / javafmtCheck; scalafmtCheckAll ; scalafmtSbtCheck; serverTestProj/scalafmtCheckAll; headerCheck ;test:headerCheck ;whitesourceOnPush ;test:compile; publishLocal; test; serverTestProj/test; doc; $UTIL_TESTS; ++$SCALA_213; $UTIL_TESTS"
     - SBT_CMD="scripted actions/* apiinfo/* compiler-project/* ivy-deps-management/* reporter/* tests/* watch/* classloader-cache/* package/*"
     - SBT_CMD="scripted dependency-management/* plugins/* project-load/* java/* run/* nio/*"
     - SBT_CMD="repoOverrideTest:scripted dependency-management/*; scripted source-dependencies/* project/*"

From 002f97cae75c86ad6aced256d43034791b7b0a99 Mon Sep 17 00:00:00 2001
From: Eugene Yokota
Date: Sun, 26 Jul 2020 20:46:43 -0400
Subject: [PATCH 9/9] Build pipelining

Ref https://github.com/sbt/zinc/pull/744

This implements `ThisBuild / usePipelining`, which configures the subproject
pipelining available from Zinc 1.4.0. The basic idea is to start subproject
compilation as soon as pickle JARs (early output) become available. This is in
part enabled by the Scala compiler's new flags `-Ypickle-java` and
`-Ypickle-write`. The other part of the magic is the use of `Def.promise`:

```
earlyOutputPing := Def.promise[Boolean],
```

This notifies the `compileEarly` task, which looks like a normal task to the
rest of the tasks but is in fact promise-blocked. In other words, unless the
full `compile` task is also invoked, `compileEarly` will never return, forever
waiting for the `earlyOutputPing`.
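As a sketch, the promise-blocked dataflow looks roughly like this (simplified
from the settings in this patch; `readEarlyAnalysis` is a hypothetical
stand-in for the analysis-store lookup done in `compileEarlyTask`):

```
// Producer side: partway through compilation, Zinc reports that the pickle
// JAR (early output) has been written, which completes the promise.
compileProgress := {
  val promise = earlyOutputPing.value
  new CompileProgress {
    override def afterEarlyOutput(isSuccess: Boolean): Unit =
      promise.complete(Value(isSuccess))
  }
}

// Consumer side: compileEarly blocks on the promise, so a downstream
// subproject can start compiling against the pickle JAR while this
// subproject's full compile is still running.
compileEarly := Def.task {
  if (earlyOutputPing.await.value) readEarlyAnalysis() // hypothetical helper
  else compile.value // early output impossible (e.g. macros): full compile
}.value
```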
--- build.sbt | 13 +- .../src/main/scala/sbt/RawCompileLike.scala | 8 +- main-settings/src/main/scala/sbt/Def.scala | 2 + .../src/main/scala/sbt/PromiseWrap.scala | 1 + main/src/main/scala/sbt/Defaults.scala | 583 ++++++++---------- main/src/main/scala/sbt/Keys.scala | 22 +- .../scala/sbt/internal/ClasspathImpl.scala | 427 +++++++++++++ .../src/main/scala/sbt/internal/SysProp.scala | 1 + .../sbt/internal/VirtualFileValueCache.scala | 69 +++ project/Dependencies.scala | 2 +- .../pipelining-java/build.sbt | 13 + .../pipelining-java/changes/Break.java | 2 + .../pipelining-java/dep/A.java | 3 + .../source-dependencies/pipelining-java/test | 5 + .../pipelining-java/use/B.java | 3 + .../source-dependencies/pipelining/build.sbt | 22 + .../pipelining/changes/Break.scala | 3 + .../pipelining/dep/A.scala | 5 + .../source-dependencies/pipelining/test | 9 + .../pipelining/use/B.scala | 5 + .../src/test/scala/testpkg/ClientTest.scala | 6 + 21 files changed, 873 insertions(+), 331 deletions(-) create mode 100644 main/src/main/scala/sbt/internal/ClasspathImpl.scala create mode 100644 main/src/main/scala/sbt/internal/VirtualFileValueCache.scala create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining-java/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining-java/changes/Break.java create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining-java/dep/A.java create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining-java/test create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining-java/use/B.java create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining/build.sbt create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining/changes/Break.scala create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining/dep/A.scala create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining/test create mode 100644 sbt/src/sbt-test/source-dependencies/pipelining/use/B.scala diff --git a/build.sbt b/build.sbt index 511f7a153..5572d44ab 100644 --- a/build.sbt +++ b/build.sbt @@ -973,7 +973,18 @@ lazy val mainProj = (project in file("main")) // the binary compatible version. 
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.NetworkChannel.this"), exclude[IncompatibleSignatureProblem]("sbt.internal.DeprecatedContinuous.taskDefinitions"), - exclude[MissingClassProblem]("sbt.internal.SettingsGraph*") + exclude[MissingClassProblem]("sbt.internal.SettingsGraph*"), + // Tasks include non-Files, but it's ok + exclude[IncompatibleSignatureProblem]("sbt.Defaults.outputConfigPaths"), + // private[sbt] + exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedProducts"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedJarProducts"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedDependencies0"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependenciesImplTask"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependencyJarsImplTask"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"), + exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"), ) ) .configure( diff --git a/main-actions/src/main/scala/sbt/RawCompileLike.scala b/main-actions/src/main/scala/sbt/RawCompileLike.scala index b2cf1fb27..7ac1a12cc 100644 --- a/main-actions/src/main/scala/sbt/RawCompileLike.scala +++ b/main-actions/src/main/scala/sbt/RawCompileLike.scala @@ -11,7 +11,7 @@ import scala.annotation.tailrec import java.io.File import sbt.io.syntax._ import sbt.io.IO -import sbt.internal.inc.{ PlainVirtualFile, RawCompiler, ScalaInstance } +import sbt.internal.inc.{ RawCompiler, ScalaInstance } import sbt.internal.util.Types.:+: import sbt.internal.util.HListFormats._ import sbt.internal.util.HNil @@ -88,11 +88,7 @@ object RawCompileLike { def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen = (sources, classpath, outputDirectory, options, _, log) => { val compiler = new RawCompiler(instance, cpOptions, log) - compiler(sources map { x => - PlainVirtualFile(x.toPath) - }, classpath map { x => - PlainVirtualFile(x.toPath) - }, outputDirectory.toPath, options) + compiler(sources.map(_.toPath), classpath.map(_.toPath), outputDirectory.toPath, options) } def compile( diff --git a/main-settings/src/main/scala/sbt/Def.scala b/main-settings/src/main/scala/sbt/Def.scala index 2f6833fb8..2467f6b6c 100644 --- a/main-settings/src/main/scala/sbt/Def.scala +++ b/main-settings/src/main/scala/sbt/Def.scala @@ -18,10 +18,12 @@ import sbt.internal.util.complete.Parser import sbt.internal.util._ import Util._ import sbt.util.Show +import xsbti.VirtualFile /** A concrete settings system that uses `sbt.Scope` for the scope type. 
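+ * (Note on the alias added below: with pipelining, classpath entries such as
+ * pickle JARs are `VirtualFile`s rather than plain `File`s, hence the new
+ * `VirtualClasspath` type next to the `File`-based `Classpath`.)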
*/ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits { type Classpath = Seq[Attributed[File]] + type VirtualClasspath = Seq[Attributed[VirtualFile]] def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings) diff --git a/main-settings/src/main/scala/sbt/PromiseWrap.scala b/main-settings/src/main/scala/sbt/PromiseWrap.scala index 2706ac5bc..ec9a50817 100644 --- a/main-settings/src/main/scala/sbt/PromiseWrap.scala +++ b/main-settings/src/main/scala/sbt/PromiseWrap.scala @@ -18,4 +18,5 @@ final class PromiseWrap[A] { } def success(value: A): Unit = underlying.success(value) def failure(cause: Throwable): Unit = underlying.failure(cause) + def isCompleted: Boolean = underlying.isCompleted } diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 487ee7958..1de20aaf3 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -26,7 +26,8 @@ import sbt.Project.{ inTask, richInitialize, richInitializeTask, - richTaskSessionVar + richTaskSessionVar, + sbtRichTaskPromise, } import sbt.Scope.{ GlobalScope, ThisScope, fillTaskAxis } import sbt.coursierint._ @@ -35,7 +36,14 @@ import sbt.internal._ import sbt.internal.classpath.AlternativeZincUtil import sbt.internal.inc.JavaInterfaceUtil._ import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathFilter, ClasspathUtil } -import sbt.internal.inc.{ MappedFileConverter, PlainVirtualFile, Stamps, ZincLmUtil, ZincUtil } +import sbt.internal.inc.{ + CompileOutput, + MappedFileConverter, + PlainVirtualFile, + Stamps, + ZincLmUtil, + ZincUtil +} import sbt.internal.io.{ Source, WatchState } import sbt.internal.librarymanagement.mavenint.{ PomExtraDependencyAttributes, @@ -68,7 +76,6 @@ import sbt.librarymanagement.Configurations.{ Provided, Runtime, Test, - names } import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion } import sbt.librarymanagement._ @@ -82,7 +89,7 @@ import sbt.nio.Watch import sbt.std.TaskExtra._ import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint } import sbt.util.CacheImplicits._ -import sbt.util.InterfaceUtil.{ toJavaFunction => f1 } +import sbt.util.InterfaceUtil.{ toJavaFunction => f1, t2 } import sbt.util._ import sjsonnew._ import sjsonnew.support.scalajson.unsafe.Converter @@ -97,7 +104,6 @@ import sbt.SlashSyntax0._ import sbt.internal.inc.{ Analysis, AnalyzingCompiler, - FileValueCache, Locate, ManagedLoggedReporter, MixedAnalyzingCompiler, @@ -112,6 +118,7 @@ import xsbti.compile.{ CompileOptions, CompileOrder, CompileResult, + CompileProgress, CompilerCache, Compilers, DefinesClass, @@ -180,16 +187,10 @@ object Defaults extends BuildCommon { apiMappings := Map.empty, autoScalaLibrary :== true, managedScalaInstance :== true, - classpathEntryDefinesClass := { - val converter = fileConverter.value - val f = FileValueCache({ x: NioPath => - if (x.getFileName.toString != "rt.jar") Locate.definesClass(converter.toVirtualFile(x)) - else ((_: String) => false): DefinesClass - }).get; - { (x: File) => - f(x.toPath) - } + classpathEntryDefinesClass := { (file: File) => + sys.error("use classpathEntryDefinesClassVF instead") }, + extraIncOptions :== Seq("JAVA_CLASS_VERSION" -> sys.props("java.class.version")), allowMachinePath :== true, rootPaths := { val app = appConfiguration.value @@ -373,6 +374,7 @@ object Defaults extends BuildCommon { () => Clean.deleteContents(tempDirectory, _ => false) }, turbo :== SysProp.turbo, + 
usePipelining :== SysProp.pipelining, useSuperShell := { if (insideCI.value) false else Terminal.console.isSupershellEnabled }, progressReports := { val rs = EvaluateTask.taskTimingProgress.toVector ++ EvaluateTask.taskTraceEvent.toVector @@ -543,10 +545,16 @@ object Defaults extends BuildCommon { ) // This exists for binary compatibility and probably never should have been public. def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil - lazy val outputConfigPaths = Seq( + lazy val outputConfigPaths: Seq[Setting[_]] = Seq( classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"), + // TODO: Use FileConverter once Zinc can handle non-Path + backendOutput := PlainVirtualFile(classDirectory.value.toPath), + earlyOutput / artifactPath := earlyArtifactPathSetting(artifact).value, + // TODO: Use FileConverter once Zinc can handle non-Path + earlyOutput := PlainVirtualFile((earlyOutput / artifactPath).value.toPath), semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"), compileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "zinc"), + earlyCompileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "early-zinc"), target in doc := crossTarget.value / (prefix(configuration.value.name) + "api") ) @@ -670,9 +678,10 @@ object Defaults extends BuildCommon { def defaultCompileSettings: Seq[Setting[_]] = globalDefaults(enableBinaryCompileAnalysis := true) - lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)( - compileInputsSettings - ) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq( + lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ + inTask(compile)(compileInputsSettings) ++ + inTask(compileJava)(compileInputsSettings(dependencyVirtualClasspath)) ++ + configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq( compileOutputs := { import scala.collection.JavaConverters._ val c = fileConverter.value @@ -684,9 +693,31 @@ object Defaults extends BuildCommon { }, compileOutputs := compileOutputs.triggeredBy(compile).value, clean := (compileOutputs / clean).value, + earlyOutputPing := Def.promise[Boolean], + compileProgress := { + val s = streams.value + val promise = earlyOutputPing.value + val mn = moduleName.value + val c = configuration.value + new CompileProgress { + override def afterEarlyOutput(isSuccess: Boolean): Unit = { + if (isSuccess) s.log.debug(s"[$mn / $c] early output is success") + else s.log.debug(s"[$mn / $c] early output can't be made because of macros") + promise.complete(Value(isSuccess)) + } + } + }, + compileEarly := compileEarlyTask.value, compile := compileTask.value, + compileScalaBackend := compileScalaBackendTask.value, + compileJava := compileJavaTask.value, + compileSplit := { + // conditional task + if (incOptions.value.pipelining) compileJava.value + else compileScalaBackend.value + }, internalDependencyConfigurations := InternalDependencies.configurations.value, - manipulateBytecode := compileIncremental.value, + manipulateBytecode := compileSplit.value, compileIncremental := compileIncrementalTask.tag(Tags.Compile, Tags.CPU).value, printWarnings := printWarningsTask.value, compileAnalysisFilename := { @@ -698,6 +729,9 @@ object Defaults extends BuildCommon { else "" s"inc_compile$extra.zip" }, + earlyCompileAnalysisFile := { + earlyCompileAnalysisTargetRoot.value / compileAnalysisFilename.value + }, compileAnalysisFile := { compileAnalysisTargetRoot.value / 
compileAnalysisFilename.value }, @@ -715,6 +749,22 @@ object Defaults extends BuildCommon { ): ClassFileManagerType ).toOptional ) + .withPipelining(usePipelining.value) + }, + scalacOptions := { + val old = scalacOptions.value + val converter = fileConverter.value + if (usePipelining.value) + Vector("-Ypickle-java", "-Ypickle-write", converter.toPath(earlyOutput.value).toString) ++ old + else old + }, + classpathEntryDefinesClassVF := { + val converter = fileConverter.value + val f = VirtualFileValueCache(converter)({ x: VirtualFile => + if (x.name.toString != "rt.jar") Locate.definesClass(x) + else ((_: String) => false): DefinesClass + }).get + f }, compileIncSetup := compileIncSetupTask.value, console := consoleTask.value, @@ -1429,6 +1479,19 @@ object Defaults extends BuildCommon { excludes: ScopedTaskable[FileFilter] ): Initialize[Task[Seq[File]]] = collectFiles(dirs: Taskable[Seq[File]], filter, excludes) + private[sbt] def earlyArtifactPathSetting(art: SettingKey[Artifact]): Initialize[File] = + Def.setting { + val f = artifactName.value + crossTarget.value / "early" / f( + ScalaVersion( + (scalaVersion in artifactName).value, + (scalaBinaryVersion in artifactName).value + ), + projectID.value, + art.value + ) + } + def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] = Def.setting { val f = artifactName.value @@ -1836,6 +1899,43 @@ object Defaults extends BuildCommon { finally w.close() // workaround for #937 } + /** Handles traditional Scalac compilation. For non-pipelined compilation, + * this also handles Java compilation. + */ + private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task { + val setup: Setup = compileIncSetup.value + val useBinary: Boolean = enableBinaryCompileAnalysis.value + val analysisResult: CompileResult = compileIncremental.value + // Save analysis midway if pipelining is enabled + if (analysisResult.hasModified && setup.incrementalCompilerOptions.pipelining) { + val store = + MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary) + val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup()) + store.set(contents) + } + analysisResult + } + + /** Block on earlyOutputPing promise, which will be completed by `compile` midway + * via `compileProgress` implementation. + */ + private[sbt] def compileEarlyTask: Initialize[Task[CompileAnalysis]] = Def.task { + if ({ + streams.value.log + .debug(s"${name.value}: compileEarly: blocking on earlyOutputPing") + earlyOutputPing.await.value + }) { + val useBinary: Boolean = enableBinaryCompileAnalysis.value + val store = + MixedAnalyzingCompiler.staticCachedStore(earlyCompileAnalysisFile.value.toPath, !useBinary) + store.get.toOption match { + case Some(contents) => contents.getAnalysis + case _ => Analysis.empty + } + } else { + compile.value + } + } def compileTask: Initialize[Task[CompileAnalysis]] = Def.task { val setup: Setup = compileIncSetup.value val useBinary: Boolean = enableBinaryCompileAnalysis.value @@ -1860,11 +1960,31 @@ object Defaults extends BuildCommon { def compileIncrementalTask = Def.task { BspCompileTask.compute(bspTargetIdentifier.value, thisProjectRef.value, configuration.value) { // TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too? 
- compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value) + compileIncrementalTaskImpl( + streams.value, + (compile / compileInputs).value, + earlyOutputPing.value + ) } } private val incCompiler = ZincUtil.defaultIncrementalCompiler - private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Inputs): CompileResult = { + private[sbt] def compileJavaTask: Initialize[Task[CompileResult]] = Def.task { + val s = streams.value + val in = (compileJava / compileInputs).value + val _ = compileScalaBackend.value + try { + incCompiler.asInstanceOf[sbt.internal.inc.IncrementalCompilerImpl].compileAllJava(in, s.log) + } finally { + in.setup.reporter match { + case r: BuildServerReporter => r.sendFinalReport() + } + } + } + private[this] def compileIncrementalTaskImpl( + s: TaskStreams, + ci: Inputs, + promise: PromiseWrap[Boolean] + ): CompileResult = { lazy val x = s.text(ExportStream) def onArgs(cs: Compilers) = { cs.withScalac( @@ -1874,13 +1994,14 @@ object Defaults extends BuildCommon { } ) } - // .withJavac( - // cs.javac.onArgs(exported(x, "javac")) - //) val compilers: Compilers = ci.compilers val i = ci.withCompilers(onArgs(compilers)) try { incCompiler.compile(i, s.log) + } catch { + case e: Throwable if !promise.isCompleted => + promise.failure(e) + throw e } finally { i.setup.reporter match { case r: BuildServerReporter => r.sendFinalReport() @@ -1889,47 +2010,44 @@ object Defaults extends BuildCommon { } } def compileIncSetupTask = Def.task { - val converter = fileConverter.value + val cp = dependencyPicklePath.value val lookup = new PerClasspathEntryLookup { - private val cachedAnalysisMap: File => Option[CompileAnalysis] = - analysisMap(dependencyClasspath.value) - private val cachedPerEntryDefinesClassLookup: File => DefinesClass = - Keys.classpathEntryDefinesClass.value - + private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] = + analysisMap(cp) + private val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass = + Keys.classpathEntryDefinesClassVF.value override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] = - cachedAnalysisMap(converter.toPath(classpathEntry).toFile).toOptional + cachedAnalysisMap(classpathEntry).toOptional override def definesClass(classpathEntry: VirtualFile): DefinesClass = - cachedPerEntryDefinesClassLookup(converter.toPath(classpathEntry).toFile) + cachedPerEntryDefinesClassLookup(classpathEntry) } + val extra = extraIncOptions.value.map(t2) Setup.of( lookup, (skip in compile).value, - // TODO - this is kind of a bad way to grab the cache directory for streams... 
       compileAnalysisFile.value.toPath,
       compilerCache.value,
       incOptions.value,
       (compilerReporter in compile).value,
-      // TODO - task / setting for compile progress
-      None.toOptional: Optional[xsbti.compile.CompileProgress],
-      // TODO - task / setting for extra,
-      Array.empty: Array[xsbti.T2[String, String]],
+      Some((compile / compileProgress).value).toOptional,
+      extra.toArray,
     )
   }
-  def compileInputsSettings: Seq[Setting[_]] = {
+  def compileInputsSettings: Seq[Setting[_]] =
+    compileInputsSettings(dependencyPicklePath)
+  def compileInputsSettings(classpathTask: TaskKey[VirtualClasspath]): Seq[Setting[_]] = {
     Seq(
       compileOptions := {
         val c = fileConverter.value
-        val cp0 = classDirectory.value +: data(dependencyClasspath.value)
-        val cp = cp0 map { x =>
-          PlainVirtualFile(x.toPath)
-        }
+        val cp0 = classpathTask.value
+        val cp = backendOutput.value +: data(cp0)
         val vs = sources.value.toVector map { x =>
           c.toVirtualFile(x.toPath)
         }
         CompileOptions.of(
-          cp.toArray: Array[VirtualFile],
+          cp.toArray,
           vs.toArray,
-          classDirectory.value.toPath,
+          c.toPath(backendOutput.value),
           scalacOptions.value.toArray,
           javacOptions.value.toArray,
           maxErrors.value,
@@ -1938,7 +2056,7 @@ object Defaults extends BuildCommon {
           None.toOptional: Optional[NioPath],
           Some(fileConverter.value).toOptional,
           Some(reusableStamper.value).toOptional,
-          None.toOptional: Optional[xsbti.compile.Output],
+          Some(CompileOutput(c.toPath(earlyOutput.value))).toOptional,
         )
       },
       compilerReporter := {
@@ -2166,8 +2284,10 @@ object Classpaths {
   def concatSettings[T](a: SettingKey[Seq[T]], b: SettingKey[Seq[T]]): Initialize[Seq[T]] =
     concatSettings(a: Initialize[Seq[T]], b) // forward to widened variant
 
+  // Included as part of JvmPlugin#projectSettings.
   lazy val configSettings: Seq[Setting[_]] = classpaths ++ Seq(
     products := makeProducts.value,
+    pickleProducts := makePickleProducts.value,
     productDirectories := classDirectory.value :: Nil,
     classpathConfiguration := findClasspathConfig(
       internalConfigurationMap.value,
@@ -2181,7 +2301,7 @@ object Classpaths {
     externalDependencyClasspath := concat(unmanagedClasspath, managedClasspath).value,
     dependencyClasspath := concat(internalDependencyClasspath, externalDependencyClasspath).value,
     fullClasspath := concatDistinct(exportedProducts, dependencyClasspath).value,
-    internalDependencyClasspath := internalDependencies.value,
+    internalDependencyClasspath := ClasspathImpl.internalDependencyClasspathTask.value,
     unmanagedClasspath := unmanagedDependencies.value,
     managedClasspath := {
       val isMeta = isMetaBuild.value
@@ -2198,12 +2318,20 @@ object Classpaths {
       if (isMeta && !force && !csr) mjars ++ sbtCp
       else mjars
     },
-    exportedProducts := trackedExportedProducts(TrackLevel.TrackAlways).value,
-    exportedProductsIfMissing := trackedExportedProducts(TrackLevel.TrackIfMissing).value,
-    exportedProductsNoTracking := trackedExportedProducts(TrackLevel.NoTracking).value,
-    exportedProductJars := trackedExportedJarProducts(TrackLevel.TrackAlways).value,
-    exportedProductJarsIfMissing := trackedExportedJarProducts(TrackLevel.TrackIfMissing).value,
-    exportedProductJarsNoTracking := trackedExportedJarProducts(TrackLevel.NoTracking).value,
+    exportedProducts := ClasspathImpl.trackedExportedProducts(TrackLevel.TrackAlways).value,
+    exportedProductsIfMissing := ClasspathImpl
+      .trackedExportedProducts(TrackLevel.TrackIfMissing)
+      .value,
+    exportedProductsNoTracking := ClasspathImpl
+      .trackedExportedProducts(TrackLevel.NoTracking)
+      .value,
+    exportedProductJars := ClasspathImpl.trackedExportedJarProducts(TrackLevel.TrackAlways).value,
+    exportedProductJarsIfMissing := ClasspathImpl
+      .trackedExportedJarProducts(TrackLevel.TrackIfMissing)
+      .value,
+    exportedProductJarsNoTracking := ClasspathImpl
+      .trackedExportedJarProducts(TrackLevel.NoTracking)
+      .value,
     internalDependencyAsJars := internalDependencyJarsTask.value,
     dependencyClasspathAsJars := concat(internalDependencyAsJars, externalDependencyClasspath).value,
     fullClasspathAsJars := concatDistinct(exportedProductJars, dependencyClasspathAsJars).value,
@@ -2221,7 +2349,38 @@ object Classpaths {
       dependencyClasspathFiles.value.flatMap(
         p => FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _)
       )
-    }
+    },
+    dependencyVirtualClasspath := {
+      // TODO: Use converter
+      val cp0 = dependencyClasspath.value
+      cp0 map {
+        _ map { file =>
+          PlainVirtualFile(file.toPath): VirtualFile
+        }
+      }
+    },
+    // Note: invoking this task from the shell would block indefinitely because it will
+    // wait for the upstream compilation to start.
+    dependencyPicklePath := {
+      // This is a conditional task. Do not refactor.
+      if (incOptions.value.pipelining) {
+        concat(
+          internalDependencyPicklePath,
+          Def.task {
+            // TODO: Use converter
+            externalDependencyClasspath.value map {
+              _ map { file =>
+                PlainVirtualFile(file.toPath): VirtualFile
+              }
+            }
+          }
+        ).value
+      } else {
+        dependencyVirtualClasspath.value
+      }
+    },
+    internalDependencyPicklePath := ClasspathImpl.internalDependencyPicklePathTask.value,
+    exportedPickles := ClasspathImpl.exportedPicklesTask.value,
   )
 
   private[this] def exportClasspath(s: Setting[Task[Classpath]]): Setting[Task[Classpath]] =
@@ -3182,15 +3341,15 @@ object Classpaths {
   }
 
 /*
-  // can't cache deliver/publish easily since files involved are hidden behind patterns. publish will be difficult to verify target-side anyway
-  def cachedPublish(cacheFile: File)(g: (IvySbt#Module, PublishConfiguration) => Unit, module: IvySbt#Module, config: PublishConfiguration) => Unit =
-  { case module :+: config :+: HNil =>
-  /* implicit val publishCache = publishIC
-    val f = cached(cacheFile) { (conf: IvyConfiguration, settings: ModuleSettings, config: PublishConfiguration) =>*/
-      g(module, config)
-    /*}
-    f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
-  }*/
+  // can't cache deliver/publish easily since files involved are hidden behind patterns. publish will be difficult to verify target-side anyway
+  def cachedPublish(cacheFile: File)(g: (IvySbt#Module, PublishConfiguration) => Unit, module: IvySbt#Module, config: PublishConfiguration) => Unit =
+  { case module :+: config :+: HNil =>
+  /* implicit val publishCache = publishIC
+    val f = cached(cacheFile) { (conf: IvyConfiguration, settings: ModuleSettings, config: PublishConfiguration) =>*/
+      g(module, config)
+    /*}
+    f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
+  }*/
 
 def defaultRepositoryFilter: MavenRepository => Boolean = repo => !repo.root.startsWith("file:")
 
@@ -3283,140 +3442,37 @@ object Classpaths {
     new RawRepository(resolver, resolver.getName)
   }
 
-  def analyzed[T](data: T, analysis: CompileAnalysis) =
-    Attributed.blank(data).put(Keys.analysis, analysis)
+  def analyzed[T](data: T, analysis: CompileAnalysis) = ClasspathImpl.analyzed[T](data, analysis)
 
   def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
+    val c = fileConverter.value
     compile.value
     copyResources.value
-    classDirectory.value :: Nil
+    c.toPath(backendOutput.value).toFile :: Nil
   }
-  private[sbt] def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =
-    Def.task {
-      val _ = (packageBin / dynamicDependency).value
-      val art = (artifact in packageBin).value
-      val module = projectID.value
-      val config = configuration.value
-      for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings
-        .store(analyzed(f, analysis), apiURL.value)
-        .put(artifact.key, art)
-        .put(moduleID.key, module)
-        .put(configuration.key, config)
-    }
-  private[sbt] def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =
-    Def.task {
-      val _ = (packageBin / dynamicDependency).value
-      val art = (artifact in packageBin).value
-      val module = projectID.value
-      val config = configuration.value
-      for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings
-        .store(analyzed(f, analysis), apiURL.value)
-        .put(artifact.key, art)
-        .put(moduleID.key, module)
-        .put(configuration.key, config)
-    }
-  private[this] def trackedExportedProductsImplTask(
-      track: TrackLevel
-  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
-    Def.taskDyn {
-      val _ = (packageBin / dynamicDependency).value
-      val useJars = exportJars.value
-      if (useJars) trackedJarProductsImplTask(track)
-      else trackedNonJarProductsImplTask(track)
-    }
-  private[this] def trackedNonJarProductsImplTask(
-      track: TrackLevel
-  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
-    Def.taskDyn {
-      val dirs = productDirectories.value
-      val view = fileTreeView.value
-      def containsClassFile(): Boolean =
-        view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).nonEmpty
-      TrackLevel.intersection(track, exportToInternal.value) match {
-        case TrackLevel.TrackAlways =>
-          Def.task {
-            products.value map { (_, compile.value) }
-          }
-        case TrackLevel.TrackIfMissing if !containsClassFile() =>
-          Def.task {
-            products.value map { (_, compile.value) }
-          }
-        case _ =>
-          Def.task {
-            val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
-            dirs.map(_ -> analysis)
-          }
-      }
-    }
-  private[this] def trackedJarProductsImplTask(
-      track: TrackLevel
-  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
-    Def.taskDyn {
-      val jar = (artifactPath in packageBin).value
-      TrackLevel.intersection(track, exportToInternal.value) match {
-        case TrackLevel.TrackAlways =>
-          Def.task {
-            Seq((packageBin.value, compile.value))
-          }
-        case TrackLevel.TrackIfMissing if !jar.exists =>
-          Def.task {
-            Seq((packageBin.value, compile.value))
-          }
-        case _ =>
-          Def.task {
-            val analysisOpt = previousCompile.value.analysis.toOption
-            Seq(jar) map { x =>
-              (
-                x,
-                if (analysisOpt.isDefined) analysisOpt.get
-                else Analysis.empty
-              )
-            }
-          }
-      }
-    }
+
+  private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task {
+    // This is a conditional task.
+    if (earlyOutputPing.await.value) {
+      // TODO: copyResources.value
+      earlyOutput.value :: Nil
+    } else {
+      val c = fileConverter.value
+      products.value map { x: File =>
+        c.toVirtualFile(x.toPath)
+      }
+    }
+  }
 
   def constructBuildDependencies: Initialize[BuildDependencies] =
     loadedBuild(lb => BuildUtil.dependencies(lb.units))
 
+  @deprecated("not used", "1.4.0")
   def internalDependencies: Initialize[Task[Classpath]] =
-    Def.taskDyn {
-      val _ = (
-        (exportedProductsNoTracking / transitiveClasspathDependency).value,
-        (exportedProductsIfMissing / transitiveClasspathDependency).value,
-        (exportedProducts / transitiveClasspathDependency).value,
-        (exportedProductJarsNoTracking / transitiveClasspathDependency).value,
-        (exportedProductJarsIfMissing / transitiveClasspathDependency).value,
-        (exportedProductJars / transitiveClasspathDependency).value
-      )
-      internalDependenciesImplTask(
-        thisProjectRef.value,
-        classpathConfiguration.value,
-        configuration.value,
-        settingsData.value,
-        buildDependencies.value,
-        trackInternalDependencies.value
-      )
-    }
+    ClasspathImpl.internalDependencyClasspathTask
+
   def internalDependencyJarsTask: Initialize[Task[Classpath]] =
-    Def.taskDyn {
-      internalDependencyJarsImplTask(
-        thisProjectRef.value,
-        classpathConfiguration.value,
-        configuration.value,
-        settingsData.value,
-        buildDependencies.value,
-        trackInternalDependencies.value
-      )
-    }
-  def unmanagedDependencies: Initialize[Task[Classpath]] =
-    Def.taskDyn {
-      unmanagedDependencies0(
-        thisProjectRef.value,
-        configuration.value,
-        settingsData.value,
-        buildDependencies.value
-      )
-    }
+    ClasspathImpl.internalDependencyJarsTask
+  def unmanagedDependencies: Initialize[Task[Classpath]] = ClasspathImpl.unmanagedDependenciesTask
 
   def mkIvyConfiguration: Initialize[Task[InlineIvyConfiguration]] = Def.task {
     val (rs, other) = (fullResolvers.value.toVector, otherResolvers.value.toVector)
@@ -3435,37 +3491,12 @@ object Classpaths {
       .withLog(s.log)
   }
 
-  import java.util.LinkedHashSet
-
-  import collection.JavaConverters._
   def interSort(
       projectRef: ProjectRef,
       conf: Configuration,
       data: Settings[Scope],
       deps: BuildDependencies
-  ): Seq[(ProjectRef, String)] = {
-    val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
-    def visit(p: ProjectRef, c: Configuration): Unit = {
-      val applicableConfigs = allConfigs(c)
-      for (ac <- applicableConfigs) // add all configurations in this project
-        visited add (p -> ac.name)
-      val masterConfs = names(getConfigurations(projectRef, data).toVector)
-
-      for (ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)) {
-        val configurations = getConfigurations(dep, data)
-        val mapping =
-          mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile")
-        // map master configuration 'c' and all extended configurations to the appropriate dependency configuration
-        for (ac <- applicableConfigs; depConfName <- mapping(ac.name)) {
-          for (depConf <- confOpt(configurations, depConfName))
-            if (!visited((dep, depConfName)))
-              visit(dep, depConf)
-        }
-      }
-    }
-    visit(projectRef, conf)
-    visited.toSeq
-  }
+  ): Seq[(ProjectRef, String)] = ClasspathImpl.interSort(projectRef, conf, data, deps)
 
   def interSortConfigurations(
       projectRef: ProjectRef,
@@ -3477,143 +3508,50 @@ object Classpaths {
       case (projectRef, configName) => (projectRef, ConfigRef(configName))
     }
 
-  private[sbt] def unmanagedDependencies0(
-      projectRef: ProjectRef,
-      conf: Configuration,
-      data: Settings[Scope],
-      deps: BuildDependencies
-  ): Initialize[Task[Classpath]] =
-    Def.value {
-      interDependencies(
-        projectRef,
-        deps,
-        conf,
-        conf,
-        data,
-        TrackLevel.TrackAlways,
-        true,
-        (dep, conf, data, _) => unmanagedLibs(dep, conf, data),
-      )
-    }
-  private[sbt] def internalDependenciesImplTask(
-      projectRef: ProjectRef,
-      conf: Configuration,
-      self: Configuration,
-      data: Settings[Scope],
-      deps: BuildDependencies,
-      track: TrackLevel
-  ): Initialize[Task[Classpath]] =
-    Def.value { interDependencies(projectRef, deps, conf, self, data, track, false, productsTask) }
-  private[sbt] def internalDependencyJarsImplTask(
-      projectRef: ProjectRef,
-      conf: Configuration,
-      self: Configuration,
-      data: Settings[Scope],
-      deps: BuildDependencies,
-      track: TrackLevel
-  ): Initialize[Task[Classpath]] =
-    Def.value {
-      interDependencies(projectRef, deps, conf, self, data, track, false, jarProductsTask)
-    }
-  private[sbt] def interDependencies(
-      projectRef: ProjectRef,
-      deps: BuildDependencies,
-      conf: Configuration,
-      self: Configuration,
-      data: Settings[Scope],
-      track: TrackLevel,
-      includeSelf: Boolean,
-      f: (ProjectRef, String, Settings[Scope], TrackLevel) => Task[Classpath]
-  ): Task[Classpath] = {
-    val visited = interSort(projectRef, conf, data, deps)
-    val tasks = (new LinkedHashSet[Task[Classpath]]).asScala
-    for ((dep, c) <- visited)
-      if (includeSelf || (dep != projectRef) || (conf.name != c && self.name != c))
-        tasks += f(dep, c, data, track)
-
-    (tasks.toSeq.join).map(_.flatten.distinct)
-  }
-
   def mapped(
       confString: Option[String],
       masterConfs: Seq[String],
       depConfs: Seq[String],
       default: String,
       defaultMapping: String
-  ): String => Seq[String] = {
-    lazy val defaultMap = parseMapping(defaultMapping, masterConfs, depConfs, _ :: Nil)
-    parseMapping(confString getOrElse default, masterConfs, depConfs, defaultMap)
-  }
+  ): String => Seq[String] =
+    ClasspathImpl.mapped(confString, masterConfs, depConfs, default, defaultMapping)
+
   def parseMapping(
       confString: String,
      masterConfs: Seq[String],
      depConfs: Seq[String],
      default: String => Seq[String]
   ): String => Seq[String] =
-    union(confString.split(";") map parseSingleMapping(masterConfs, depConfs, default))
+    ClasspathImpl.parseMapping(confString, masterConfs, depConfs, default)
+
   def parseSingleMapping(
      masterConfs: Seq[String],
      depConfs: Seq[String],
      default: String => Seq[String]
-  )(confString: String): String => Seq[String] = {
-    val ms: Seq[(String, Seq[String])] =
-      trim(confString.split("->", 2)) match {
-        case x :: Nil => for (a <- parseList(x, masterConfs)) yield (a, default(a))
-        case x :: y :: Nil =>
-          val target = parseList(y, depConfs);
-          for (a <- parseList(x, masterConfs)) yield (a, target)
-        case _ => sys.error("Invalid configuration '" + confString + "'") // shouldn't get here
-      }
-    val m = ms.toMap
-    s => m.getOrElse(s, Nil)
-  }
+  )(confString: String): String => Seq[String] =
+    ClasspathImpl.parseSingleMapping(masterConfs, depConfs, default)(confString)
 
   def union[A, B](maps: Seq[A => Seq[B]]): A => Seq[B] =
-    a => maps.foldLeft(Seq[B]()) { _ ++ _(a) } distinct;
+    ClasspathImpl.union[A, B](maps)
 
   def parseList(s: String, allConfs: Seq[String]): Seq[String] =
-    (trim(s split ",") flatMap replaceWildcard(allConfs)).distinct
-  def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] = conf match {
-    case "" => Nil
-    case "*" => allConfs
-    case _ => conf :: Nil
-  }
+    ClasspathImpl.parseList(s, allConfs)
+
+  def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] =
+    ClasspathImpl.replaceWildcard(allConfs)(conf)
 
-  private def trim(a: Array[String]): List[String] = a.toList.map(_.trim)
   def missingConfiguration(in: String, conf: String) =
     sys.error("Configuration '" + conf + "' not defined in '" + in + "'")
-  def allConfigs(conf: Configuration): Seq[Configuration] =
-    Dag.topologicalSort(conf)(_.extendsConfigs)
+  def allConfigs(conf: Configuration): Seq[Configuration] = ClasspathImpl.allConfigs(conf)
 
   def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
-    ivyConfigurations in p get data getOrElse Nil
+    ClasspathImpl.getConfigurations(p, data)
   def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
-    configurations.find(_.name == conf)
-  private[sbt] def productsTask(
-      dep: ResolvedReference,
-      conf: String,
-      data: Settings[Scope],
-      track: TrackLevel
-  ): Task[Classpath] =
-    track match {
-      case TrackLevel.NoTracking => getClasspath(exportedProductsNoTracking, dep, conf, data)
-      case TrackLevel.TrackIfMissing => getClasspath(exportedProductsIfMissing, dep, conf, data)
-      case TrackLevel.TrackAlways => getClasspath(exportedProducts, dep, conf, data)
-    }
-  private[sbt] def jarProductsTask(
-      dep: ResolvedReference,
-      conf: String,
-      data: Settings[Scope],
-      track: TrackLevel
-  ): Task[Classpath] =
-    track match {
-      case TrackLevel.NoTracking => getClasspath(exportedProductJarsNoTracking, dep, conf, data)
-      case TrackLevel.TrackIfMissing => getClasspath(exportedProductJarsIfMissing, dep, conf, data)
-      case TrackLevel.TrackAlways => getClasspath(exportedProductJars, dep, conf, data)
-    }
+    ClasspathImpl.confOpt(configurations, conf)
   def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] =
-    getClasspath(unmanagedJars, dep, conf, data)
+    ClasspathImpl.unmanagedLibs(dep, conf, data)
 
   def getClasspath(
       key: TaskKey[Classpath],
@@ -3621,7 +3559,7 @@ object Classpaths {
       conf: String,
       data: Settings[Scope]
   ): Task[Classpath] =
-    (key in (dep, ConfigKey(conf))) get data getOrElse constant(Nil)
+    ClasspathImpl.getClasspath(key, dep, conf, data)
 
   def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
     flatten(defaultConfiguration in p get data) getOrElse Configurations.Default
@@ -3704,13 +3642,14 @@ object Classpaths {
     val ref = thisProjectRef.value
     val data = settingsData.value
     val deps = buildDependencies.value
-    internalDependenciesImplTask(
+    ClasspathImpl.internalDependenciesImplTask(
       ref,
       CompilerPlugin,
       CompilerPlugin,
       data,
       deps,
-      TrackLevel.TrackAlways
+      TrackLevel.TrackAlways,
+      streams.value.log
     )
   }
 
diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala
index 8b41aa7a0..c6b713386 100644
--- a/main/src/main/scala/sbt/Keys.scala
+++ b/main/src/main/scala/sbt/Keys.scala
@@ -36,7 +36,7 @@ import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, Upda
 import sbt.nio.file.Glob
 import sbt.testing.Framework
 import sbt.util.{ Level, Logger }
-import xsbti.FileConverter
+import xsbti.{ FileConverter, VirtualFile }
 import xsbti.compile._
 import xsbti.compile.analysis.ReadStamps
 
@@ -151,6 +151,8 @@ object Keys {
   // Output paths
   val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
+  val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining")
+  val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources")
   val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
   val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
   val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
@@ -167,6 +169,7 @@ object Keys {
   val scalacOptions = taskKey[Seq[String]]("Options for the Scala compiler.").withRank(BPlusTask)
   val javacOptions = taskKey[Seq[String]]("Options for the Java compiler.").withRank(BPlusTask)
   val incOptions = taskKey[IncOptions]("Options for the incremental compiler.").withRank(BTask)
+  val extraIncOptions = taskKey[Seq[(String, String)]]("Extra options for the incremental compiler").withRank(CTask)
   val compileOrder = settingKey[CompileOrder]("Configures the order in which Java and sources within a single compilation are compiled. Valid values are: JavaThenScala, ScalaThenJava, or Mixed.").withRank(BPlusSetting)
   val initialCommands = settingKey[String]("Initial commands to execute when starting up the Scala interpreter.").withRank(AMinusSetting)
   val cleanupCommands = settingKey[String]("Commands to execute before the Scala interpreter exits.").withRank(BMinusSetting)
@@ -211,14 +214,24 @@ object Keys {
   val manipulateBytecode = taskKey[CompileResult]("Manipulates generated bytecode").withRank(BTask)
   val compileIncremental = taskKey[CompileResult]("Actually runs the incremental compilation").withRank(DTask)
   val previousCompile = taskKey[PreviousResult]("Read the incremental compiler analysis from disk").withRank(DTask)
+  private[sbt] val compileScalaBackend = taskKey[CompileResult]("Compiles only Scala sources if pipelining is enabled. Compiles both Scala and Java sources otherwise").withRank(Invisible)
+  private[sbt] val compileEarly = taskKey[CompileAnalysis]("Compiles only Scala sources if pipelining is enabled, and produces an early output (pickle JAR)").withRank(Invisible)
+  private[sbt] val earlyOutputPing = taskKey[PromiseWrap[Boolean]]("When pipelining is enabled, this returns true when early output (pickle JAR) is created; false otherwise").withRank(Invisible)
+  private[sbt] val compileJava = taskKey[CompileResult]("Compiles only Java sources (called only for pipelining)").withRank(Invisible)
+  private[sbt] val compileSplit = taskKey[CompileResult]("When pipelining is enabled, compiles Scala then Java; otherwise compiles both").withRank(Invisible)
+
+  val compileProgress = taskKey[CompileProgress]("Callback used by the compiler to report phase progress")
   val compilers = taskKey[Compilers]("Defines the Scala and Java compilers to use for compilation.").withRank(DTask)
   val compileAnalysisFilename = taskKey[String]("Defines the filename used for compileAnalysisFile.").withRank(DTask)
   val compileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
+  val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files for early compilation").withRank(DSetting)
   val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting)
+  val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting)
   val compileIncSetup = taskKey[Setup]("Configures aspects of incremental compilation.").withRank(DTask)
   val compilerCache = taskKey[GlobalsCache]("Cache of scala.tools.nsc.Global instances. This should typically be cached so that it isn't recreated every task run.").withRank(DTask)
   val stateCompilerCache = AttributeKey[GlobalsCache]("stateCompilerCache", "Internal use: Global cache.")
   val classpathEntryDefinesClass = taskKey[File => DefinesClass]("Internal use: provides a function that determines whether the provided file contains a given class.").withRank(Invisible)
+  val classpathEntryDefinesClassVF = taskKey[VirtualFile => DefinesClass]("Internal use: provides a function that determines whether the provided file contains a given class.").withRank(Invisible)
   val doc = taskKey[File]("Generates API documentation.").withRank(AMinusTask)
   val copyResources = taskKey[Seq[(File, File)]]("Copies resources to the output directory.").withRank(AMinusTask)
   val aggregate = settingKey[Boolean]("Configures task aggregation.").withRank(BMinusSetting)
@@ -302,6 +315,7 @@ object Keys {
 
   // Classpath/Dependency Management Keys
   type Classpath = Def.Classpath
+  type VirtualClasspath = Def.VirtualClasspath
 
   val name = settingKey[String]("Project name.").withRank(APlusSetting)
   val normalizedName = settingKey[String]("Project name transformed from mixed case and spaces to lowercase and dash-separated.").withRank(BSetting)
@@ -333,12 +347,17 @@ object Keys {
   val internalDependencyClasspath = taskKey[Classpath]("The internal (inter-project) classpath.").withRank(CTask)
   val externalDependencyClasspath = taskKey[Classpath]("The classpath consisting of library dependencies, both managed and unmanaged.").withRank(BMinusTask)
   val dependencyClasspath = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(BPlusTask)
+  val dependencyVirtualClasspath = taskKey[VirtualClasspath]("The classpath consisting of internal and external, managed and unmanaged dependencies, as virtual files.").withRank(CTask)
+  val dependencyPicklePath = taskKey[VirtualClasspath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.")
+  val internalDependencyPicklePath = taskKey[VirtualClasspath]("The internal (inter-project) pickles. This task is promise-blocked.")
   val fullClasspath = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies.").withRank(BPlusTask)
   val trackInternalDependencies = settingKey[TrackLevel]("The level of tracking for the internal (inter-project) dependency.").withRank(BSetting)
   val exportToInternal = settingKey[TrackLevel]("The level of tracking for this project by the internal callers.").withRank(BSetting)
   val exportedProductJars = taskKey[Classpath]("Build products that go on the exported classpath as JARs.")
   val exportedProductJarsIfMissing = taskKey[Classpath]("Build products that go on the exported classpath as JARs if missing.")
   val exportedProductJarsNoTracking = taskKey[Classpath]("Just the exported classpath as JARs without triggering the compilation.")
+  val exportedPickles = taskKey[VirtualClasspath]("Build products that go on the exported compilation classpath as JARs. Note this is promise-blocked.").withRank(DTask)
+  val pickleProducts = taskKey[Seq[VirtualFile]]("Pickle JARs").withRank(DTask)
   val internalDependencyAsJars = taskKey[Classpath]("The internal (inter-project) classpath as JARs.")
   val dependencyClasspathAsJars = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies, all as JARs.")
   val fullClasspathAsJars = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies, all as JARs.")
@@ -357,6 +376,7 @@ object Keys {
   val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("")
   val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.")
   val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.")
+  val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting)
   val bspTargetIdentifier = settingKey[BuildTargetIdentifier]("Id for BSP build target.").withRank(DSetting)
   val bspWorkspace = settingKey[Map[BuildTargetIdentifier, Scope]]("Mapping of BSP build targets to sbt scopes").withRank(DSetting)
 
diff --git a/main/src/main/scala/sbt/internal/ClasspathImpl.scala b/main/src/main/scala/sbt/internal/ClasspathImpl.scala
new file mode 100644
index 000000000..d9f36cb88
--- /dev/null
+++ b/main/src/main/scala/sbt/internal/ClasspathImpl.scala
@@ -0,0 +1,427 @@
+/*
+ * sbt
+ * Copyright 2011 - 2018, Lightbend, Inc.
+ * Copyright 2008 - 2010, Mark Harrah
+ * Licensed under Apache License 2.0 (see LICENSE)
+ */
+
+package sbt
+package internal
+
+import java.io.File
+import java.util.LinkedHashSet
+import sbt.SlashSyntax0._
+import sbt.Keys._
+import sbt.nio.Keys._
+import sbt.nio.file.{ Glob, RecursiveGlob }
+import sbt.Def.Initialize
+import sbt.internal.inc.Analysis
+import sbt.internal.inc.JavaInterfaceUtil._
+import sbt.internal.util.{ Attributed, Dag, Settings }
+import sbt.librarymanagement.{ Configuration, TrackLevel }
+import sbt.librarymanagement.Configurations.names
+import sbt.std.TaskExtra._
+import sbt.util._
+import scala.collection.JavaConverters._
+import xsbti.compile.CompileAnalysis
+
+private[sbt] object ClasspathImpl {
+
+  // Since we can't predict the path for pickleProduct,
+  // we can't reduce the track level.
+  def exportedPicklesTask: Initialize[Task[VirtualClasspath]] =
+    Def.task {
+      val module = projectID.value
+      val config = configuration.value
+      val products = pickleProducts.value
+      val analysis = compileEarly.value
+      val xs = products map { _ -> analysis }
+      for { (f, analysis) <- xs } yield APIMappings
+        .store(analyzed(f, analysis), apiURL.value)
+        .put(moduleID.key, module)
+        .put(configuration.key, config)
+    }
+
+  def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =
+    Def.task {
+      val _ = (packageBin / dynamicDependency).value
+      val art = (artifact in packageBin).value
+      val module = projectID.value
+      val config = configuration.value
+      for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings
+        .store(analyzed(f, analysis), apiURL.value)
+        .put(artifact.key, art)
+        .put(moduleID.key, module)
+        .put(configuration.key, config)
+    }
+
+  def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =
+    Def.task {
+      val _ = (packageBin / dynamicDependency).value
+      val art = (artifact in packageBin).value
+      val module = projectID.value
+      val config = configuration.value
+      for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings
+        .store(analyzed(f, analysis), apiURL.value)
+        .put(artifact.key, art)
+        .put(moduleID.key, module)
+        .put(configuration.key, config)
+    }
+
+  private[this] def trackedExportedProductsImplTask(
+      track: TrackLevel
+  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
+    Def.taskDyn {
+      val _ = (packageBin / dynamicDependency).value
+      val useJars = exportJars.value
+      if (useJars) trackedJarProductsImplTask(track)
+      else trackedNonJarProductsImplTask(track)
+    }
+
+  private[this] def trackedNonJarProductsImplTask(
+      track: TrackLevel
+  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
+    Def.taskDyn {
+      val dirs = productDirectories.value
+      val view = fileTreeView.value
+      def containsClassFile(): Boolean =
+        view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).nonEmpty
+      TrackLevel.intersection(track, exportToInternal.value) match {
+        case TrackLevel.TrackAlways =>
+          Def.task {
+            products.value map { (_, compile.value) }
+          }
+        case TrackLevel.TrackIfMissing if !containsClassFile() =>
+          Def.task {
+            products.value map { (_, compile.value) }
+          }
+        case _ =>
+          Def.task {
+            val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
+            dirs.map(_ -> analysis)
+          }
+      }
+    }
+
+  private[this] def trackedJarProductsImplTask(
+      track: TrackLevel
+  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
+    Def.taskDyn {
+      val jar = (artifactPath in packageBin).value
+      TrackLevel.intersection(track, exportToInternal.value) match {
+        case TrackLevel.TrackAlways =>
+          Def.task {
+            Seq((packageBin.value, compile.value))
+          }
+        case TrackLevel.TrackIfMissing if !jar.exists =>
+          Def.task {
+            Seq((packageBin.value, compile.value))
+          }
+        case _ =>
+          Def.task {
+            val analysisOpt = previousCompile.value.analysis.toOption
+            Seq(jar) map { x =>
+              (
+                x,
+                if (analysisOpt.isDefined) analysisOpt.get
+                else Analysis.empty
+              )
+            }
+          }
+      }
+    }
+
+  def internalDependencyClasspathTask: Initialize[Task[Classpath]] = {
+    Def.taskDyn {
+      val _ = (
+        (exportedProductsNoTracking / transitiveClasspathDependency).value,
+        (exportedProductsIfMissing / transitiveClasspathDependency).value,
+        (exportedProducts / transitiveClasspathDependency).value,
+        (exportedProductJarsNoTracking / transitiveClasspathDependency).value,
+        (exportedProductJarsIfMissing / transitiveClasspathDependency).value,
+        (exportedProductJars / transitiveClasspathDependency).value
+      )
+      internalDependenciesImplTask(
+        thisProjectRef.value,
+        classpathConfiguration.value,
+        configuration.value,
+        settingsData.value,
+        buildDependencies.value,
+        trackInternalDependencies.value,
+        streams.value.log,
+      )
+    }
+  }
+
+  def internalDependenciesImplTask(
+      projectRef: ProjectRef,
+      conf: Configuration,
+      self: Configuration,
+      data: Settings[Scope],
+      deps: BuildDependencies,
+      track: TrackLevel,
+      log: Logger
+  ): Initialize[Task[Classpath]] =
+    Def.value {
+      interDependencies(projectRef, deps, conf, self, data, track, false, log)(
+        exportedProductsNoTracking,
+        exportedProductsIfMissing,
+        exportedProducts
+      )
+    }
+
+  def internalDependencyPicklePathTask: Initialize[Task[VirtualClasspath]] = {
+    def implTask(
+        projectRef: ProjectRef,
+        conf: Configuration,
+        self: Configuration,
+        data: Settings[Scope],
+        deps: BuildDependencies,
+        track: TrackLevel,
+        log: Logger
+    ): Initialize[Task[VirtualClasspath]] =
+      Def.value {
+        interDependencies(projectRef, deps, conf, self, data, track, false, log)(
+          exportedPickles,
+          exportedPickles,
+          exportedPickles
+        )
+      }
+    Def.taskDyn {
+      implTask(
+        thisProjectRef.value,
+        classpathConfiguration.value,
+        configuration.value,
+        settingsData.value,
+        buildDependencies.value,
+        TrackLevel.TrackAlways,
+        streams.value.log,
+      )
+    }
+  }
+
+  def internalDependencyJarsTask: Initialize[Task[Classpath]] =
+    Def.taskDyn {
+      internalDependencyJarsImplTask(
+        thisProjectRef.value,
+        classpathConfiguration.value,
+        configuration.value,
+        settingsData.value,
+        buildDependencies.value,
+        trackInternalDependencies.value,
+        streams.value.log,
+      )
+    }
+
+  private def internalDependencyJarsImplTask(
+      projectRef: ProjectRef,
+      conf: Configuration,
+      self: Configuration,
+      data: Settings[Scope],
+      deps: BuildDependencies,
+      track: TrackLevel,
+      log: Logger
+  ): Initialize[Task[Classpath]] =
+    Def.value {
+      interDependencies(projectRef, deps, conf, self, data, track, false, log)(
+        exportedProductJarsNoTracking,
+        exportedProductJarsIfMissing,
+        exportedProductJars
+      )
+    }
+
+  def unmanagedDependenciesTask: Initialize[Task[Classpath]] =
+    Def.taskDyn {
+      unmanagedDependencies0(
+        thisProjectRef.value,
+        configuration.value,
+        settingsData.value,
+        buildDependencies.value,
+        streams.value.log
+      )
+    }
+
+  def unmanagedDependencies0(
+      projectRef: ProjectRef,
+      conf: Configuration,
+      data: Settings[Scope],
+      deps: BuildDependencies,
+      log: Logger
+  ): Initialize[Task[Classpath]] =
+    Def.value {
+      interDependencies(
+        projectRef,
+        deps,
+        conf,
+        conf,
+        data,
+        TrackLevel.TrackAlways,
+        true,
+        log
+      )(
+        unmanagedJars,
+        unmanagedJars,
+        unmanagedJars
+      )
+    }
+
+  def unmanagedLibs(
+      dep: ResolvedReference,
+      conf: String,
+      data: Settings[Scope]
+  ): Task[Classpath] =
+    getClasspath(unmanagedJars, dep, conf, data)
+
+  def interDependencies[A](
+      projectRef: ProjectRef,
+      deps: BuildDependencies,
+      conf: Configuration,
+      self: Configuration,
+      data: Settings[Scope],
+      track: TrackLevel,
+      includeSelf: Boolean,
+      log: Logger
+  )(
+      noTracking: TaskKey[Seq[A]],
+      trackIfMissing: TaskKey[Seq[A]],
+      trackAlways: TaskKey[Seq[A]]
+  ): Task[Seq[A]] = {
+    val interDepConfigs = interSort(projectRef, conf, data, deps) filter {
+      case (dep, c) =>
+        includeSelf || (dep != projectRef) || (conf.name != c && self.name != c)
+    }
+    val tasks = (new LinkedHashSet[Task[Seq[A]]]).asScala
+    for {
+      (dep, c) <- interDepConfigs
+    } {
+      tasks += (track match {
+        case TrackLevel.NoTracking =>
+          getClasspath(noTracking, dep, c, data)
+        case TrackLevel.TrackIfMissing =>
+          getClasspath(trackIfMissing, dep, c, data)
+        case TrackLevel.TrackAlways =>
+          getClasspath(trackAlways, dep, c, data)
+      })
+    }
+    (tasks.toSeq.join).map(_.flatten.distinct)
+  }
+
+  def analyzed[A](data: A, analysis: CompileAnalysis) =
+    Attributed.blank(data).put(Keys.analysis, analysis)
+
+  def interSort(
+      projectRef: ProjectRef,
+      conf: Configuration,
+      data: Settings[Scope],
+      deps: BuildDependencies
+  ): Seq[(ProjectRef, String)] = {
+    val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
+    def visit(p: ProjectRef, c: Configuration): Unit = {
+      val applicableConfigs = allConfigs(c)
+      for {
+        ac <- applicableConfigs
+      } // add all configurations in this project
+      visited add (p -> ac.name)
+      val masterConfs = names(getConfigurations(projectRef, data).toVector)
+
+      for {
+        ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
+      } {
+        val configurations = getConfigurations(dep, data)
+        val mapping =
+          mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile")
+        // map master configuration 'c' and all extended configurations to the appropriate dependency configuration
+        for {
+          ac <- applicableConfigs
+          depConfName <- mapping(ac.name)
+        } {
+          for {
+            depConf <- confOpt(configurations, depConfName)
+          } if (!visited((dep, depConfName))) {
+            visit(dep, depConf)
+          }
+        }
+      }
+    }
+    visit(projectRef, conf)
+    visited.toSeq
+  }
+
+  def mapped(
+      confString: Option[String],
+      masterConfs: Seq[String],
+      depConfs: Seq[String],
+      default: String,
+      defaultMapping: String
+  ): String => Seq[String] = {
+    lazy val defaultMap = parseMapping(defaultMapping, masterConfs, depConfs, _ :: Nil)
+    parseMapping(confString getOrElse default, masterConfs, depConfs, defaultMap)
+  }
+
+  def parseMapping(
+      confString: String,
+      masterConfs: Seq[String],
+      depConfs: Seq[String],
+      default: String => Seq[String]
+  ): String => Seq[String] =
+    union(confString.split(";") map parseSingleMapping(masterConfs, depConfs, default))
+
+  def parseSingleMapping(
+      masterConfs: Seq[String],
+      depConfs: Seq[String],
+      default: String => Seq[String]
+  )(confString: String): String => Seq[String] = {
+    val ms: Seq[(String, Seq[String])] =
+      trim(confString.split("->", 2)) match {
+        case x :: Nil => for (a <- parseList(x, masterConfs)) yield (a, default(a))
+        case x :: y :: Nil =>
+          val target = parseList(y, depConfs);
+          for (a <- parseList(x, masterConfs)) yield (a, target)
+        case _ => sys.error("Invalid configuration '" + confString + "'") // shouldn't get here
+      }
+    val m = ms.toMap
+    s => m.getOrElse(s, Nil)
+  }
+
+  def union[A, B](maps: Seq[A => Seq[B]]): A => Seq[B] =
+    a => maps.foldLeft(Seq[B]()) { _ ++ _(a) } distinct;
+
+  def parseList(s: String, allConfs: Seq[String]): Seq[String] =
+    (trim(s split ",") flatMap replaceWildcard(allConfs)).distinct
+
+  def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] = conf match {
+    case "" => Nil
+    case "*" => allConfs
+    case _ => conf :: Nil
+  }
+
+  private def trim(a: Array[String]): List[String] = a.toList.map(_.trim)
+
+  def allConfigs(conf: Configuration): Seq[Configuration] =
+    Dag.topologicalSort(conf)(_.extendsConfigs)
+
+  def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
+    (p / ivyConfigurations).get(data).getOrElse(Nil)
+
+  def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
+    configurations.find(_.name == conf)
+
+  def getClasspath[A](
+      key: TaskKey[Seq[A]],
+      dep: ResolvedReference,
+      conf: Configuration,
+      data: Settings[Scope]
+  ): Task[Seq[A]] = getClasspath(key, dep, conf.name, data)
+
+  def getClasspath[A](
+      key: TaskKey[Seq[A]],
+      dep: ResolvedReference,
+      conf: String,
+      data: Settings[Scope]
+  ): Task[Seq[A]] =
+    (dep / ConfigKey(conf) / key).get(data) match {
+      case Some(x) => x
+      case _ => constant(Nil)
+    }
+
+}
diff --git a/main/src/main/scala/sbt/internal/SysProp.scala b/main/src/main/scala/sbt/internal/SysProp.scala
index 43ef5ec00..15c971af4 100644
--- a/main/src/main/scala/sbt/internal/SysProp.scala
+++ b/main/src/main/scala/sbt/internal/SysProp.scala
@@ -116,6 +116,7 @@ object SysProp {
 
   def banner: Boolean = getOrTrue("sbt.banner")
   def turbo: Boolean = getOrFalse("sbt.turbo")
+  def pipelining: Boolean = getOrFalse("sbt.pipelining")
   def taskTimings: Boolean = getOrFalse("sbt.task.timings")
   def taskTimingsOnShutdown: Boolean = getOrFalse("sbt.task.timings.on.shutdown")
 
diff --git a/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala b/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala
new file mode 100644
index 000000000..56ca6ad78
--- /dev/null
+++ b/main/src/main/scala/sbt/internal/VirtualFileValueCache.scala
@@ -0,0 +1,69 @@
+/*
+ * sbt
+ * Copyright 2011 - 2018, Lightbend, Inc.
+ * Copyright 2008 - 2010, Mark Harrah
+ * Licensed under Apache License 2.0 (see LICENSE)
+ */
+
+package sbt
+package internal
+
+import java.util.concurrent.ConcurrentHashMap
+import sbt.internal.inc.Stamper
+import xsbti.{ FileConverter, VirtualFile, VirtualFileRef }
+import xsbti.compile.analysis.{ Stamp => XStamp }
+
+/**
+ * Cache based on path and its stamp.
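+ * The cached value is recomputed only when the stamp of the file, derived
+ * from the content hash of the VirtualFile, changes. A sketch of the
+ * intended use; the `converter`, `expensiveScan`, and `ScanResult` names
+ * below are illustrative, not part of this patch:
+ *
+ * {{{
+ * val cache = VirtualFileValueCache(converter) { vf => expensiveScan(vf) }
+ * val scan: VirtualFile => ScanResult = cache.get
+ * scan(vf) // recomputed only if vf's content hash changed since the last call
+ * }}}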
+ */
+sealed trait VirtualFileValueCache[A] {
+  def clear(): Unit
+  def get: VirtualFile => A
+}
+
+object VirtualFileValueCache {
+  def apply[A](converter: FileConverter)(f: VirtualFile => A): VirtualFileValueCache[A] = {
+    import collection.mutable.{ HashMap, Map }
+    val stampCache: Map[VirtualFileRef, (Long, XStamp)] = new HashMap
+    make(
+      Stamper.timeWrap(stampCache, converter, {
+        case (vf: VirtualFile) => Stamper.forContentHash(vf)
+      })
+    )(f)
+  }
+  def make[A](stamp: VirtualFile => XStamp)(f: VirtualFile => A): VirtualFileValueCache[A] =
+    new VirtualFileValueCache0[A](stamp, f)
+}
+
+private[this] final class VirtualFileValueCache0[A](
+    getStamp: VirtualFile => XStamp,
+    make: VirtualFile => A
+)(
+    implicit equiv: Equiv[XStamp]
+) extends VirtualFileValueCache[A] {
+  private[this] val backing = new ConcurrentHashMap[VirtualFile, VirtualFileCache]
+
+  def clear(): Unit = backing.clear()
+  def get = file => {
+    val ifAbsent = new VirtualFileCache(file)
+    val cache = backing.putIfAbsent(file, ifAbsent)
+    (if (cache eq null) ifAbsent else cache).get()
+  }
+
+  private[this] final class VirtualFileCache(file: VirtualFile) {
+    private[this] var stampedValue: Option[(XStamp, A)] = None
+    def get(): A = synchronized {
+      val latest = getStamp(file)
+      stampedValue match {
+        case Some((stamp, value)) if (equiv.equiv(latest, stamp)) => value
+        case _ => update(latest)
+      }
+    }
+
+    private[this] def update(stamp: XStamp): A = {
+      val value = make(file)
+      stampedValue = Some((stamp, value))
+      value
+    }
+  }
+}
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index e640a014d..f6b238b17 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -14,7 +14,7 @@ object Dependencies {
   private val ioVersion = nightlyVersion.getOrElse("1.4.0-M6")
   private val lmVersion =
     sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("1.4.0-M1")
-  val zincVersion = nightlyVersion.getOrElse("1.4.0-M7")
+  val zincVersion = nightlyVersion.getOrElse("1.4.0-M8")
 
   private val sbtIO = "org.scala-sbt" %% "io" % ioVersion
 
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining-java/build.sbt b/sbt/src/sbt-test/source-dependencies/pipelining-java/build.sbt
new file mode 100644
index 000000000..a5b9eef0d
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining-java/build.sbt
@@ -0,0 +1,13 @@
+ThisBuild / scalaVersion := "2.13.3"
+ThisBuild / usePipelining := true
+
+lazy val root = (project in file("."))
+  .aggregate(dep, use)
+  .settings(
+    name := "pipelining Java",
+  )
+
+lazy val dep = project
+
+lazy val use = project
+  .dependsOn(dep)
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining-java/changes/Break.java b/sbt/src/sbt-test/source-dependencies/pipelining-java/changes/Break.java
new file mode 100644
index 000000000..014567f98
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining-java/changes/Break.java
@@ -0,0 +1,2 @@
+public class Break {
+}
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining-java/dep/A.java b/sbt/src/sbt-test/source-dependencies/pipelining-java/dep/A.java
new file mode 100644
index 000000000..56fa6cd06
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining-java/dep/A.java
@@ -0,0 +1,3 @@
+public class A {
+  public static int x = 3;
+}
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining-java/test b/sbt/src/sbt-test/source-dependencies/pipelining-java/test
new file mode 100644
index 000000000..78b6178ee
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining-java/test
@@ -0,0 +1,5 @@
+> use/compile
+
+$ delete dep/A.java
+$ copy-file changes/Break.java dep/Break.java
+-> use/compile
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining-java/use/B.java b/sbt/src/sbt-test/source-dependencies/pipelining-java/use/B.java
new file mode 100644
index 000000000..60121dd8e
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining-java/use/B.java
@@ -0,0 +1,3 @@
+public class B {
+  public static int y = A.x;
+}
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining/build.sbt b/sbt/src/sbt-test/source-dependencies/pipelining/build.sbt
new file mode 100644
index 000000000..36db86700
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining/build.sbt
@@ -0,0 +1,22 @@
+ThisBuild / scalaVersion := "2.13.3"
+ThisBuild / usePipelining := true
+
+lazy val root = (project in file("."))
+  .aggregate(dep, use)
+  .settings(
+    name := "pipelining basics",
+  )
+
+lazy val dep = project
+
+lazy val use = project
+  .dependsOn(dep)
+  .settings(
+    TaskKey[Unit]("checkPickle") := {
+      val s = streams.value
+      val x = (dep / Compile / compile).value
+      val picklePath = (Compile / internalDependencyPicklePath).value
+      assert(picklePath.size == 1 &&
+        picklePath.head.data.name == "dep_2.13-0.1.0-SNAPSHOT.jar", s"picklePath = ${picklePath}")
+    },
+  )
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining/changes/Break.scala b/sbt/src/sbt-test/source-dependencies/pipelining/changes/Break.scala
new file mode 100644
index 000000000..b2dfe50e1
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining/changes/Break.scala
@@ -0,0 +1,3 @@
+package example
+
+object Break
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining/dep/A.scala b/sbt/src/sbt-test/source-dependencies/pipelining/dep/A.scala
new file mode 100644
index 000000000..2221c2ebc
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining/dep/A.scala
@@ -0,0 +1,5 @@
+package example
+
+object A {
+  val x = 3
+}
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining/test b/sbt/src/sbt-test/source-dependencies/pipelining/test
new file mode 100644
index 000000000..6b3baa1c6
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining/test
@@ -0,0 +1,9 @@
+> dep/compile
+
+> use/checkPickle
+
+> compile
+
+# breaking subproject dep should trigger failure
+$ copy-file changes/Break.scala dep/A.scala
+-> compile
diff --git a/sbt/src/sbt-test/source-dependencies/pipelining/use/B.scala b/sbt/src/sbt-test/source-dependencies/pipelining/use/B.scala
new file mode 100644
index 000000000..f8ec39178
--- /dev/null
+++ b/sbt/src/sbt-test/source-dependencies/pipelining/use/B.scala
@@ -0,0 +1,5 @@
+package example
+
+object B {
+  val y = A.x
+}
diff --git a/server-test/src/test/scala/testpkg/ClientTest.scala b/server-test/src/test/scala/testpkg/ClientTest.scala
index 145f12e0f..bdb71b513 100644
--- a/server-test/src/test/scala/testpkg/ClientTest.scala
+++ b/server-test/src/test/scala/testpkg/ClientTest.scala
@@ -93,11 +93,17 @@ object ClientTest extends AbstractServerTest {
       "compileAnalysisFile",
       "compileAnalysisFilename",
       "compileAnalysisTargetRoot",
+      "compileEarly",
       "compileIncSetup",
       "compileIncremental",
+      "compileJava",
       "compileOutputs",
+      "compileProgress",
+      "compileScalaBackend",
+      "compileSplit",
       "compilers",
     )
+    assert(complete("compi") == expected)
   }
   test("testOnly completions") { _ =>