mirror of https://github.com/sbt/sbt.git
commit
c2602e0d44
|
|
@ -22,4 +22,16 @@ object MacroDefaults {
|
|||
import c.universe._
|
||||
q"sbt.Keys.fileTreeRepository.value: @sbtUnchecked"
|
||||
}
|
||||
|
||||
/**
|
||||
* Macro to generated default file tree repository. It must be defined as an untyped tree because
|
||||
* sbt.Keys is not available in this project. This is meant for internal use only, but must be
|
||||
* public because its a macro.
|
||||
* @param c the macro context
|
||||
* @return the tree expressing the default file tree repository.
|
||||
*/
|
||||
def dynamicInputs(c: blackbox.Context): c.Tree = {
|
||||
import c.universe._
|
||||
q"sbt.internal.Continuous.dynamicInputs.value: @sbtUnchecked"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -33,7 +33,8 @@ object BasicKeys {
|
|||
"The function that constructs the command prompt from the current build state.",
|
||||
10000
|
||||
)
|
||||
val watch = AttributeKey[Watched]("watch", "Continuous execution configuration.", 1000)
|
||||
val watch =
|
||||
AttributeKey[Watched]("watched", "Continuous execution configuration.", 1000)
|
||||
val serverPort =
|
||||
AttributeKey[Int]("server-port", "The port number used by server command.", 10000)
|
||||
|
||||
|
|
|
|||
|
|
@ -7,26 +7,16 @@
|
|||
|
||||
package sbt
|
||||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.nio.file.{ FileSystems, Path }
|
||||
import java.io.File
|
||||
import java.nio.file.FileSystems
|
||||
|
||||
import sbt.BasicCommandStrings.{
|
||||
ContinuousExecutePrefix,
|
||||
FailureWall,
|
||||
continuousBriefHelp,
|
||||
continuousDetail
|
||||
}
|
||||
import sbt.BasicCommands.otherCommandParser
|
||||
import sbt.internal.LabeledFunctions._
|
||||
import sbt.internal.LegacyWatched
|
||||
import sbt.internal.io.{ EventMonitor, Source, WatchState }
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.complete.{ DefaultParsers, Parser }
|
||||
import sbt.internal.util.{ AttributeKey, JLine }
|
||||
import sbt.internal.{ FileAttributes, LegacyWatched }
|
||||
import sbt.internal.util.AttributeKey
|
||||
import sbt.io._
|
||||
import sbt.util.{ Level, Logger }
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.Properties
|
||||
|
||||
|
|
@ -38,8 +28,8 @@ trait Watched {
|
|||
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
|
||||
|
||||
/**
|
||||
* The time in milliseconds between checking for changes. The actual time between the last change made to a file and the
|
||||
* execution time is between `pollInterval` and `pollInterval*2`.
|
||||
* The time in milliseconds between checking for changes. The actual time between the last change
|
||||
* made to a file and the execution time is between `pollInterval` and `pollInterval*2`.
|
||||
*/
|
||||
def pollInterval: FiniteDuration = Watched.PollDelay
|
||||
|
||||
|
|
@ -61,128 +51,16 @@ trait Watched {
|
|||
|
||||
object Watched {
|
||||
|
||||
/**
|
||||
* This trait is used to communicate what the watch should do next at various points in time. It
|
||||
* is heavily linked to a number of callbacks in [[WatchConfig]]. For example, when the event
|
||||
* monitor detects a changed source we expect [[WatchConfig.onWatchEvent]] to return [[Trigger]].
|
||||
*/
|
||||
sealed trait Action
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should stop.
|
||||
*/
|
||||
case object CancelWatch extends Action
|
||||
|
||||
/**
|
||||
* Action that indicates that an error has occurred. The watch will be terminated when this action
|
||||
* is produced.
|
||||
*/
|
||||
case object HandleError extends Action
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should continue as though nothing happened. This may be
|
||||
* because, for example, no user input was yet available in [[WatchConfig.handleInput]].
|
||||
*/
|
||||
case object Ignore extends Action
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should pause while the build is reloaded. This is used to
|
||||
* automatically reload the project when the build files (e.g. build.sbt) are changed.
|
||||
*/
|
||||
case object Reload extends Action
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch process should re-run the command.
|
||||
*/
|
||||
case object Trigger extends Action
|
||||
|
||||
/**
|
||||
* A user defined Action. It is not sealed so that the user can create custom instances. If any
|
||||
* of the [[WatchConfig]] callbacks, e.g. [[WatchConfig.onWatchEvent]], return an instance of
|
||||
* [[Custom]], the watch will terminate.
|
||||
*/
|
||||
trait Custom extends Action
|
||||
|
||||
@deprecated("WatchSource is replaced by sbt.io.Glob", "1.3.0")
|
||||
type WatchSource = Source
|
||||
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
|
||||
|
||||
private[this] val isWin = Properties.isWin
|
||||
private def drain(is: InputStream): Unit = while (is.available > 0) is.read()
|
||||
private def withCharBufferedStdIn[R](f: InputStream => R): R =
|
||||
if (!isWin) JLine.usingTerminal { terminal =>
|
||||
terminal.init()
|
||||
val in = terminal.wrapInIfNeeded(System.in)
|
||||
try {
|
||||
drain(in)
|
||||
f(in)
|
||||
} finally {
|
||||
drain(in)
|
||||
terminal.reset()
|
||||
}
|
||||
} else
|
||||
try {
|
||||
drain(System.in)
|
||||
f(System.in)
|
||||
} finally drain(System.in)
|
||||
|
||||
private[sbt] final val handleInput: InputStream => Action = in => {
|
||||
@tailrec
|
||||
def scanInput(): Action = {
|
||||
if (in.available > 0) {
|
||||
in.read() match {
|
||||
case key if isEnter(key) => CancelWatch
|
||||
case key if isR(key) && !isWin => Trigger
|
||||
case key if key >= 0 => scanInput()
|
||||
case _ => Ignore
|
||||
}
|
||||
} else {
|
||||
Ignore
|
||||
}
|
||||
}
|
||||
scanInput()
|
||||
}
|
||||
private[sbt] def onEvent(
|
||||
sources: Seq[WatchSource],
|
||||
projectSources: Seq[WatchSource]
|
||||
): FileAttributes.Event => Watched.Action =
|
||||
event =>
|
||||
if (sources.exists(_.accept(event.path))) Watched.Trigger
|
||||
else if (projectSources.exists(_.accept(event.path))) {
|
||||
(event.previous, event.current) match {
|
||||
case (Some(p), Some(c)) => if (c == p) Watched.Ignore else Watched.Reload
|
||||
case _ => Watched.Trigger
|
||||
}
|
||||
} else Ignore
|
||||
|
||||
private[this] val reRun = if (isWin) "" else " or 'r' to re-run the command"
|
||||
private def waitMessage(project: String): String =
|
||||
s"Waiting for source changes$project... (press enter to interrupt$reRun)"
|
||||
val defaultStartWatch: Int => Option[String] =
|
||||
((count: Int) => Some(s"$count. ${waitMessage("")}")).label("Watched.defaultStartWatch")
|
||||
@deprecated("Use defaultStartWatch in conjunction with the watchStartMessage key", "1.3.0")
|
||||
val defaultWatchingMessage: WatchState => String =
|
||||
((ws: WatchState) => defaultStartWatch(ws.count).get).label("Watched.defaultWatchingMessage")
|
||||
def projectWatchingMessage(projectId: String): WatchState => String =
|
||||
((ws: WatchState) => projectOnWatchMessage(projectId)(ws.count).get)
|
||||
.label("Watched.projectWatchingMessage")
|
||||
def projectOnWatchMessage(project: String): Int => Option[String] =
|
||||
((count: Int) => Some(s"$count. ${waitMessage(s" in project $project")}"))
|
||||
.label("Watched.projectOnWatchMessage")
|
||||
s"Waiting for source changes$project... (press enter to interrupt)"
|
||||
|
||||
val defaultOnTriggerMessage: Int => Option[String] =
|
||||
((_: Int) => None).label("Watched.defaultOnTriggerMessage")
|
||||
@deprecated(
|
||||
"Use defaultOnTriggerMessage in conjunction with the watchTriggeredMessage key",
|
||||
"1.3.0"
|
||||
)
|
||||
val defaultTriggeredMessage: WatchState => String =
|
||||
const("").label("Watched.defaultTriggeredMessage")
|
||||
val clearOnTrigger: Int => Option[String] = _ => Some(clearScreen)
|
||||
@deprecated("Use clearOnTrigger in conjunction with the watchTriggeredMessage key", "1.3.0")
|
||||
val clearWhenTriggered: WatchState => String =
|
||||
const(clearScreen).label("Watched.clearWhenTriggered")
|
||||
def clearScreen: String = "\u001b[2J\u001b[0;0H"
|
||||
|
||||
@deprecated("WatchSource has been replaced by sbt.io.Glob", "1.3.0")
|
||||
object WatchSource {
|
||||
|
||||
/**
|
||||
|
|
@ -206,6 +84,48 @@ object Watched {
|
|||
|
||||
}
|
||||
|
||||
private[sbt] val newWatchService: () => WatchService =
|
||||
(() => createWatchService()).label("Watched.newWatchService")
|
||||
def createWatchService(pollDelay: FiniteDuration): WatchService = {
|
||||
def closeWatch = new MacOSXWatchService()
|
||||
sys.props.get("sbt.watch.mode") match {
|
||||
case Some("polling") =>
|
||||
new PollingWatchService(pollDelay)
|
||||
case Some("nio") =>
|
||||
FileSystems.getDefault.newWatchService()
|
||||
case Some("closewatch") => closeWatch
|
||||
case _ if Properties.isMac => closeWatch
|
||||
case _ =>
|
||||
FileSystems.getDefault.newWatchService()
|
||||
}
|
||||
}
|
||||
|
||||
@deprecated("This is no longer used by continuous builds.", "1.3.0")
|
||||
def printIfDefined(msg: String): Unit = if (!msg.isEmpty) System.out.println(msg)
|
||||
@deprecated("This is no longer used by continuous builds.", "1.3.0")
|
||||
def isEnter(key: Int): Boolean = key == 10 || key == 13
|
||||
@deprecated("Replaced by defaultPollInterval", "1.3.0")
|
||||
val PollDelay: FiniteDuration = 500.milliseconds
|
||||
@deprecated("Replaced by defaultAntiEntropy", "1.3.0")
|
||||
val AntiEntropy: FiniteDuration = 40.milliseconds
|
||||
@deprecated("Use the version that explicitly takes the poll delay", "1.3.0")
|
||||
def createWatchService(): WatchService = createWatchService(PollDelay)
|
||||
|
||||
@deprecated("Replaced by Watched.command", "1.3.0")
|
||||
def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State =
|
||||
LegacyWatched.executeContinuously(watched, s, next, repeat)
|
||||
|
||||
// Deprecated apis below
|
||||
@deprecated("unused", "1.3.0")
|
||||
def projectWatchingMessage(projectId: String): WatchState => String =
|
||||
((ws: WatchState) => projectOnWatchMessage(projectId)(ws.count, projectId, Nil).get)
|
||||
.label("Watched.projectWatchingMessage")
|
||||
@deprecated("unused", "1.3.0")
|
||||
def projectOnWatchMessage(project: String): (Int, String, Seq[String]) => Option[String] = {
|
||||
(count: Int, _: String, _: Seq[String]) =>
|
||||
Some(s"$count. ${waitMessage(s" in project $project")}")
|
||||
}.label("Watched.projectOnWatchMessage")
|
||||
|
||||
@deprecated("This method is not used and may be removed in a future version of sbt", "1.3.0")
|
||||
private[this] class AWatched extends Watched
|
||||
|
||||
|
|
@ -223,350 +143,37 @@ object Watched {
|
|||
@deprecated("This method is not used and may be removed in a future version of sbt", "1.3.0")
|
||||
def empty: Watched = new AWatched
|
||||
|
||||
val PollDelay: FiniteDuration = 500.milliseconds
|
||||
val AntiEntropy: FiniteDuration = 40.milliseconds
|
||||
def isEnter(key: Int): Boolean = key == 10 || key == 13
|
||||
def isR(key: Int): Boolean = key == 82 || key == 114
|
||||
def printIfDefined(msg: String): Unit = if (!msg.isEmpty) System.out.println(msg)
|
||||
|
||||
private type RunCommand = () => State
|
||||
private type WatchSetup = (State, String) => (State, WatchConfig, RunCommand => State)
|
||||
|
||||
/**
|
||||
* Provides the '~' continuous execution command.
|
||||
* @param setup a function that provides a logger and a function from (() => State) => State.
|
||||
* @return the '~' command.
|
||||
*/
|
||||
def continuous(setup: WatchSetup): Command =
|
||||
Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(otherCommandParser) {
|
||||
(state, command) =>
|
||||
Watched.executeContinuously(state, command, setup)
|
||||
}
|
||||
|
||||
/**
|
||||
* Default handler to transform the state when the watch terminates. When the [[Watched.Action]] is
|
||||
* [[Reload]], the handler will prepend the original command (prefixed by ~) to the
|
||||
* [[State.remainingCommands]] and then invoke the [[StateOps.reload]] method. When the
|
||||
* [[Watched.Action]] is [[HandleError]], the handler returns the result of [[StateOps.fail]]. Otherwise
|
||||
* the original state is returned.
|
||||
*/
|
||||
private[sbt] val onTermination: (Action, String, State) => State = (action, command, state) =>
|
||||
action match {
|
||||
case Reload =>
|
||||
val continuousCommand = Exec(ContinuousExecutePrefix + command, None)
|
||||
state.copy(remainingCommands = continuousCommand +: state.remainingCommands).reload
|
||||
case HandleError => state.fail
|
||||
case _ => state
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements continuous execution. It works by first parsing the command and generating a task to
|
||||
* run with each build. It can run multiple commands that are separated by ";" in the command
|
||||
* input. If any of these commands are invalid, the watch will immediately exit.
|
||||
* @param state the initial state
|
||||
* @param command the command(s) to repeatedly apply
|
||||
* @param setup function to generate a logger and a transformation of the resultant state. The
|
||||
* purpose of the transformation is to preserve the logging semantics that existed
|
||||
* in the legacy version of this function in which the task would be run through
|
||||
* MainLoop.processCommand, which is unavailable in the main-command project
|
||||
* @return the initial state if all of the input commands are valid. Otherwise, returns the
|
||||
* initial state with the failure transformation.
|
||||
*/
|
||||
private[sbt] def executeContinuously(
|
||||
state: State,
|
||||
command: String,
|
||||
setup: WatchSetup,
|
||||
): State = withCharBufferedStdIn { in =>
|
||||
val (s0, config, newState) = setup(state, command)
|
||||
val failureCommandName = "SbtContinuousWatchOnFail"
|
||||
val onFail = Command.command(failureCommandName)(identity)
|
||||
val s = (FailureWall :: s0).copy(
|
||||
onFailure = Some(Exec(failureCommandName, None)),
|
||||
definedCommands = s0.definedCommands :+ onFail
|
||||
)
|
||||
val commands = Parser.parse(command, BasicCommands.multiParserImpl(Some(s))) match {
|
||||
case Left(_) => command :: Nil
|
||||
case Right(c) => c
|
||||
}
|
||||
val parser = Command.combine(s.definedCommands)(s)
|
||||
val tasks = commands.foldLeft(Nil: Seq[Either[String, () => Either[Exception, Boolean]]]) {
|
||||
(t, cmd) =>
|
||||
t :+ (DefaultParsers.parse(cmd, parser) match {
|
||||
case Right(task) =>
|
||||
Right { () =>
|
||||
try {
|
||||
Right(newState(task).remainingCommands.forall(_.commandLine != failureCommandName))
|
||||
} catch { case e: Exception => Left(e) }
|
||||
}
|
||||
case Left(_) => Left(cmd)
|
||||
})
|
||||
}
|
||||
val (valid, invalid) = tasks.partition(_.isRight)
|
||||
if (invalid.isEmpty) {
|
||||
val task = () =>
|
||||
valid.foldLeft(Right(true): Either[Exception, Boolean]) {
|
||||
case (status, Right(t)) => if (status.getOrElse(true)) t() else status
|
||||
case _ => throw new IllegalStateException("Should be unreachable")
|
||||
}
|
||||
val terminationAction = watch(in, task, config)
|
||||
config.onWatchTerminated(terminationAction, command, state)
|
||||
} else {
|
||||
val commands = invalid.flatMap(_.left.toOption).mkString("'", "', '", "'")
|
||||
config.logger.error(s"Terminating watch due to invalid command(s): $commands")
|
||||
state.fail
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def watch(
|
||||
in: InputStream,
|
||||
task: () => Either[Exception, Boolean],
|
||||
config: WatchConfig
|
||||
): Action = {
|
||||
val logger = config.logger
|
||||
def info(msg: String): Unit = if (msg.nonEmpty) logger.info(msg)
|
||||
|
||||
@tailrec
|
||||
def impl(count: Int): Action = {
|
||||
@tailrec
|
||||
def nextAction(): Action = {
|
||||
config.handleInput(in) match {
|
||||
case action @ (CancelWatch | HandleError | Reload | _: Custom) => action
|
||||
case Trigger => Trigger
|
||||
case _ =>
|
||||
val events = config.fileEventMonitor
|
||||
.poll(10.millis)
|
||||
.map(new FileAttributes.EventImpl(_))
|
||||
val next = events match {
|
||||
case Seq() => (Ignore, None)
|
||||
case Seq(head, tail @ _*) =>
|
||||
/*
|
||||
* We traverse all of the events and find the one for which we give the highest
|
||||
* weight.
|
||||
* Custom > HandleError > CancelWatch > Reload > Trigger > Ignore
|
||||
*/
|
||||
tail.foldLeft((config.onWatchEvent(head), Some(head))) {
|
||||
case (current @ (_: Custom, _), _) => current
|
||||
case (current @ (action, _), event) =>
|
||||
config.onWatchEvent(event) match {
|
||||
case HandleError => (HandleError, Some(event))
|
||||
case CancelWatch if action != HandleError => (CancelWatch, Some(event))
|
||||
case Reload if action != HandleError && action != CancelWatch =>
|
||||
(Reload, Some(event))
|
||||
case Trigger if action == Ignore => (Trigger, Some(event))
|
||||
case _ => current
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note that nextAction should never return Ignore.
|
||||
next match {
|
||||
case (action @ (HandleError | CancelWatch | _: Custom), Some(event)) =>
|
||||
val cause =
|
||||
if (action == HandleError) "error"
|
||||
else if (action.isInstanceOf[Custom]) action.toString
|
||||
else "cancellation"
|
||||
logger.debug(s"Stopping watch due to $cause from ${event.path}")
|
||||
action
|
||||
case (Trigger, Some(event)) =>
|
||||
logger.debug(s"Triggered by ${event.path}")
|
||||
config.triggeredMessage(event.path, count).foreach(info)
|
||||
Trigger
|
||||
case (Reload, Some(event)) =>
|
||||
logger.info(s"Reload triggered by ${event.path}")
|
||||
Reload
|
||||
case _ =>
|
||||
nextAction()
|
||||
}
|
||||
}
|
||||
}
|
||||
task() match {
|
||||
case Right(status) =>
|
||||
config.preWatch(count, status) match {
|
||||
case Ignore =>
|
||||
config.watchingMessage(count).foreach(info)
|
||||
nextAction() match {
|
||||
case action @ (CancelWatch | HandleError | Reload | _: Custom) => action
|
||||
case _ => impl(count + 1)
|
||||
}
|
||||
case Trigger => impl(count + 1)
|
||||
case action @ (CancelWatch | HandleError | Reload | _: Custom) => action
|
||||
}
|
||||
case Left(e) =>
|
||||
logger.error(s"Terminating watch due to Unexpected error: $e")
|
||||
HandleError
|
||||
}
|
||||
}
|
||||
try impl(count = 1)
|
||||
finally config.fileEventMonitor.close()
|
||||
}
|
||||
|
||||
@deprecated("Replaced by Watched.command", "1.3.0")
|
||||
def executeContinuously(watched: Watched, s: State, next: String, repeat: String): State =
|
||||
LegacyWatched.executeContinuously(watched, s, next, repeat)
|
||||
|
||||
private[sbt] object NullLogger extends Logger {
|
||||
override def trace(t: => Throwable): Unit = {}
|
||||
override def success(message: => String): Unit = {}
|
||||
override def log(level: Level.Value, message: => String): Unit = {}
|
||||
}
|
||||
|
||||
@deprecated("ContinuousEventMonitor attribute is not used by Watched.command", "1.3.0")
|
||||
val ContinuousEventMonitor =
|
||||
AttributeKey[EventMonitor](
|
||||
"watch event monitor",
|
||||
"Internal: maintains watch state and monitor threads."
|
||||
)
|
||||
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
|
||||
@deprecated("Superseded by ContinuousEventMonitor", "1.3.0")
|
||||
val ContinuousState =
|
||||
AttributeKey[WatchState]("watch state", "Internal: tracks state for continuous execution.")
|
||||
|
||||
@deprecated("Superseded by ContinuousEventMonitor", "1.1.5")
|
||||
@deprecated("Superseded by ContinuousEventMonitor", "1.3.0")
|
||||
val ContinuousWatchService =
|
||||
AttributeKey[WatchService](
|
||||
"watch service",
|
||||
"Internal: tracks watch service for continuous execution."
|
||||
)
|
||||
@deprecated("No longer used for continuous execution", "1.3.0")
|
||||
val Configuration =
|
||||
AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")
|
||||
|
||||
def createWatchService(pollDelay: FiniteDuration): WatchService = {
|
||||
def closeWatch = new MacOSXWatchService()
|
||||
sys.props.get("sbt.watch.mode") match {
|
||||
case Some("polling") =>
|
||||
new PollingWatchService(pollDelay)
|
||||
case Some("nio") =>
|
||||
FileSystems.getDefault.newWatchService()
|
||||
case Some("closewatch") => closeWatch
|
||||
case _ if Properties.isMac => closeWatch
|
||||
case _ =>
|
||||
FileSystems.getDefault.newWatchService()
|
||||
}
|
||||
}
|
||||
def createWatchService(): WatchService = createWatchService(PollDelay)
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a number of configuration options for continuous execution.
|
||||
*/
|
||||
trait WatchConfig {
|
||||
|
||||
/**
|
||||
* A logger.
|
||||
* @return a logger
|
||||
*/
|
||||
def logger: Logger
|
||||
|
||||
/**
|
||||
* The sbt.io.FileEventMonitor that is used to monitor the file system.
|
||||
*
|
||||
* @return an sbt.io.FileEventMonitor instance.
|
||||
*/
|
||||
def fileEventMonitor: FileEventMonitor[FileAttributes]
|
||||
|
||||
/**
|
||||
* A function that is periodically invoked to determine whether the watch should stop or
|
||||
* trigger. Usually this will read from System.in to react to user input.
|
||||
* @return an [[Watched.Action Action]] that will determine the next step in the watch.
|
||||
*/
|
||||
def handleInput(inputStream: InputStream): Watched.Action
|
||||
|
||||
/**
|
||||
* This is run before each watch iteration and if it returns true, the watch is terminated.
|
||||
* @param count The current number of watch iterations.
|
||||
* @param lastStatus true if the previous task execution completed successfully
|
||||
* @return the Action to apply
|
||||
*/
|
||||
def preWatch(count: Int, lastStatus: Boolean): Watched.Action
|
||||
|
||||
/**
|
||||
* Callback that is invoked whenever a file system vent is detected. The next step of the watch
|
||||
* is determined by the [[Watched.Action Action]] returned by the callback.
|
||||
* @param event the detected sbt.io.FileEventMonitor.Event.
|
||||
* @return the next [[Watched.Action Action]] to run.
|
||||
*/
|
||||
def onWatchEvent(event: FileAttributes.Event): Watched.Action
|
||||
|
||||
/**
|
||||
* Transforms the state after the watch terminates.
|
||||
* @param action the [[Watched.Action Action]] that caused the build to terminate
|
||||
* @param command the command that the watch was repeating
|
||||
* @param state the initial state prior to the start of continuous execution
|
||||
* @return the updated state.
|
||||
*/
|
||||
def onWatchTerminated(action: Watched.Action, command: String, state: State): State
|
||||
|
||||
/**
|
||||
* The optional message to log when a build is triggered.
|
||||
* @param path the path that triggered the vuild
|
||||
* @param count the current iteration
|
||||
* @return an optional log message.
|
||||
*/
|
||||
def triggeredMessage(path: Path, count: Int): Option[String]
|
||||
|
||||
/**
|
||||
* The optional message to log before each watch iteration.
|
||||
* @param count the current iteration
|
||||
* @return an optional log message.
|
||||
*/
|
||||
def watchingMessage(count: Int): Option[String]
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a default implementation of [[WatchConfig]].
|
||||
*/
|
||||
object WatchConfig {
|
||||
|
||||
/**
|
||||
* Create an instance of [[WatchConfig]].
|
||||
* @param logger logger for watch events
|
||||
* @param fileEventMonitor the monitor for file system events.
|
||||
* @param handleInput callback that is periodically invoked to check whether to continue or
|
||||
* terminate the watch based on user input. It is also possible to, for
|
||||
* example time out the watch using this callback.
|
||||
* @param preWatch callback to invoke before waiting for updates from the sbt.io.FileEventMonitor.
|
||||
* The input parameters are the current iteration count and whether or not
|
||||
* the last invocation of the command was successful. Typical uses would be to
|
||||
* terminate the watch after a fixed number of iterations or to terminate the
|
||||
* watch if the command was unsuccessful.
|
||||
* @param onWatchEvent callback that is invoked when
|
||||
* @param onWatchTerminated callback that is invoked to update the state after the watch
|
||||
* terminates.
|
||||
* @param triggeredMessage optional message that will be logged when a new build is triggered.
|
||||
* The input parameters are the sbt.io.TypedPath that triggered the new
|
||||
* build and the current iteration count.
|
||||
* @param watchingMessage optional message that is printed before each watch iteration begins.
|
||||
* The input parameter is the current iteration count.
|
||||
* @return a [[WatchConfig]] instance.
|
||||
*/
|
||||
def default(
|
||||
logger: Logger,
|
||||
fileEventMonitor: FileEventMonitor[FileAttributes],
|
||||
handleInput: InputStream => Watched.Action,
|
||||
preWatch: (Int, Boolean) => Watched.Action,
|
||||
onWatchEvent: FileAttributes.Event => Watched.Action,
|
||||
onWatchTerminated: (Watched.Action, String, State) => State,
|
||||
triggeredMessage: (Path, Int) => Option[String],
|
||||
watchingMessage: Int => Option[String]
|
||||
): WatchConfig = {
|
||||
val l = logger
|
||||
val fem = fileEventMonitor
|
||||
val hi = handleInput
|
||||
val pw = preWatch
|
||||
val owe = onWatchEvent
|
||||
val owt = onWatchTerminated
|
||||
val tm = triggeredMessage
|
||||
val wm = watchingMessage
|
||||
new WatchConfig {
|
||||
override def logger: Logger = l
|
||||
override def fileEventMonitor: FileEventMonitor[FileAttributes] = fem
|
||||
override def handleInput(inputStream: InputStream): Watched.Action = hi(inputStream)
|
||||
override def preWatch(count: Int, lastResult: Boolean): Watched.Action =
|
||||
pw(count, lastResult)
|
||||
override def onWatchEvent(event: FileAttributes.Event): Watched.Action = owe(event)
|
||||
override def onWatchTerminated(action: Watched.Action, command: String, state: State): State =
|
||||
owt(action, command, state)
|
||||
override def triggeredMessage(path: Path, count: Int): Option[String] =
|
||||
tm(path, count)
|
||||
override def watchingMessage(count: Int): Option[String] = wm(count)
|
||||
}
|
||||
}
|
||||
@deprecated("Use defaultStartWatch in conjunction with the watchStartMessage key", "1.3.0")
|
||||
val defaultWatchingMessage: WatchState => String =
|
||||
((ws: WatchState) => s"${ws.count}. ${waitMessage("")} ")
|
||||
.label("Watched.projectWatchingMessage")
|
||||
@deprecated(
|
||||
"Use defaultOnTriggerMessage in conjunction with the watchTriggeredMessage key",
|
||||
"1.3.0"
|
||||
)
|
||||
val defaultTriggeredMessage: WatchState => String =
|
||||
const("").label("Watched.defaultTriggeredMessage")
|
||||
@deprecated("Use clearOnTrigger in conjunction with the watchTriggeredMessage key", "1.3.0")
|
||||
val clearWhenTriggered: WatchState => String =
|
||||
const(clearScreen).label("Watched.clearWhenTriggered")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ object MultiParserSpec {
|
|||
def parseEither: Either[String, Seq[String]] = Parser.parse(s, parser)
|
||||
}
|
||||
}
|
||||
import MultiParserSpec._
|
||||
import sbt.MultiParserSpec._
|
||||
class MultiParserSpec extends FlatSpec with Matchers {
|
||||
"parsing" should "parse single commands" in {
|
||||
";foo".parse shouldBe Seq("foo")
|
||||
|
|
|
|||
|
|
@ -1,144 +0,0 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.nio.file.{ Files, Path }
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
|
||||
import org.scalatest.{ FlatSpec, Matchers }
|
||||
import sbt.Watched._
|
||||
import sbt.WatchedSpec._
|
||||
import sbt.internal.FileAttributes
|
||||
import sbt.io._
|
||||
import sbt.io.syntax._
|
||||
import sbt.util.Logger
|
||||
|
||||
import scala.collection.mutable
|
||||
import scala.concurrent.duration._
|
||||
|
||||
class WatchedSpec extends FlatSpec with Matchers {
|
||||
object Defaults {
|
||||
def config(
|
||||
globs: Seq[Glob],
|
||||
fileEventMonitor: Option[FileEventMonitor[FileAttributes]] = None,
|
||||
logger: Logger = NullLogger,
|
||||
handleInput: InputStream => Action = _ => Ignore,
|
||||
preWatch: (Int, Boolean) => Action = (_, _) => CancelWatch,
|
||||
onWatchEvent: FileAttributes.Event => Action = _ => Ignore,
|
||||
triggeredMessage: (Path, Int) => Option[String] = (_, _) => None,
|
||||
watchingMessage: Int => Option[String] = _ => None
|
||||
): WatchConfig = {
|
||||
val monitor = fileEventMonitor.getOrElse {
|
||||
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
|
||||
globs.foreach(fileTreeRepository.register)
|
||||
FileEventMonitor.antiEntropy(
|
||||
fileTreeRepository,
|
||||
50.millis,
|
||||
m => logger.debug(m.toString),
|
||||
50.milliseconds,
|
||||
100.milliseconds
|
||||
)
|
||||
}
|
||||
WatchConfig.default(
|
||||
logger = logger,
|
||||
monitor,
|
||||
handleInput,
|
||||
preWatch,
|
||||
onWatchEvent,
|
||||
(_, _, state) => state,
|
||||
triggeredMessage,
|
||||
watchingMessage
|
||||
)
|
||||
}
|
||||
}
|
||||
object NullInputStream extends InputStream {
|
||||
override def available(): Int = 0
|
||||
override def read(): Int = -1
|
||||
}
|
||||
"Watched.watch" should "stop" in IO.withTemporaryDirectory { dir =>
|
||||
val config = Defaults.config(globs = Seq(dir.toRealPath.toGlob))
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
}
|
||||
it should "trigger" in IO.withTemporaryDirectory { dir =>
|
||||
val triggered = new AtomicBoolean(false)
|
||||
val config = Defaults.config(
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = _ => { triggered.set(true); Trigger },
|
||||
watchingMessage = _ => {
|
||||
new File(dir, "file").createNewFile; None
|
||||
}
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
assert(triggered.get())
|
||||
}
|
||||
it should "filter events" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.toPath.resolve("foo")
|
||||
val bar = realDir.toPath.resolve("bar")
|
||||
val config = Defaults.config(
|
||||
globs = Seq(realDir ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = e => if (e.path == foo) Trigger else Ignore,
|
||||
triggeredMessage = (tp, _) => { queue += tp; None },
|
||||
watchingMessage = _ => { Files.createFile(bar); Thread.sleep(5); Files.createFile(foo); None }
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
queue.toIndexedSeq shouldBe Seq(foo)
|
||||
}
|
||||
it should "enforce anti-entropy" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath.toPath
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.resolve("foo")
|
||||
val bar = realDir.resolve("bar")
|
||||
val config = Defaults.config(
|
||||
globs = Seq(realDir ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 3) CancelWatch else Ignore,
|
||||
onWatchEvent = e => if (e.path != realDir) Trigger else Ignore,
|
||||
triggeredMessage = (tp, _) => { queue += tp; None },
|
||||
watchingMessage = count => {
|
||||
count match {
|
||||
case 1 => Files.createFile(bar)
|
||||
case 2 =>
|
||||
bar.toFile.setLastModified(5000)
|
||||
Files.createFile(foo)
|
||||
case _ =>
|
||||
}
|
||||
None
|
||||
}
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
queue.toIndexedSeq shouldBe Seq(bar, foo)
|
||||
}
|
||||
it should "halt on error" in IO.withTemporaryDirectory { dir =>
|
||||
val halted = new AtomicBoolean(false)
|
||||
val config = Defaults.config(
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (_, lastStatus) => if (lastStatus) Ignore else { halted.set(true); HandleError }
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(false), config) shouldBe HandleError
|
||||
assert(halted.get())
|
||||
}
|
||||
it should "reload" in IO.withTemporaryDirectory { dir =>
|
||||
val config = Defaults.config(
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (_, _) => Ignore,
|
||||
onWatchEvent = _ => Reload,
|
||||
watchingMessage = _ => { new File(dir, "file").createNewFile(); None }
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe Reload
|
||||
}
|
||||
}
|
||||
|
||||
object WatchedSpec {
|
||||
implicit class FileOps(val f: File) {
|
||||
def toRealPath: File = f.toPath.toRealPath().toFile
|
||||
}
|
||||
}
|
||||
|
|
@ -100,7 +100,16 @@ object Append {
|
|||
new Sequence[Seq[Source], Seq[File], File] {
|
||||
def appendValue(a: Seq[Source], b: File): Seq[Source] = appendValues(a, Seq(b))
|
||||
def appendValues(a: Seq[Source], b: Seq[File]): Seq[Source] =
|
||||
a ++ b.map(new Source(_, AllPassFilter, NothingFilter))
|
||||
a ++ b.map { f =>
|
||||
// Globs only accept their own base if the depth parameter is set to -1. The conversion
|
||||
// from Source to Glob never sets the depth to -1, which causes individual files
|
||||
// added via `watchSource += ...` to not trigger a build when they are modified. Since
|
||||
// watchSources will be deprecated in 1.3.0, I'm hoping that most people will migrate
|
||||
// their builds to the new system, but this will work for most builds in the interim.
|
||||
if (f.isFile && f.getParentFile != null)
|
||||
new Source(f.getParentFile, f.getName, NothingFilter, recursive = false)
|
||||
else new Source(f, AllPassFilter, NothingFilter)
|
||||
}
|
||||
}
|
||||
|
||||
// Implemented with SAM conversion short-hand
|
||||
|
|
|
|||
|
|
@ -7,15 +7,15 @@
|
|||
|
||||
package sbt
|
||||
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.{ AttributeKey, Attributed, ConsoleAppender, Init }
|
||||
import sbt.util.Show
|
||||
import sbt.internal.util.complete.Parser
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
|
||||
import Scope.{ GlobalScope, ThisScope }
|
||||
import KeyRanks.{ DTask, Invisible }
|
||||
import sbt.KeyRanks.{ DTask, Invisible }
|
||||
import sbt.Scope.{ GlobalScope, ThisScope }
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util._
|
||||
import sbt.util.Show
|
||||
|
||||
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
|
||||
object Def extends Init[Scope] with TaskMacroExtra {
|
||||
|
|
@ -206,15 +206,16 @@ object Def extends Init[Scope] with TaskMacroExtra {
|
|||
def toISParser[T](p: Initialize[Parser[T]]): Initialize[State => Parser[T]] = p(toSParser)
|
||||
def toIParser[T](p: Initialize[InputTask[T]]): Initialize[State => Parser[Task[T]]] = p(_.parser)
|
||||
|
||||
import language.experimental.macros
|
||||
import std.SettingMacro.{ settingDynMacroImpl, settingMacroImpl }
|
||||
import std.TaskMacro.{
|
||||
inputTaskMacroImpl,
|
||||
inputTaskDynMacroImpl,
|
||||
inputTaskMacroImpl,
|
||||
taskDynMacroImpl,
|
||||
taskMacroImpl
|
||||
}
|
||||
import std.SettingMacro.{ settingDynMacroImpl, settingMacroImpl }
|
||||
import std.{ InputEvaluated, MacroPrevious, MacroValue, MacroTaskValue, ParserInput }
|
||||
import std._
|
||||
|
||||
import language.experimental.macros
|
||||
|
||||
def task[T](t: T): Def.Initialize[Task[T]] = macro taskMacroImpl[T]
|
||||
def taskDyn[T](t: Def.Initialize[Task[T]]): Def.Initialize[Task[T]] = macro taskDynMacroImpl[T]
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ package sbt
|
|||
|
||||
import java.io.{ File, PrintWriter }
|
||||
import java.net.{ URI, URL }
|
||||
import java.nio.file.{ Path => NioPath }
|
||||
import java.util.Optional
|
||||
import java.util.concurrent.{ Callable, TimeUnit }
|
||||
|
||||
|
|
@ -43,6 +42,7 @@ import sbt.internal.server.{
|
|||
ServerHandler
|
||||
}
|
||||
import sbt.internal.testing.TestLogger
|
||||
import sbt.internal.TransitiveGlobs._
|
||||
import sbt.internal.util.Attributed.data
|
||||
import sbt.internal.util.Types._
|
||||
import sbt.internal.util._
|
||||
|
|
@ -143,6 +143,8 @@ object Defaults extends BuildCommon {
|
|||
defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq(
|
||||
excludeFilter :== HiddenFileFilter,
|
||||
classLoaderCache := ClassLoaderCache(4),
|
||||
fileInputs :== Nil,
|
||||
watchTriggers :== Nil,
|
||||
) ++ TaskRepository
|
||||
.proxy(GlobalScope / classLoaderCache, ClassLoaderCache(4)) ++ globalIvyCore ++ globalJvmCore
|
||||
) ++ globalSbtCore
|
||||
|
|
@ -229,6 +231,14 @@ object Defaults extends BuildCommon {
|
|||
outputStrategy :== None, // TODO - This might belong elsewhere.
|
||||
buildStructure := Project.structure(state.value),
|
||||
settingsData := buildStructure.value.data,
|
||||
settingsData / fileInputs := {
|
||||
val baseDir = file(".").getCanonicalFile
|
||||
val sourceFilter = ("*.sbt" || "*.scala" || "*.java") -- HiddenFileFilter
|
||||
Seq(
|
||||
Glob(baseDir, "*.sbt" -- HiddenFileFilter, 0),
|
||||
Glob(baseDir / "project", sourceFilter, Int.MaxValue)
|
||||
)
|
||||
},
|
||||
trapExit :== true,
|
||||
connectInput :== false,
|
||||
cancelable :== false,
|
||||
|
|
@ -243,8 +253,6 @@ object Defaults extends BuildCommon {
|
|||
// The idea here is to be able to define a `sbtVersion in pluginCrossBuild`, which
|
||||
// directs the dependencies of the plugin to build to the specified sbt plugin version.
|
||||
sbtVersion in pluginCrossBuild := sbtVersion.value,
|
||||
watchingMessage := Watched.defaultWatchingMessage,
|
||||
triggeredMessage := Watched.defaultTriggeredMessage,
|
||||
onLoad := idFun[State],
|
||||
onUnload := idFun[State],
|
||||
onUnload := { s =>
|
||||
|
|
@ -255,8 +263,7 @@ object Defaults extends BuildCommon {
|
|||
Nil
|
||||
},
|
||||
pollingGlobs :== Nil,
|
||||
watchSources :== Nil,
|
||||
watchProjectSources :== Nil,
|
||||
watchSources :== Nil, // Although this is deprecated, it can't be removed or it breaks += for legacy builds.
|
||||
skip :== false,
|
||||
taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir },
|
||||
onComplete := {
|
||||
|
|
@ -281,22 +288,16 @@ object Defaults extends BuildCommon {
|
|||
Previous.references :== new Previous.References,
|
||||
concurrentRestrictions := defaultRestrictions.value,
|
||||
parallelExecution :== true,
|
||||
pollInterval :== new FiniteDuration(500, TimeUnit.MILLISECONDS),
|
||||
watchTriggeredMessage := { (_, _) =>
|
||||
None
|
||||
},
|
||||
watchStartMessage := Watched.defaultStartWatch,
|
||||
fileTreeRepository := FileTree.Repository.polling,
|
||||
fileTreeRepository := state.value
|
||||
.get(globalFileTreeRepository)
|
||||
.map(FileTree.repository)
|
||||
.getOrElse(FileTree.Repository.polling),
|
||||
Continuous.dynamicInputs := Continuous.dynamicInputsImpl.value,
|
||||
externalHooks := {
|
||||
val repository = fileTreeRepository.value
|
||||
compileOptions =>
|
||||
Some(ExternalHooks(compileOptions, repository))
|
||||
},
|
||||
watchAntiEntropy :== new FiniteDuration(500, TimeUnit.MILLISECONDS),
|
||||
watchLogger := streams.value.log,
|
||||
watchService :== { () =>
|
||||
Watched.createWatchService()
|
||||
},
|
||||
logBuffered :== false,
|
||||
commands :== Nil,
|
||||
showSuccess :== true,
|
||||
|
|
@ -331,6 +332,22 @@ object Defaults extends BuildCommon {
|
|||
},
|
||||
insideCI :== sys.env.contains("BUILD_NUMBER") ||
|
||||
sys.env.contains("CI") || System.getProperty("sbt.ci", "false") == "true",
|
||||
// watch related settings
|
||||
pollInterval :== Watch.defaultPollInterval,
|
||||
watchAntiEntropy :== Watch.defaultAntiEntropy,
|
||||
watchAntiEntropyRetentionPeriod :== Watch.defaultAntiEntropyRetentionPeriod,
|
||||
watchLogLevel :== Level.Info,
|
||||
watchOnEnter :== Watch.defaultOnEnter,
|
||||
watchOnMetaBuildEvent :== Watch.ifChanged(Watch.Reload),
|
||||
watchOnInputEvent :== Watch.trigger,
|
||||
watchOnTriggerEvent :== Watch.trigger,
|
||||
watchDeletionQuarantinePeriod :== Watch.defaultDeletionQuarantinePeriod,
|
||||
watchService :== Watched.newWatchService,
|
||||
watchStartMessage :== Watch.defaultStartWatch,
|
||||
watchTasks := Continuous.continuousTask.evaluated,
|
||||
aggregate in watchTasks :== false,
|
||||
watchTrackMetaBuild :== true,
|
||||
watchTriggeredMessage :== Watch.defaultOnTriggerMessage,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
@ -381,33 +398,17 @@ object Defaults extends BuildCommon {
|
|||
crossPaths.value
|
||||
)
|
||||
},
|
||||
unmanagedSources := {
|
||||
unmanagedSources / fileInputs := {
|
||||
val filter =
|
||||
(includeFilter in unmanagedSources).value -- (excludeFilter in unmanagedSources).value
|
||||
val baseSources = if (sourcesInBase.value) baseDirectory.value * filter :: Nil else Nil
|
||||
(unmanagedSourceDirectories.value.map(_ ** filter) ++ baseSources).all.map(Stamped.file)
|
||||
},
|
||||
watchSources in ConfigGlobal := (watchSources in ConfigGlobal).value ++ {
|
||||
val baseDir = baseDirectory.value
|
||||
val bases = unmanagedSourceDirectories.value
|
||||
val include = (includeFilter in unmanagedSources).value
|
||||
val exclude = (excludeFilter in unmanagedSources).value
|
||||
val baseSources =
|
||||
if (sourcesInBase.value) Seq(new Source(baseDir, include, exclude, recursive = false))
|
||||
else Nil
|
||||
bases.map(b => new Source(b, include, exclude)) ++ baseSources
|
||||
},
|
||||
watchProjectSources in ConfigGlobal := (watchProjectSources in ConfigGlobal).value ++ {
|
||||
val baseDir = baseDirectory.value
|
||||
Seq(
|
||||
new Source(baseDir, "*.sbt", HiddenFileFilter, recursive = false),
|
||||
new Source(baseDir / "project", "*.sbt" || "*.scala", HiddenFileFilter, recursive = true)
|
||||
)
|
||||
unmanagedSourceDirectories.value.map(_ ** filter) ++ baseSources
|
||||
},
|
||||
unmanagedSources := (unmanagedSources / fileInputs).value.all.map(Stamped.file),
|
||||
managedSourceDirectories := Seq(sourceManaged.value),
|
||||
managedSources := generate(sourceGenerators).value,
|
||||
sourceGenerators :== Nil,
|
||||
sourceGenerators / outputs := Seq(managedDirectory.value ** AllPassFilter),
|
||||
sourceGenerators / fileOutputs := Seq(managedDirectory.value ** AllPassFilter),
|
||||
sourceDirectories := Classpaths
|
||||
.concatSettings(unmanagedSourceDirectories, managedSourceDirectories)
|
||||
.value,
|
||||
|
|
@ -421,17 +422,12 @@ object Defaults extends BuildCommon {
|
|||
resourceDirectories := Classpaths
|
||||
.concatSettings(unmanagedResourceDirectories, managedResourceDirectories)
|
||||
.value,
|
||||
unmanagedResources := {
|
||||
unmanagedResources / fileInputs := {
|
||||
val filter =
|
||||
(includeFilter in unmanagedResources).value -- (excludeFilter in unmanagedResources).value
|
||||
unmanagedResourceDirectories.value.map(_ ** filter).all.map(Stamped.file)
|
||||
},
|
||||
watchSources in ConfigGlobal := (watchSources in ConfigGlobal).value ++ {
|
||||
val bases = unmanagedResourceDirectories.value
|
||||
val include = (includeFilter in unmanagedResources).value
|
||||
val exclude = (excludeFilter in unmanagedResources).value
|
||||
bases.map(b => new Source(b, include, exclude))
|
||||
unmanagedResourceDirectories.value.map(_ ** filter)
|
||||
},
|
||||
unmanagedResources := (unmanagedResources / fileInputs).value.all.map(Stamped.file),
|
||||
resourceGenerators :== Nil,
|
||||
resourceGenerators += Def.task {
|
||||
PluginDiscovery.writeDescriptors(discoveredSbtPlugins.value, resourceManaged.value)
|
||||
|
|
@ -573,12 +569,13 @@ object Defaults extends BuildCommon {
|
|||
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)(
|
||||
compileInputsSettings :+ (clean := Clean.taskIn(ThisScope).value)
|
||||
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
|
||||
outputs := Seq(
|
||||
fileOutputs := Seq(
|
||||
compileAnalysisFileTask.value.toGlob,
|
||||
classDirectory.value ** "*.class"
|
||||
) ++ (sourceGenerators / outputs).value,
|
||||
) ++ (sourceGenerators / fileOutputs).value,
|
||||
compile := compileTask.value,
|
||||
clean := Clean.taskIn(ThisScope).value,
|
||||
internalDependencyConfigurations := InternalDependencies.configurations.value,
|
||||
manipulateBytecode := compileIncremental.value,
|
||||
compileIncremental := (compileIncrementalTask tag (Tags.Compile, Tags.CPU)).value,
|
||||
printWarnings := printWarningsTask.value,
|
||||
|
|
@ -627,48 +624,20 @@ object Defaults extends BuildCommon {
|
|||
clean := Clean.taskIn(ThisScope).value,
|
||||
consoleProject := consoleProjectTask.value,
|
||||
watchTransitiveSources := watchTransitiveSourcesTask.value,
|
||||
watchProjectTransitiveSources := watchTransitiveSourcesTaskImpl(watchProjectSources).value,
|
||||
watchOnEvent := Watched
|
||||
.onEvent(watchTransitiveSources.value, watchProjectTransitiveSources.value),
|
||||
watchHandleInput := Watched.handleInput,
|
||||
watchPreWatch := { (_, _) =>
|
||||
Watched.Ignore
|
||||
},
|
||||
watchOnTermination := Watched.onTermination,
|
||||
watchConfig := {
|
||||
val sources = watchTransitiveSources.value ++ watchProjectTransitiveSources.value
|
||||
val globs = sources.map(
|
||||
s => Glob(s.base, s.includeFilter -- s.excludeFilter, if (s.recursive) Int.MaxValue else 0)
|
||||
)
|
||||
val wm = watchingMessage.?.value
|
||||
.map(w => (count: Int) => Some(w(WatchState.empty(globs).withCount(count))))
|
||||
.getOrElse(watchStartMessage.value)
|
||||
val tm = triggeredMessage.?.value
|
||||
.map(tm => (_: NioPath, count: Int) => Some(tm(WatchState.empty(globs).withCount(count))))
|
||||
.getOrElse(watchTriggeredMessage.value)
|
||||
val logger = watchLogger.value
|
||||
val repo = FileManagement.toMonitoringRepository(FileManagement.repo.value)
|
||||
globs.foreach(repo.register)
|
||||
val monitor = FileManagement.monitor(repo, watchAntiEntropy.value, logger)
|
||||
WatchConfig.default(
|
||||
logger,
|
||||
monitor,
|
||||
watchHandleInput.value,
|
||||
watchPreWatch.value,
|
||||
watchOnEvent.value,
|
||||
watchOnTermination.value,
|
||||
tm,
|
||||
wm
|
||||
)
|
||||
},
|
||||
watchStartMessage := Watched.projectOnWatchMessage(thisProjectRef.value.project),
|
||||
watch := watchSetting.value,
|
||||
outputs += target.value ** AllPassFilter,
|
||||
fileOutputs += target.value ** AllPassFilter,
|
||||
transitiveGlobs := InputGraph.task.value,
|
||||
transitiveInputs := InputGraph.inputsTask.value,
|
||||
transitiveTriggers := InputGraph.triggersTask.value,
|
||||
)
|
||||
|
||||
def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] =
|
||||
generators { _.join.map(_.flatten) }
|
||||
|
||||
@deprecated(
|
||||
"The watchTransitiveSourcesTask is used only for legacy builds and will be removed in a future version of sbt.",
|
||||
"1.3.0"
|
||||
)
|
||||
def watchTransitiveSourcesTask: Initialize[Task[Seq[Source]]] =
|
||||
watchTransitiveSourcesTaskImpl(watchSources)
|
||||
|
||||
|
|
@ -696,8 +665,8 @@ object Defaults extends BuildCommon {
|
|||
val interval = pollInterval.value
|
||||
val _antiEntropy = watchAntiEntropy.value
|
||||
val base = thisProjectRef.value
|
||||
val msg = watchingMessage.value
|
||||
val trigMsg = triggeredMessage.value
|
||||
val msg = watchingMessage.?.value.getOrElse(Watched.defaultWatchingMessage)
|
||||
val trigMsg = triggeredMessage.?.value.getOrElse(Watched.defaultTriggeredMessage)
|
||||
new Watched {
|
||||
val scoped = watchTransitiveSources in base
|
||||
val key = scoped.scopedKey
|
||||
|
|
@ -2039,7 +2008,7 @@ object Classpaths {
|
|||
transitiveClassifiers :== Seq(SourceClassifier, DocClassifier),
|
||||
sourceArtifactTypes :== Artifact.DefaultSourceTypes.toVector,
|
||||
docArtifactTypes :== Artifact.DefaultDocTypes.toVector,
|
||||
outputs :== Nil,
|
||||
fileOutputs :== Nil,
|
||||
sbtDependency := {
|
||||
val app = appConfiguration.value
|
||||
val id = app.provider.id
|
||||
|
|
@ -2053,7 +2022,12 @@ object Classpaths {
|
|||
val base = ModuleID(id.groupID, id.name, sbtVersion.value).withCrossVersion(cross)
|
||||
CrossVersion(scalaVersion, binVersion)(base).withCrossVersion(Disabled())
|
||||
},
|
||||
shellPrompt := shellPromptFromState
|
||||
shellPrompt := shellPromptFromState,
|
||||
dynamicDependency := { (): Unit },
|
||||
transitiveClasspathDependency := { (): Unit },
|
||||
transitiveGlobs := { (Nil: Seq[Glob], Nil: Seq[Glob]) },
|
||||
transitiveInputs := Nil,
|
||||
transitiveTriggers := Nil,
|
||||
)
|
||||
)
|
||||
|
||||
|
|
@ -2892,6 +2866,7 @@ object Classpaths {
|
|||
}
|
||||
private[sbt] def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =
|
||||
Def.task {
|
||||
val _ = (packageBin / dynamicDependency).value
|
||||
val art = (artifact in packageBin).value
|
||||
val module = projectID.value
|
||||
val config = configuration.value
|
||||
|
|
@ -2904,6 +2879,7 @@ object Classpaths {
|
|||
}
|
||||
private[sbt] def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =
|
||||
Def.task {
|
||||
val _ = (packageBin / dynamicDependency).value
|
||||
val art = (artifact in packageBin).value
|
||||
val module = projectID.value
|
||||
val config = configuration.value
|
||||
|
|
@ -2918,6 +2894,7 @@ object Classpaths {
|
|||
track: TrackLevel
|
||||
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
|
||||
Def.taskDyn {
|
||||
val _ = (packageBin / dynamicDependency).value
|
||||
val useJars = exportJars.value
|
||||
if (useJars) trackedJarProductsImplTask(track)
|
||||
else trackedNonJarProductsImplTask(track)
|
||||
|
|
@ -2988,6 +2965,14 @@ object Classpaths {
|
|||
|
||||
def internalDependencies: Initialize[Task[Classpath]] =
|
||||
Def.taskDyn {
|
||||
val _ = (
|
||||
(exportedProductsNoTracking / transitiveClasspathDependency).value,
|
||||
(exportedProductsIfMissing / transitiveClasspathDependency).value,
|
||||
(exportedProducts / transitiveClasspathDependency).value,
|
||||
(exportedProductJarsNoTracking / transitiveClasspathDependency).value,
|
||||
(exportedProductJarsIfMissing / transitiveClasspathDependency).value,
|
||||
(exportedProductJars / transitiveClasspathDependency).value
|
||||
)
|
||||
internalDependenciesImplTask(
|
||||
thisProjectRef.value,
|
||||
classpathConfiguration.value,
|
||||
|
|
|
|||
|
|
@ -7,38 +7,24 @@
|
|||
|
||||
package sbt
|
||||
|
||||
import sbt.internal.{
|
||||
Load,
|
||||
BuildStructure,
|
||||
TaskTimings,
|
||||
TaskName,
|
||||
GCUtil,
|
||||
TaskProgress,
|
||||
TaskTraceEvent
|
||||
}
|
||||
import sbt.internal.util.{ Attributed, ConsoleAppender, ErrorHandling, HList, RMap, Signals, Types }
|
||||
import sbt.util.{ Logger, Show }
|
||||
import sbt.librarymanagement.{ Resolver, UpdateReport }
|
||||
|
||||
import scala.concurrent.duration.Duration
|
||||
import java.io.File
|
||||
import java.util.concurrent.atomic.AtomicReference
|
||||
import Def.{ dummyState, ScopedKey, Setting }
|
||||
import Keys.{
|
||||
Streams,
|
||||
TaskStreams,
|
||||
dummyRoots,
|
||||
executionRoots,
|
||||
pluginData,
|
||||
streams,
|
||||
streamsManager,
|
||||
transformState
|
||||
}
|
||||
import Project.richInitializeTask
|
||||
import Scope.Global
|
||||
|
||||
import sbt.Def.{ ScopedKey, Setting, dummyState }
|
||||
import sbt.Keys.{ TaskProgress => _, name => _, _ }
|
||||
import sbt.Project.richInitializeTask
|
||||
import sbt.Scope.Global
|
||||
import sbt.internal.Aggregation.KeyValue
|
||||
import sbt.internal.TaskName._
|
||||
import sbt.internal.TransitiveGlobs._
|
||||
import sbt.internal.util._
|
||||
import sbt.internal.{ BuildStructure, GCUtil, Load, TaskProgress, TaskTimings, TaskTraceEvent, _ }
|
||||
import sbt.librarymanagement.{ Resolver, UpdateReport }
|
||||
import sbt.std.Transform.DummyTaskMap
|
||||
import sbt.util.{ Logger, Show }
|
||||
|
||||
import scala.Console.RED
|
||||
import std.Transform.DummyTaskMap
|
||||
import TaskName._
|
||||
import scala.concurrent.duration.Duration
|
||||
|
||||
/**
|
||||
* An API that allows you to cancel executing tasks upon some signal.
|
||||
|
|
@ -166,8 +152,8 @@ object PluginData {
|
|||
}
|
||||
|
||||
object EvaluateTask {
|
||||
import std.Transform
|
||||
import Keys.state
|
||||
import std.Transform
|
||||
|
||||
lazy private val sharedProgress = new TaskTimings(reportOnShutdown = true)
|
||||
def taskTimingProgress: Option[ExecuteProgress[Task]] =
|
||||
|
|
@ -494,8 +480,13 @@ object EvaluateTask {
|
|||
results: RMap[Task, Result],
|
||||
state: State,
|
||||
root: Task[T]
|
||||
): (State, Result[T]) =
|
||||
(stateTransform(results)(state), results(root))
|
||||
): (State, Result[T]) = {
|
||||
val newState = results(root) match {
|
||||
case Value(KeyValue(_, st: StateTransform) :: Nil) => st.state
|
||||
case _ => stateTransform(results)(state)
|
||||
}
|
||||
(newState, results(root))
|
||||
}
|
||||
def stateTransform(results: RMap[Task, Result]): State => State =
|
||||
Function.chain(
|
||||
results.toTypedSeq flatMap {
|
||||
|
|
@ -565,7 +556,7 @@ object EvaluateTask {
|
|||
|
||||
// if the return type Seq[Setting[_]] is not explicitly given, scalac hangs
|
||||
val injectStreams: ScopedKey[_] => Seq[Setting[_]] = scoped =>
|
||||
if (scoped.key == streams.key)
|
||||
if (scoped.key == streams.key) {
|
||||
Seq(streams in scoped.scope := {
|
||||
(streamsManager map { mgr =>
|
||||
val stream = mgr(scoped)
|
||||
|
|
@ -573,6 +564,26 @@ object EvaluateTask {
|
|||
stream
|
||||
}).value
|
||||
})
|
||||
else
|
||||
Nil
|
||||
} else if (scoped.key == transitiveInputs.key) {
|
||||
scoped.scope.task.toOption.toSeq.map { key =>
|
||||
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
|
||||
transitiveInputs in scoped.scope := InputGraph.inputsTask(updatedKey).value
|
||||
}
|
||||
} else if (scoped.key == transitiveTriggers.key) {
|
||||
scoped.scope.task.toOption.toSeq.map { key =>
|
||||
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
|
||||
transitiveTriggers in scoped.scope := InputGraph.triggersTask(updatedKey).value
|
||||
}
|
||||
} else if (scoped.key == transitiveGlobs.key) {
|
||||
scoped.scope.task.toOption.toSeq.map { key =>
|
||||
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
|
||||
transitiveGlobs in scoped.scope := InputGraph.task(updatedKey).value
|
||||
}
|
||||
} else if (scoped.key == dynamicDependency.key) {
|
||||
(dynamicDependency in scoped.scope := { () }) :: Nil
|
||||
} else if (scoped.key == transitiveClasspathDependency.key) {
|
||||
(transitiveClasspathDependency in scoped.scope := { () }) :: Nil
|
||||
} else {
|
||||
Nil
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ package sbt
|
|||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.net.URL
|
||||
import java.nio.file.Path
|
||||
|
||||
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
|
||||
import org.apache.ivy.core.module.id.ModuleRevisionId
|
||||
|
|
@ -22,7 +21,9 @@ import sbt.internal.inc.ScalaInstance
|
|||
import sbt.internal.io.WatchState
|
||||
import sbt.internal.librarymanagement.{ CompatibilityWarningOptions, IvySbt }
|
||||
import sbt.internal.server.ServerHandler
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util.{ AttributeKey, SourcePosition }
|
||||
import sbt.io.FileEventMonitor.Event
|
||||
import sbt.io._
|
||||
import sbt.librarymanagement.Configurations.CompilerPlugin
|
||||
import sbt.librarymanagement.LibraryManagementCodec._
|
||||
|
|
@ -90,27 +91,41 @@ object Keys {
|
|||
val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.")
|
||||
|
||||
val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
|
||||
@deprecated("This is no longer used for continuous execution", "1.3.0")
|
||||
val watch = SettingKey(BasicKeys.watch)
|
||||
val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting)
|
||||
val enableGlobalCachingFileTreeRepository = settingKey[Boolean]("Toggles whether or not to create a global cache of the file system that can be used by tasks to quickly list a path").withRank(DSetting)
|
||||
val fileTreeRepository = taskKey[FileTree.Repository]("A repository of the file system.")
|
||||
val fileTreeRepository = taskKey[FileTree.Repository]("A repository of the file system.").withRank(DSetting)
|
||||
val pollInterval = settingKey[FiniteDuration]("Interval between checks for modified sources by the continuous execution command.").withRank(BMinusSetting)
|
||||
val pollingGlobs = settingKey[Seq[Glob]]("Directories that cannot be cached and must always be rescanned. Typically these will be NFS mounted or something similar.").withRank(DSetting)
|
||||
val watchAntiEntropy = settingKey[FiniteDuration]("Duration for which the watch EventMonitor will ignore events for a file after that file has triggered a build.").withRank(BMinusSetting)
|
||||
val watchConfig = taskKey[WatchConfig]("The configuration for continuous execution.").withRank(BMinusSetting)
|
||||
val watchLogger = taskKey[Logger]("A logger that reports watch events.").withRank(DSetting)
|
||||
val watchHandleInput = settingKey[InputStream => Watched.Action]("Function that is periodically invoked to determine if the continous build should be stopped or if a build should be triggered. It will usually read from stdin to respond to user commands.").withRank(BMinusSetting)
|
||||
val watchOnEvent = taskKey[FileAttributes.Event => Watched.Action]("Determines how to handle a file event").withRank(BMinusSetting)
|
||||
val watchOnTermination = taskKey[(Watched.Action, String, State) => State]("Transforms the input state after the continuous build completes.").withRank(BMinusSetting)
|
||||
val watchService = settingKey[() => WatchService]("Service to use to monitor file system changes.").withRank(BMinusSetting)
|
||||
val watchProjectSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources for the sbt meta project to watch to trigger a reload.").withRank(CSetting)
|
||||
val watchProjectTransitiveSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in all projects for the sbt meta project to watch to trigger a reload.").withRank(CSetting)
|
||||
val watchPreWatch = settingKey[(Int, Boolean) => Watched.Action]("Function that may terminate a continuous build based on the number of iterations and the last result").withRank(BMinusSetting)
|
||||
val watchAntiEntropyRetentionPeriod = settingKey[FiniteDuration]("Wall clock Duration for which a FileEventMonitor will store anti-entropy events. This prevents spurious triggers when a task takes a long time to run. Higher values will consume more memory but make spurious triggers less likely.").withRank(BMinusSetting)
|
||||
val watchDeletionQuarantinePeriod = settingKey[FiniteDuration]("Period for which deletion events will be quarantined. This is to prevent spurious builds when a file is updated with a rename which manifests as a file deletion followed by a file creation. The higher this value is set, the longer the delay will be between a file deletion and a build trigger but the less likely it is for a spurious trigger.").withRank(DSetting)
|
||||
val watchLogLevel = settingKey[sbt.util.Level.Value]("Transform the default logger in continuous builds.").withRank(DSetting)
|
||||
val watchInputHandler = settingKey[InputStream => Watch.Action]("Function that is periodically invoked to determine if the continuous build should be stopped or if a build should be triggered. It will usually read from stdin to respond to user commands. This is only invoked if watchInputStream is set.").withRank(DSetting)
|
||||
val watchInputStream = taskKey[InputStream]("The input stream to read for user input events. This will usually be System.in").withRank(DSetting)
|
||||
val watchInputParser = settingKey[Parser[Watch.Action]]("A parser of user input that can be used to trigger or exit a continuous build").withRank(DSetting)
|
||||
val watchOnEnter = settingKey[() => Unit]("Function to run prior to beginning a continuous build. This will run before the continuous task(s) is(are) first evaluated.").withRank(DSetting)
|
||||
val watchOnExit = settingKey[() => Unit]("Function to run upon exit of a continuous build. It can be used to cleanup resources used during the watch.").withRank(DSetting)
|
||||
val watchOnInputEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the transitive inputs. This is only invoked if watchOnEvent is not explicitly set.").withRank(DSetting)
|
||||
val watchOnEvent = settingKey[Continuous.Arguments => Event[FileAttributes] => Watch.Action]("Determines how to handle a file event. The Seq[Glob] contains all of the transitive inputs for the task(s) being run by the continuous build.").withRank(DSetting)
|
||||
val watchOnMetaBuildEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the meta build triggers.").withRank(DSetting)
|
||||
val watchOnTermination = settingKey[(Watch.Action, String, Int, State) => State]("Transforms the state upon completion of a watch. The String argument is the command that was run during the watch. The Int parameter specifies how many times the command was run during the watch.").withRank(DSetting)
|
||||
val watchOnTrigger = settingKey[Continuous.Arguments => Event[FileAttributes] => Unit]("Callback to invoke when a continuous build triggers. The first parameter is the number of previous watch task invocations. The second parameter is the Event that triggered this build").withRank(DSetting)
|
||||
val watchOnTriggerEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the transitive triggers. This is only invoked if watchOnEvent is not explicitly set.").withRank(DSetting)
|
||||
val watchOnIteration = settingKey[Int => Watch.Action]("Function that is invoked before waiting for file system events or user input events. This is only invoked if watchOnStart is not explicitly set.").withRank(DSetting)
|
||||
val watchOnStart = settingKey[Continuous.Arguments => () => Watch.Action]("Function is invoked before waiting for file system or input events. The returned Action is used to either trigger the build, terminate the watch or wait for events.").withRank(DSetting)
|
||||
val watchService = settingKey[() => WatchService]("Service to use to monitor file system changes.").withRank(BMinusSetting).withRank(DSetting)
|
||||
val watchStartMessage = settingKey[(Int, String, Seq[String]) => Option[String]]("The message to show when triggered execution waits for sources to change. The parameters are the current watch iteration count, the current project name and the tasks that are being run with each build.").withRank(DSetting)
|
||||
// The watchTasks key should really be named watch, but that is already taken by the deprecated watch key. I'd be surprised if there are any plugins that use it so I think we should consider breaking binary compatibility to rename this task.
|
||||
val watchTasks = InputKey[StateTransform]("watch", "Watch a task (or multiple tasks) and rebuild when its file inputs change or user input is received. The semantics are more or less the same as the `~` command except that it cannot transform the state on exit. This means that it cannot be used to reload the build.").withRank(DSetting)
|
||||
val watchTrackMetaBuild = settingKey[Boolean]("Toggles whether or not changing the build files (e.g. **/*.sbt, project/**/(*.scala | *.java)) should automatically trigger a project reload").withRank(DSetting)
|
||||
val watchTriggeredMessage = settingKey[(Int, Event[FileAttributes], Seq[String]) => Option[String]]("The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count.").withRank(DSetting)
|
||||
|
||||
// Deprecated watch apis
|
||||
@deprecated("This is no longer used for continuous execution", "1.3.0")
|
||||
val watch = SettingKey(BasicKeys.watch)
|
||||
@deprecated("WatchSource has been replaced by Glob. To add file triggers to a task with key: Key, set `Key / watchTriggers := Seq[Glob](...)`.", "1.3.0")
|
||||
val watchSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in this project for continuous execution to watch for changes.").withRank(BMinusSetting)
|
||||
val watchStartMessage = settingKey[Int => Option[String]]("The message to show when triggered execution waits for sources to change. The parameter is the current watch iteration count.").withRank(DSetting)
|
||||
@deprecated("This is for legacy builds only and will be removed in a future version of sbt", "1.3.0")
|
||||
val watchTransitiveSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in all projects for continuous execution to watch.").withRank(CSetting)
|
||||
val watchTriggeredMessage = settingKey[(Path, Int) => Option[String]]("The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count.").withRank(DSetting)
|
||||
@deprecated("Use watchStartMessage instead", "1.3.0")
|
||||
val watchingMessage = settingKey[WatchState => String]("The message to show when triggered execution waits for sources to change.").withRank(DSetting)
|
||||
@deprecated("Use watchTriggeredMessage instead", "1.3.0")
|
||||
|
|
@ -133,6 +148,8 @@ object Keys {
|
|||
val managedSources = taskKey[Seq[File]]("Sources generated by the build.").withRank(BTask)
|
||||
val sources = taskKey[Seq[File]]("All sources, both managed and unmanaged.").withRank(BTask)
|
||||
val sourcesInBase = settingKey[Boolean]("If true, sources from the project's base directory are included as main sources.")
|
||||
val fileInputs = settingKey[Seq[Glob]]("The file globs that are used by a task. This setting will generally be scoped per task. It will also be used to determine the sources to watch during continuous execution.")
|
||||
val watchTriggers = settingKey[Seq[Glob]]("Describes files that should trigger a new continuous build.")
|
||||
|
||||
// Filters
|
||||
val includeFilter = settingKey[FileFilter]("Filter for including sources and resources files from default directories.").withRank(CSetting)
|
||||
|
|
@ -157,7 +174,7 @@ object Keys {
|
|||
val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
|
||||
val crossPaths = settingKey[Boolean]("If true, enables cross paths, which distinguish input and output directories for cross-building.").withRank(ASetting)
|
||||
val taskTemporaryDirectory = settingKey[File]("Directory used for temporary files for tasks that is deleted after each task execution.").withRank(DSetting)
|
||||
val outputs = taskKey[Seq[Glob]]("Describes the output files of a task")
|
||||
val fileOutputs = taskKey[Seq[Glob]]("Describes the output files of a task")
|
||||
|
||||
// Generators
|
||||
val sourceGenerators = settingKey[Seq[Task[Seq[File]]]]("List of tasks that generate sources.").withRank(CSetting)
|
||||
|
|
@ -332,6 +349,7 @@ object Keys {
|
|||
val internalDependencyAsJars = taskKey[Classpath]("The internal (inter-project) classpath as JARs.")
|
||||
val dependencyClasspathAsJars = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies, all as JARs.")
|
||||
val fullClasspathAsJars = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies, all as JARs.")
|
||||
val internalDependencyConfigurations = settingKey[Seq[(ProjectRef, Set[String])]]("The project configurations that this configuration depends on")
|
||||
|
||||
val internalConfigurationMap = settingKey[Configuration => Configuration]("Maps configurations to the actual configuration used to define the classpath.").withRank(CSetting)
|
||||
val classpathConfiguration = taskKey[Configuration]("The configuration used to define the classpath.").withRank(CTask)
|
||||
|
|
@ -478,6 +496,8 @@ object Keys {
|
|||
"Provides a view into the file system that may or may not cache the tree in memory",
|
||||
1000
|
||||
)
|
||||
private[sbt] val dynamicDependency = settingKey[Unit]("Leaves a breadcrumb that the scoped task is evaluated inside of a dynamic task")
|
||||
private[sbt] val transitiveClasspathDependency = settingKey[Unit]("Leaves a breadcrumb that the scoped task has transitive classpath dependencies")
|
||||
|
||||
val stateStreams = AttributeKey[Streams]("stateStreams", "Streams manager, which provides streams for different contexts. Setting this on State will override the default Streams implementation.")
|
||||
val resolvedScoped = Def.resolvedScoped
|
||||
|
|
|
|||
|
|
@ -17,12 +17,13 @@ import sbt.Project.LoadAction
|
|||
import sbt.compiler.EvalImports
|
||||
import sbt.internal.Aggregation.AnyKeys
|
||||
import sbt.internal.CommandStrings.BootCommand
|
||||
import sbt.internal.FileManagement.CopiedFileTreeRepository
|
||||
import sbt.internal._
|
||||
import sbt.internal.inc.ScalaInstance
|
||||
import sbt.internal.util.Types.{ const, idFun }
|
||||
import sbt.internal.util._
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.io.IO
|
||||
import sbt.io._
|
||||
import sbt.io.syntax._
|
||||
import sbt.util.{ Level, Logger, Show }
|
||||
import xsbti.compile.CompilerCache
|
||||
|
|
@ -423,13 +424,7 @@ object BuiltinCommands {
|
|||
s
|
||||
}
|
||||
|
||||
def continuous: Command = Watched.continuous { (state: State, command: String) =>
|
||||
val extracted = Project.extract(state)
|
||||
val (s, watchConfig) = extracted.runTask(Keys.watchConfig, state)
|
||||
val updateState =
|
||||
(runCommand: () => State) => MainLoop.processCommand(Exec(command, None), s, runCommand)
|
||||
(s, watchConfig, updateState)
|
||||
}
|
||||
def continuous: Command = Continuous.continuous
|
||||
|
||||
private[this] def loadedEval(s: State, arg: String): Unit = {
|
||||
val extracted = Project extract s
|
||||
|
|
@ -853,23 +848,31 @@ object BuiltinCommands {
|
|||
}
|
||||
s.put(Keys.stateCompilerCache, cache)
|
||||
}
|
||||
private[sbt] val rawGlobalFileTreeRepository = AttributeKey[FileTreeRepository[FileAttributes]](
|
||||
"raw-global-file-tree-repository",
|
||||
"Provides a view into the file system that may or may not cache the tree in memory",
|
||||
1000
|
||||
)
|
||||
private[sbt] def registerGlobalCaches(s: State): State =
|
||||
try {
|
||||
val extracted = Project.extract(s)
|
||||
val cleanedUp = new AtomicBoolean(false)
|
||||
def cleanup(): Unit = {
|
||||
s.get(Keys.globalFileTreeRepository).foreach(_.close())
|
||||
s.attributes.remove(Keys.globalFileTreeRepository)
|
||||
s.get(rawGlobalFileTreeRepository).foreach(_.close())
|
||||
s.get(Keys.taskRepository).foreach(_.close())
|
||||
s.attributes.remove(Keys.taskRepository)
|
||||
()
|
||||
}
|
||||
cleanup()
|
||||
val fileTreeRepository = FileManagement.defaultFileTreeRepository(s, extracted)
|
||||
val newState = s.addExitHook(if (cleanedUp.compareAndSet(false, true)) cleanup())
|
||||
newState
|
||||
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
|
||||
val fileCache = System.getProperty("sbt.io.filecache", "validate")
|
||||
val newState = s
|
||||
.addExitHook(if (cleanedUp.compareAndSet(false, true)) cleanup())
|
||||
.put(Keys.taskRepository, new TaskRepository.Repr)
|
||||
.put(Keys.globalFileTreeRepository, fileTreeRepository)
|
||||
.put(rawGlobalFileTreeRepository, fileTreeRepository)
|
||||
if (fileCache == "false" || (fileCache != "true" && Util.isWindows)) newState
|
||||
else {
|
||||
val copied = new CopiedFileTreeRepository(fileTreeRepository)
|
||||
newState.put(Keys.globalFileTreeRepository, copied)
|
||||
}
|
||||
} catch {
|
||||
case NonFatal(_) => s
|
||||
}
|
||||
|
|
|
|||
|
|
@ -88,7 +88,8 @@ object ScriptedPlugin extends AutoPlugin {
|
|||
val pub = (publishLocal).value
|
||||
use(analysis, pub)
|
||||
},
|
||||
scripted := scriptedTask.evaluated
|
||||
scripted := scriptedTask.evaluated,
|
||||
watchTriggers in scripted += sbtTestDirectory.value ** AllPassFilter
|
||||
)
|
||||
|
||||
private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] =
|
||||
|
|
|
|||
|
|
@ -0,0 +1,20 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
final class StateTransform(val state: State) {
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case that: StateTransform => this.state == that.state
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = state.hashCode
|
||||
override def toString: String = s"StateTransform($state)"
|
||||
}
|
||||
object StateTransform {
|
||||
def apply(state: State): State = state
|
||||
}
|
||||
|
|
@ -0,0 +1,393 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
import java.io.InputStream
|
||||
|
||||
import sbt.BasicCommandStrings.ContinuousExecutePrefix
|
||||
import sbt.internal.FileAttributes
|
||||
import sbt.internal.LabeledFunctions._
|
||||
import sbt.internal.util.{ JLine, Util }
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util.complete.Parser._
|
||||
import sbt.io.FileEventMonitor.{ Creation, Deletion, Event, Update }
|
||||
import sbt.util.{ Level, Logger }
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
object Watch {
|
||||
|
||||
/**
|
||||
* This trait is used to control the state of [[Watch.apply]]. The [[Watch.Trigger]] action
|
||||
* indicates that [[Watch.apply]] should re-run the input task. The [[Watch.CancelWatch]]
|
||||
* actions indicate that [[Watch.apply]] should exit and return the [[Watch.CancelWatch]]
|
||||
* instance that caused the function to exit. The [[Watch.Ignore]] action is used to indicate
|
||||
* that the method should keep polling for new actions.
|
||||
*/
|
||||
sealed trait Action
|
||||
|
||||
/**
|
||||
* Provides a default `Ordering` for actions. Lower values correspond to higher priority actions.
|
||||
* [[CancelWatch]] is higher priority than [[ContinueWatch]].
|
||||
*/
|
||||
object Action {
|
||||
implicit object ordering extends Ordering[Action] {
|
||||
override def compare(left: Action, right: Action): Int = (left, right) match {
|
||||
case (a: ContinueWatch, b: ContinueWatch) => ContinueWatch.ordering.compare(a, b)
|
||||
case (_: ContinueWatch, _: CancelWatch) => 1
|
||||
case (a: CancelWatch, b: CancelWatch) => CancelWatch.ordering.compare(a, b)
|
||||
case (_: CancelWatch, _: ContinueWatch) => -1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should stop.
|
||||
*/
|
||||
sealed trait CancelWatch extends Action
|
||||
|
||||
/**
|
||||
* Action that does not terminate the watch but might trigger a build.
|
||||
*/
|
||||
sealed trait ContinueWatch extends Action
|
||||
|
||||
/**
|
||||
* Provides a default `Ordering` for classes extending [[ContinueWatch]]. [[Trigger]] is higher
|
||||
* priority than [[Ignore]].
|
||||
*/
|
||||
object ContinueWatch {
|
||||
|
||||
/**
|
||||
* A default `Ordering` for [[ContinueWatch]]. [[Trigger]] is higher priority than [[Ignore]].
|
||||
*/
|
||||
implicit object ordering extends Ordering[ContinueWatch] {
|
||||
override def compare(left: ContinueWatch, right: ContinueWatch): Int = left match {
|
||||
case Ignore => if (right == Ignore) 0 else 1
|
||||
case Trigger => if (right == Trigger) 0 else -1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should stop.
|
||||
*/
|
||||
case object CancelWatch extends CancelWatch {
|
||||
|
||||
/**
|
||||
* A default `Ordering` for [[ContinueWatch]]. The priority of each type of [[CancelWatch]]
|
||||
* is reflected by the ordering of the case statements in the [[ordering.compare]] method,
|
||||
* e.g. [[Custom]] is higher priority than [[HandleError]].
|
||||
*/
|
||||
implicit object ordering extends Ordering[CancelWatch] {
|
||||
override def compare(left: CancelWatch, right: CancelWatch): Int = left match {
|
||||
// Note that a negative return value means the left CancelWatch is preferred to the right
|
||||
// CancelWatch while the inverse is true for a positive return value. This logic could
|
||||
// likely be simplified, but the pattern matching approach makes it very clear what happens
|
||||
// for each type of Action.
|
||||
case _: Custom =>
|
||||
right match {
|
||||
case _: Custom => 0
|
||||
case _ => -1
|
||||
}
|
||||
case _: HandleError =>
|
||||
right match {
|
||||
case _: Custom => 1
|
||||
case _: HandleError => 0
|
||||
case _ => -1
|
||||
}
|
||||
case _: Run =>
|
||||
right match {
|
||||
case _: Run => 0
|
||||
case CancelWatch | Reload => -1
|
||||
case _ => 1
|
||||
}
|
||||
case CancelWatch =>
|
||||
right match {
|
||||
case CancelWatch => 0
|
||||
case Reload => -1
|
||||
case _ => 1
|
||||
}
|
||||
case Reload => if (right == Reload) 0 else 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that indicates that an error has occurred. The watch will be terminated when this action
|
||||
* is produced.
|
||||
*/
|
||||
final class HandleError(val throwable: Throwable) extends CancelWatch {
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case that: HandleError => this.throwable == that.throwable
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = throwable.hashCode
|
||||
override def toString: String = s"HandleError($throwable)"
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should continue as though nothing happened. This may be
|
||||
* because, for example, no user input was yet available.
|
||||
*/
|
||||
case object Ignore extends ContinueWatch
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch should pause while the build is reloaded. This is used to
|
||||
* automatically reload the project when the build files (e.g. build.sbt) are changed.
|
||||
*/
|
||||
case object Reload extends CancelWatch
|
||||
|
||||
/**
|
||||
* Action that indicates that we should exit and run the provided command.
|
||||
*
|
||||
* @param commands the commands to run after we exit the watch
|
||||
*/
|
||||
final class Run(val commands: String*) extends CancelWatch {
|
||||
override def toString: String = s"Run(${commands.mkString(", ")})"
|
||||
}
|
||||
// For now leave this private in case this isn't the best unapply type signature since it can't
|
||||
// be evolved in a binary compatible way.
|
||||
private object Run {
|
||||
def unapply(r: Run): Option[List[Exec]] = Some(r.commands.toList.map(Exec(_, None)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Action that indicates that the watch process should re-run the command.
|
||||
*/
|
||||
case object Trigger extends ContinueWatch
|
||||
|
||||
/**
|
||||
* A user defined Action. It is not sealed so that the user can create custom instances. If
|
||||
* the onStart or nextAction function passed into [[Watch.apply]] returns [[Watch.Custom]], then
|
||||
* the watch will terminate.
|
||||
*/
|
||||
trait Custom extends CancelWatch
|
||||
|
||||
private type NextAction = () => Watch.Action
|
||||
|
||||
/**
|
||||
* Runs a task and then blocks until the task is ready to run again or we no longer wish to
|
||||
* block execution.
|
||||
*
|
||||
* @param task the aggregated task to run with each iteration
|
||||
* @param onStart function to be invoked before we start polling for events
|
||||
* @param nextAction function that returns the next state transition [[Watch.Action]].
|
||||
* @return the exit [[Watch.Action]] that can be used to potentially modify the build state and
|
||||
* the count of the number of iterations that were run. If
|
||||
*/
|
||||
def apply(task: () => Unit, onStart: NextAction, nextAction: NextAction): Watch.Action = {
|
||||
def safeNextAction(delegate: NextAction): Watch.Action =
|
||||
try delegate()
|
||||
catch { case NonFatal(t) => new HandleError(t) }
|
||||
@tailrec def next(): Watch.Action = safeNextAction(nextAction) match {
|
||||
// This should never return Ignore due to this condition.
|
||||
case Ignore => next()
|
||||
case action => action
|
||||
}
|
||||
@tailrec def impl(): Watch.Action = {
|
||||
task()
|
||||
safeNextAction(onStart) match {
|
||||
case Ignore =>
|
||||
next() match {
|
||||
case Trigger => impl()
|
||||
case action => action
|
||||
}
|
||||
case Trigger => impl()
|
||||
case a => a
|
||||
}
|
||||
}
|
||||
try impl()
|
||||
catch { case NonFatal(t) => new HandleError(t) }
|
||||
}
|
||||
|
||||
private[sbt] object NullLogger extends Logger {
|
||||
override def trace(t: => Throwable): Unit = {}
|
||||
override def success(message: => String): Unit = {}
|
||||
override def log(level: Level.Value, message: => String): Unit = {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverse all of the events and find the one for which we give the highest
|
||||
* weight. Within the [[Action]] hierarchy:
|
||||
* [[Custom]] > [[HandleError]] > [[CancelWatch]] > [[Reload]] > [[Trigger]] > [[Ignore]]
|
||||
* the first event of each kind is returned so long as there are no higher priority events
|
||||
* in the collection. For example, if there are multiple events that all return [[Trigger]], then
|
||||
* the first one is returned. If, on the other hand, one of the events returns [[Reload]],
|
||||
* then that event "wins" and the [[Reload]] action is returned with the [[Event[FileAttributes]]] that triggered it.
|
||||
*
|
||||
* @param events the ([[Action]], [[Event[FileAttributes]]]) pairs
|
||||
* @return the ([[Action]], [[Event[FileAttributes]]]) pair with highest weight if the input events
|
||||
* are non empty.
|
||||
*/
|
||||
@inline
|
||||
private[sbt] def aggregate(
|
||||
events: Seq[(Action, Event[FileAttributes])]
|
||||
): Option[(Action, Event[FileAttributes])] =
|
||||
if (events.isEmpty) None else Some(events.minBy(_._1))
|
||||
|
||||
private implicit class StringToExec(val s: String) extends AnyVal {
|
||||
def toExec: Exec = Exec(s, None)
|
||||
}
|
||||
|
||||
private[sbt] def withCharBufferedStdIn[R](f: InputStream => R): R =
|
||||
if (!Util.isWindows) JLine.usingTerminal { terminal =>
|
||||
terminal.init()
|
||||
val in = terminal.wrapInIfNeeded(System.in)
|
||||
try {
|
||||
f(in)
|
||||
} finally {
|
||||
terminal.reset()
|
||||
}
|
||||
} else
|
||||
f(System.in)
|
||||
|
||||
/**
|
||||
* A constant function that returns [[Trigger]].
|
||||
*/
|
||||
final val trigger: (Int, Event[FileAttributes]) => Watch.Action = {
|
||||
(_: Int, _: Event[FileAttributes]) =>
|
||||
Trigger
|
||||
}.label("Watched.trigger")
|
||||
|
||||
def ifChanged(action: Action): (Int, Event[FileAttributes]) => Watch.Action =
|
||||
(_: Int, event: Event[FileAttributes]) =>
|
||||
event match {
|
||||
case Update(prev, cur, _) if prev.value != cur.value => action
|
||||
case _: Creation[_] | _: Deletion[_] => action
|
||||
case _ => Ignore
|
||||
}
|
||||
|
||||
/**
|
||||
* The minimum delay between build triggers for the same file. If the file is detected
|
||||
* to have changed within this period from the last build trigger, the event will be discarded.
|
||||
*/
|
||||
final val defaultAntiEntropy: FiniteDuration = 500.milliseconds
|
||||
|
||||
/**
|
||||
* The duration in wall clock time for which a FileEventMonitor will retain anti-entropy
|
||||
* events for files. This is an implementation detail of the FileEventMonitor. It should
|
||||
* hopefully not need to be set by the users. It is needed because when a task takes a long time
|
||||
* to run, it is possible that events will be detected for the file that triggered the build that
|
||||
* occur within the anti-entropy period. We still allow it to be configured to limit the memory
|
||||
* usage of the FileEventMonitor (but this is somewhat unlikely to be a problem).
|
||||
*/
|
||||
final val defaultAntiEntropyRetentionPeriod: FiniteDuration = 10.minutes
|
||||
|
||||
/**
|
||||
* The duration for which we delay triggering when a file is deleted. This is needed because
|
||||
* many programs implement save as a file move of a temporary file onto the target file.
|
||||
* Depending on how the move is implemented, this may be detected as a deletion immediately
|
||||
* followed by a creation. If we trigger immediately on delete, we may, for example, try to
|
||||
* compile before all of the source files are actually available. The longer this value is set,
|
||||
* the less likely we are to spuriously trigger a build before all files are available, but
|
||||
* the longer it will take to trigger a build when the file is actually deleted and not renamed.
|
||||
*/
|
||||
final val defaultDeletionQuarantinePeriod: FiniteDuration = 50.milliseconds
|
||||
|
||||
/**
|
||||
* Converts user input to an Action with the following rules:
|
||||
* 1) 'x' or 'X' will exit sbt
|
||||
* 2) 'r' or 'R' will trigger a build
|
||||
* 3) new line characters cancel the watch and return to the shell
|
||||
*/
|
||||
final val defaultInputParser: Parser[Action] = {
|
||||
val exitParser: Parser[Action] = chars("xX") ^^^ new Run("exit")
|
||||
val rebuildParser: Parser[Action] = chars("rR") ^^^ Trigger
|
||||
val cancelParser: Parser[Action] = chars(legal = "\n\r") ^^^ new Run("iflast shell")
|
||||
exitParser | rebuildParser | cancelParser
|
||||
}
|
||||
|
||||
private[this] val options = {
|
||||
val enter = "<enter>"
|
||||
val newLine = if (Util.isWindows) enter else ""
|
||||
val opts = Seq(
|
||||
s"$enter: return to the shell",
|
||||
s"'r$newLine': repeat the current command",
|
||||
s"'x$newLine': exit sbt"
|
||||
)
|
||||
s"Options:\n${opts.mkString(" ", "\n ", "")}"
|
||||
}
|
||||
private def waitMessage(project: String, commands: Seq[String]): String = {
|
||||
val plural = if (commands.size > 1) "s" else ""
|
||||
val cmds = commands.mkString("; ")
|
||||
s"Monitoring source files for updates...\n" +
|
||||
s"Project: $project\nCommand$plural: $cmds\n$options"
|
||||
}
|
||||
|
||||
/**
|
||||
* A function that prints out the current iteration count and gives instructions for exiting
|
||||
* or triggering the build.
|
||||
*/
|
||||
val defaultStartWatch: (Int, String, Seq[String]) => Option[String] = {
|
||||
(count: Int, project: String, commands: Seq[String]) =>
|
||||
Some(s"$count. ${waitMessage(project, commands)}")
|
||||
}.label("Watched.defaultStartWatch")
|
||||
|
||||
/**
|
||||
* Default no-op callback.
|
||||
*/
|
||||
val defaultOnEnter: () => Unit = () => {}
|
||||
|
||||
private[sbt] val defaultCommandOnTermination: (Action, String, Int, State) => State =
|
||||
onTerminationImpl(ContinuousExecutePrefix).label("Watched.defaultCommandOnTermination")
|
||||
private[sbt] val defaultTaskOnTermination: (Action, String, Int, State) => State =
|
||||
onTerminationImpl("watch", ContinuousExecutePrefix)
|
||||
.label("Watched.defaultTaskOnTermination")
|
||||
|
||||
/**
|
||||
* Default handler to transform the state when the watch terminates. When the [[Watch.Action]]
|
||||
* is [[Reload]], the handler will prepend the original command (prefixed by ~) to the
|
||||
* [[State.remainingCommands]] and then invoke the [[StateOps.reload]] method. When the
|
||||
* [[Watch.Action]] is [[HandleError]], the handler returns the result of [[StateOps.fail]].
|
||||
* When the [[Watch.Action]] is [[Watch.Run]], we add the commands specified by
|
||||
* [[Watch.Run.commands]] to the stat's remaining commands. Otherwise the original state is
|
||||
* returned.
|
||||
*/
|
||||
private def onTerminationImpl(
|
||||
watchPrefixes: String*
|
||||
): (Action, String, Int, State) => State = { (action, command, count, state) =>
|
||||
val prefix = watchPrefixes.head
|
||||
val rc = state.remainingCommands
|
||||
.filterNot(c => watchPrefixes.exists(c.commandLine.trim.startsWith))
|
||||
action match {
|
||||
case Run(commands) => state.copy(remainingCommands = commands ++ rc)
|
||||
case Reload =>
|
||||
state.copy(remainingCommands = "reload".toExec :: s"$prefix $count $command".toExec :: rc)
|
||||
case _: HandleError => state.copy(remainingCommands = rc).fail
|
||||
case _ => state.copy(remainingCommands = rc)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A constant function that always returns `None`. When
|
||||
* `Keys.watchTriggeredMessage := Watched.defaultOnTriggerMessage`, then nothing is logged when
|
||||
* a build is triggered.
|
||||
*/
|
||||
final val defaultOnTriggerMessage: (Int, Event[FileAttributes], Seq[String]) => Option[String] =
|
||||
((_: Int, e: Event[FileAttributes], commands: Seq[String]) => {
|
||||
val msg = s"Build triggered by ${e.entry.typedPath.toPath}. " +
|
||||
s"Running ${commands.mkString("'", "; ", "'")}."
|
||||
Some(msg)
|
||||
}).label("Watched.defaultOnTriggerMessage")
|
||||
|
||||
final val noTriggerMessage: (Int, Event[FileAttributes], Seq[String]) => Option[String] =
|
||||
(_, _, _) => None
|
||||
|
||||
/**
|
||||
* The minimum delay between file system polling when a `PollingWatchService` is used.
|
||||
*/
|
||||
final val defaultPollInterval: FiniteDuration = 500.milliseconds
|
||||
|
||||
/**
|
||||
* A constant function that returns an Option wrapped string that clears the screen when
|
||||
* written to stdout.
|
||||
*/
|
||||
final val clearOnTrigger: Int => Option[String] =
|
||||
((_: Int) => Some(Watched.clearScreen)).label("Watched.clearOnTrigger")
|
||||
}
|
||||
|
|
@ -61,6 +61,7 @@ object Clean {
|
|||
case f => f.toGlob
|
||||
} ++ cleanKeepGlobs.value
|
||||
val excludeFilter: TypedPath => Boolean = excludes.toTypedPathFilter
|
||||
// Don't use a regular logger because the logger actually writes to the target directory.
|
||||
val debug = (logLevel in scope).?.value.orElse(state.value.get(logLevel.key)) match {
|
||||
case Some(Level.Debug) =>
|
||||
(string: String) =>
|
||||
|
|
@ -71,7 +72,7 @@ object Clean {
|
|||
}
|
||||
val delete = tryDelete(debug)
|
||||
cleanFiles.value.sorted.reverseIterator.foreach(delete)
|
||||
(outputs in scope).value.foreach { g =>
|
||||
(fileOutputs in scope).value.foreach { g =>
|
||||
val filter: TypedPath => Boolean = {
|
||||
val globFilter = g.toTypedPathFilter
|
||||
tp =>
|
||||
|
|
|
|||
|
|
@ -9,38 +9,28 @@ package sbt
|
|||
package internal
|
||||
|
||||
import java.io.IOException
|
||||
import java.net.Socket
|
||||
import java.util.concurrent.ConcurrentLinkedQueue
|
||||
import java.util.concurrent.atomic._
|
||||
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import scala.annotation.tailrec
|
||||
import BasicKeys.{
|
||||
autoStartServer,
|
||||
fullServerHandlers,
|
||||
logLevel,
|
||||
serverAuthentication,
|
||||
serverConnectionType,
|
||||
serverHost,
|
||||
serverLogLevel,
|
||||
serverPort
|
||||
}
|
||||
import java.net.Socket
|
||||
|
||||
import sbt.Watched.NullLogger
|
||||
import sbt.BasicKeys._
|
||||
import sbt.Watch.NullLogger
|
||||
import sbt.internal.langserver.{ LogMessageParams, MessageType }
|
||||
import sbt.internal.server._
|
||||
import sbt.internal.util.codec.JValueFormats
|
||||
import sbt.internal.util.{ MainAppender, ObjectEvent, StringEvent }
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ Hash, IO }
|
||||
import sbt.protocol.{ EventMessage, ExecStatusEvent }
|
||||
import sbt.util.{ Level, LogExchange, Logger }
|
||||
import sjsonnew.JsonFormat
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe._
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import scala.concurrent.Await
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.{ Failure, Success, Try }
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ Hash, IO }
|
||||
import sbt.internal.server._
|
||||
import sbt.internal.langserver.{ LogMessageParams, MessageType }
|
||||
import sbt.internal.util.{ MainAppender, ObjectEvent, StringEvent }
|
||||
import sbt.internal.util.codec.JValueFormats
|
||||
import sbt.protocol.{ EventMessage, ExecStatusEvent }
|
||||
import sbt.util.{ Level, LogExchange, Logger }
|
||||
|
||||
/**
|
||||
* The command exchange merges multiple command channels (e.g. network and console),
|
||||
|
|
|
|||
|
|
@ -0,0 +1,944 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.io.{ ByteArrayInputStream, InputStream }
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
import sbt.BasicCommandStrings.{
|
||||
ContinuousExecutePrefix,
|
||||
FailureWall,
|
||||
continuousBriefHelp,
|
||||
continuousDetail
|
||||
}
|
||||
import sbt.BasicCommands.otherCommandParser
|
||||
import sbt.Def._
|
||||
import sbt.Scope.Global
|
||||
import sbt.internal.FileManagement.CopiedFileTreeRepository
|
||||
import sbt.internal.LabeledFunctions._
|
||||
import sbt.internal.io.WatchState
|
||||
import sbt.internal.util.complete.Parser._
|
||||
import sbt.internal.util.complete.{ Parser, Parsers }
|
||||
import sbt.internal.util.{ AttributeKey, Util }
|
||||
import sbt.io._
|
||||
import sbt.util.{ Level, _ }
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.concurrent.duration.FiniteDuration.FiniteDurationIsOrdered
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.Try
|
||||
|
||||
/**
|
||||
* Provides the implementation of the `~` command and `watch` task. The implementation is quite
|
||||
* complex because we have to parse the command string to figure out which tasks we want to run.
|
||||
* Using the tasks, we then have to extract all of the settings for the continuous build. Finally
|
||||
* we have to aggregate the settings for each task into an aggregated watch config that will
|
||||
* sanely watch multiple tasks and respond to file updates and user input in a way that makes
|
||||
* sense for each of the tasks that are being monitored.
|
||||
*
|
||||
* The behavior, on the other hand, should be fairly straightforward. For example, if a user
|
||||
* wants to continuously run the compile task for projects a and b, then we create FileEventMonitor
|
||||
* instances for each product and watch all of the directories that contain compile sources
|
||||
* (as well as the source directories of transitive inter-project classpath dependencies). If
|
||||
* a change is detected in project a, then we should trigger a build for both projects a and b.
|
||||
*
|
||||
* The semantics are flexible and may be adapted. For example, a user may want to watch two
|
||||
* unrelated tasks and only rebuild the task with sources that have been changed. This could be
|
||||
* handled at the `~` level, but it probably makes more sense to build a better task caching
|
||||
* system so that we don't rerun tasks if their inputs have not changed. As of 1.3.0, the
|
||||
* semantics match previous sbt versions as closely as possible while allowing the user more
|
||||
* freedom to adjust the behavior to best suit their use cases.
|
||||
*
|
||||
* For now Continuous extends DeprecatedContinuous to minimize the number of deprecation warnings
|
||||
* produced by this file. In sbt 2.0, the DeprecatedContinuous mixin should be eliminated and
|
||||
* the deprecated apis should no longer be supported.
|
||||
*
|
||||
*/
|
||||
object Continuous extends DeprecatedContinuous {
|
||||
|
||||
/**
|
||||
* Provides the dynamic inputs to the continuous build callbacks that cannot be stored as
|
||||
* settings. This wouldn't need to exist if there was a notion of a lazy setting in sbt.
|
||||
* @param logger the Logger
|
||||
* @param inputs the transitive task inputs
|
||||
* @param triggers the transitive task triggers
|
||||
*/
|
||||
final class Arguments private[Continuous] (
|
||||
val logger: Logger,
|
||||
val inputs: Seq[Glob],
|
||||
val triggers: Seq[Glob]
|
||||
)
|
||||
|
||||
/**
 * Provides a copy of System.in that can be scanned independently from System.in itself. This task
 * will only be valid during a continuous build started via `~` or the `watch` task. The
 * motivation is that a plugin may want to completely override the parsing of System.in which
 * is not straightforward since the default implementation is hard-wired to read from and
 * parse System.in. If an invalid parser is provided by [[Keys.watchInputParser]] and
 * [[Keys.watchInputStream]] is set to this task, then a custom parser can be provided via
 * [[Keys.watchInputHandler]] and the default System.in processing will not occur.
 *
 * @return the duplicated System.in; falls back to raw System.in when no [[DupedSystemIn]]
 *         attribute is present on the state (i.e. outside of a continuous build).
 */
def dupedSystemIn: Def.Initialize[Task[InputStream]] = Def.task {
  Keys.state.value.get(DupedSystemIn).map(_.duped).getOrElse(System.in)
}
|
||||
|
||||
/**
 * Builds a function from InputStream => [[Watch.Action]] backed by the supplied [[Parser]].
 * This is intended to be used to set the watchInputHandler setting for a task.
 *
 * @param parser the parser of watch actions
 * @return a labeled function that incrementally reads the stream and yields the next action
 */
def defaultInputHandler(parser: Parser[Watch.Action]): InputStream => Watch.Action = {
  // Accumulates the characters read so far so the parser can resume on partial input.
  val accumulated = new StringBuilder
  val anyChars = matched(Parsers.any.*)
  // Allow arbitrary characters before and after the action token.
  val wrappedParser = anyChars ~> parser ~ anyChars
  val handler = (stream: InputStream) => parse(stream, accumulated, wrappedParser)
  handler.label("Continuous.defaultInputHandler")
}
|
||||
|
||||
/**
 * Implements continuous execution. It works by first parsing the command and generating a task to
 * run with each build. It can run multiple commands that are separated by ";" in the command
 * input. If any of these commands are invalid, the watch will immediately exit.
 *
 * @return a Command that can be used by sbt to implement continuous builds.
 */
private[sbt] def continuous: Command =
  Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(continuousParser) {
    // initialCount lets a previous watch session's iteration count carry over on re-entry.
    case (state, (initialCount, command)) =>
      runToTermination(state, command, initialCount, isCommand = true)
  }
|
||||
|
||||
/**
 * The task implementation is quite similar to the command implementation. The tricky part is that
 * we have to modify the Task.info to apply the state transformation after the task completes.
 * The terminal state produced by [[runToTermination]] is wrapped in a [[StateTransform]] so the
 * task machinery can install it once evaluation finishes.
 *
 * @return the [[InputTask]]
 */
private[sbt] def continuousTask: Def.Initialize[InputTask[StateTransform]] =
  Def.inputTask {
    val (initialCount, command) = continuousParser.parsed
    new StateTransform(
      runToTermination(Keys.state.value, command, initialCount, isCommand = false)
    )
  }
|
||||
|
||||
// State attribute holding the DupedInputStream installed by runToTermination so that
// dupedSystemIn can hand out an independently scannable copy of System.in.
private[this] val DupedSystemIn =
  AttributeKey[DupedInputStream](
    "duped-system-in",
    "Receives a copy of all of the bytes from System.in.",
    10000
  )
|
||||
// Public task key exposing the dynamically discovered input globs of the running watch
// (see MacroDefaults.dynamicInputs, which references this key by its untyped tree).
val dynamicInputs = taskKey[FileTree.DynamicInputs](
  "The input globs found during task evaluation that are used in watch."
)
|
||||
/**
 * Default implementation for the [[dynamicInputs]] task: surfaces the
 * [[FileTree.DynamicInputs]] stored on the current [[State]] by the watch, or
 * [[FileTree.DynamicInputs.none]] when no watch has populated the attribute.
 */
def dynamicInputsImpl: Def.Initialize[Task[FileTree.DynamicInputs]] = Def.task {
  Keys.state.value.get(DynamicInputs) match {
    case Some(inputs) => inputs
    case None         => FileTree.DynamicInputs.none
  }
}
|
||||
// State attribute used to accumulate the globs (regular and dynamically discovered) that a
// watched task actually reads; populated per task in setup/makeTask and read by getConfig.
private[sbt] val DynamicInputs =
  AttributeKey[FileTree.DynamicInputs](
    "dynamic-inputs",
    "Stores the inputs (dynamic and regular) for a task",
    10000
  )
|
||||
|
||||
/*
 * Parses the `~` command line: an optional leading iteration count followed by the command
 * to run continuously. The count allows re-entering the watch with the previous count.
 */
private[this] val continuousParser: State => Parser[(Int, String)] = {
  def parseCount(digits: String): Int = Try(digits.toInt).getOrElse(0)
  val countParser: Parser[Int] =
    (Parsers.Space.* ~> matched(Parsers.Digit.+) <~ Parsers.Space.*).map(parseCount)
  state => {
    val commandParser = otherCommandParser(state)
    (countParser.? ~ commandParser).map {
      case (maybeCount, cmd) => (maybeCount.getOrElse(0), cmd)
    }
  }
}
|
||||
|
||||
/**
 * Gets the [[Config]] necessary to watch a task. It will extract the internal dependency
 * configurations for the task (these are the classpath dependencies specified by
 * [[Project.dependsOn]]). Using these configurations and the settings map, it walks the
 * dependency graph for the key and extracts all of the transitive globs specified by the
 * inputs and triggers keys. It also extracts the legacy globs specified by the watchSources key.
 *
 * @param state the current [[State]] instance.
 * @param scopedKey the [[ScopedKey]] instance corresponding to the task we're monitoring
 * @param compiledMap the map of all of the build settings
 * @param extracted the [[Extracted]] instance for the build
 * @param logger a logger that can be used while generating the [[Config]]
 * @return the [[Config]] instance
 */
private def getConfig(
    state: State,
    scopedKey: ScopedKey[_],
    compiledMap: CompiledMap,
)(implicit extracted: Extracted, logger: Logger): Config = {

  // Extract all of the globs that we will monitor during the continuous build.
  val (inputs, triggers) = {
    val configs = scopedKey.get(Keys.internalDependencyConfigurations).getOrElse(Nil)
    val args = new InputGraph.Arguments(scopedKey, extracted, compiledMap, logger, configs, state)
    InputGraph.transitiveGlobs(args)
  } match {
    case (i: Seq[Glob], t: Seq[Glob]) => (i.distinct.sorted, t.distinct.sorted)
  }

  val repository = getRepository(state)
  // NOTE(review): bare Option.get — relies on setup/makeTask having put DynamicInputs into
  // the state before any Config is built; throws if called on a state without it.
  val registeringSet = state.get(DynamicInputs).get
  registeringSet.value.foreach(_ ++= inputs)
  // Register every monitored glob with the repository so file events are produced for them.
  (inputs ++ triggers).foreach(repository.register(_: Glob))
  val watchSettings = new WatchSettings(scopedKey)
  new Config(
    scopedKey,
    repository,
    // Re-read the registering set on each call so globs added dynamically during task
    // evaluation (e.g. via Def.taskDyn) are included.
    () => registeringSet.value.fold(Nil: Seq[Glob])(_.toSeq).sorted,
    triggers,
    watchSettings
  )
}
|
||||
/**
 * Retrieves the global file tree repository from the state.
 *
 * @param state the current [[State]]
 * @return the repository stored under [[Keys.globalFileTreeRepository]]
 * @throws IllegalStateException if the state has not been initialized with a repository
 */
private def getRepository(state: State): FileTreeRepository[FileAttributes] =
  state.get(Keys.globalFileTreeRepository) match {
    case Some(repository) => repository
    case None =>
      throw new IllegalStateException(
        "Tried to access FileTreeRepository for uninitialized state"
      )
  }
|
||||
|
||||
/**
 * Prepares a command string for continuous execution: splits it via the multi-command parser,
 * converts each parseable command into a runnable thunk that reports success/failure, and
 * invokes the continuation with (all commands, transformed state, valid tasks, invalid commands).
 *
 * @param state the current [[State]]
 * @param command the raw (possibly ";"-separated) command string to watch
 * @param f continuation receiving the parsed commands, the transformed state, the runnable
 *          tasks as (command, state, () => succeeded) triples, and the unparseable commands
 * @return whatever the continuation returns
 */
private[sbt] def setup[R](state: State, command: String)(
    f: (Seq[String], State, Seq[(String, State, () => Boolean)], Seq[String]) => R
): R = {
  // First set up the state so that we can capture whether or not a task completed successfully
  // or if it threw an Exception (we lose the actual exception, but that should still be printed
  // to the console anyway).
  val failureCommandName = "SbtContinuousWatchOnFail"
  val onFail = Command.command(failureCommandName)(identity)
  // This adds the "SbtContinuousWatchOnFail" onFailure handler which allows us to determine
  // whether or not the last task successfully ran. It is used in the makeTask method below.
  val s = (FailureWall :: state).copy(
    onFailure = Some(Exec(failureCommandName, None)),
    definedCommands = state.definedCommands :+ onFail
  )

  /*
   * Takes a task string and converts it to an EitherTask. We cannot preserve either
   * the value returned by the task or any exception thrown by the task, but we can determine
   * whether or not the task ran successfully using the onFail command defined above. Each
   * task gets its own state with its own file tree repository. This is so that we can keep
   * track of what globs are actually used by the task to ensure that we monitor them, even
   * if they are not visible in the input graph due to the use of Def.taskDyn.
   */
  def makeTask(cmd: String): (String, State, () => Boolean) = {
    // Fresh DynamicInputs per command so each task tracks its own glob usage.
    val newState = s.put(DynamicInputs, FileTree.DynamicInputs.empty)
    val task = Parser
      .parse(cmd, Command.combine(newState.definedCommands)(newState))
      .getOrElse(
        throw new IllegalStateException(
          "No longer able to parse command after transforming state"
        )
      )
    (
      cmd,
      newState,
      () => {
        // The command failed iff the failure handler left failureCommandName in the queue.
        MainLoop
          .processCommand(Exec(cmd, None), newState, task)
          .remainingCommands
          .forall(_.commandLine != failureCommandName)
      }
    )
  }

  // We support multiple commands in watch, so it's necessary to run the command string through
  // the multi parser.
  val trimmed = command.trim
  val commands = Parser.parse(trimmed, BasicCommands.multiParserImpl(Some(s))) match {
    case Left(_)  => trimmed :: Nil
    case Right(c) => c
  }

  // Convert the command strings to runnable tasks, which are represented by
  // () => Try[Boolean].
  val taskParser = Command.combine(s.definedCommands)(s)
  // This specified either the task corresponding to a command or the command itself if the
  // the command cannot be converted to a task.
  val (invalid, valid) =
    commands.foldLeft((Nil: Seq[String], Nil: Seq[(String, State, () => Boolean)])) {
      case ((i, v), cmd) =>
        Parser.parse(cmd, taskParser) match {
          case Right(_) => (i, v :+ makeTask(cmd))
          case Left(c)  => (i :+ c, v)
        }
    }
  f(commands, s, valid, invalid)
}
|
||||
|
||||
/**
 * Drives a continuous build to completion: installs (or creates) the global file tree
 * repository, dupes System.in so handlers can scan it independently, builds the per-task
 * [[Config]]s and aggregated [[Callbacks]], then enters the [[Watch]] state machine until a
 * terminating action is returned.
 *
 * @param state the current [[State]]
 * @param command the (possibly multi) command string to run on each trigger
 * @param count the initial iteration count (lets a re-entered watch resume its count)
 * @param isCommand true when invoked via the `~` command, false when via the watch task;
 *                  selects the default termination handler
 * @return the state produced by the termination callback (or `state.fail` on invalid commands)
 */
private[sbt] def runToTermination(
    state: State,
    command: String,
    count: Int,
    isCommand: Boolean
): State = Watch.withCharBufferedStdIn { in =>
  val duped = new DupedInputStream(in)
  implicit val extracted: Extracted = Project.extract(state)
  // Reuse the repository already on the state; otherwise build one (polling when the
  // sbt.watch.mode system property requests it) and attach it to the state.
  val (stateWithRepo, repo) = state.get(Keys.globalFileTreeRepository) match {
    case Some(r) => (state, r)
    case _ =>
      val repo = if ("polling" == System.getProperty("sbt.watch.mode")) {
        val service =
          new PollingWatchService(extracted.getOpt(Keys.pollInterval).getOrElse(500.millis))
        FileTreeRepository.legacy(FileAttributes.default _, (_: Any) => {}, service)
      } else {
        state
          .get(BuiltinCommands.rawGlobalFileTreeRepository)
          .map(new CopiedFileTreeRepository(_))
          .getOrElse(FileTreeRepository.default(FileAttributes.default))
      }
      (state.put(Keys.globalFileTreeRepository, repo), repo)
  }
  try {
    setup(stateWithRepo.put(DupedSystemIn, duped), command) { (commands, s, valid, invalid) =>
      EvaluateTask.withStreams(extracted.structure, s)(_.use(Keys.streams in Global) { streams =>
        implicit val logger: Logger = streams.log
        if (invalid.isEmpty) {
          val currentCount = new AtomicInteger(count)
          val configs = getAllConfigs(valid.map(v => v._1 -> v._2))
          val callbacks = aggregate(configs, logger, in, s, currentCount, isCommand, commands)
          val task = () => {
            currentCount.getAndIncrement()
            // abort as soon as one of the tasks fails
            valid.takeWhile(_._3.apply())
            ()
          }
          callbacks.onEnter()
          // Here we enter the Watched.watch state machine. We will not return until one of the
          // state machine callbacks returns Watched.CancelWatch, Watched.Custom, Watched.HandleError
          // or Watched.Reload. The task defined above will be run at least once. It will be run
          // additional times whenever the state transition callbacks return Watched.Trigger.
          try {
            val terminationAction = Watch(task, callbacks.onStart, callbacks.nextEvent)
            callbacks.onTermination(terminationAction, command, currentCount.get(), state)
          } finally {
            configs.foreach(_.repository.close())
            callbacks.onExit()
          }
        } else {
          // At least one of the commands in the multi command string could not be parsed, so we
          // log an error and exit.
          val invalidCommands = invalid.mkString("'", "', '", "'")
          logger.error(s"Terminating watch due to invalid command(s): $invalidCommands")
          state.fail
        }
      })
    }
  } finally repo.close()
}
|
||||
|
||||
/**
 * Collects all of the scoped keys that are used to delegate the multi commands. These are
 * necessary to extract all of the transitive globs that we need to monitor during watch.
 *
 * @param command the command string to resolve
 * @param state the current [[State]]
 * @return the aggregated scoped keys for the command
 * @throws IllegalStateException if no scoped key can be extracted from the command
 */
private def parseCommand(command: String, state: State): Seq[ScopedKey[_]] = {
  // We have to add the <~ Parsers.any.* to ensure that we're able to extract the input key
  // from input tasks.
  val scopedKeyParser: Parser[Seq[ScopedKey[_]]] = Act.aggregatedKeyParser(state) <~ Parsers.any.*
  // Parser.parse yields an Either, so Right/Left is exhaustive: the previous
  // `case _ => Nil` fallback was unreachable dead code and has been removed, along with a
  // redundant (type-erased, unchecked) pattern annotation on the Right case.
  Parser.parse(command, scopedKeyParser) match {
    case Right(scopedKeys) => scopedKeys
    case Left(e) =>
      throw new IllegalStateException(s"Error attempting to extract scope from $command: $e.")
  }
}
|
||||
/**
 * Builds a [[Config]] for every scoped key reachable from each (command, state) pair.
 * All commands are resolved to scoped keys up front so that parse errors surface before
 * any config is constructed; the compiled settings map is shared across all of them.
 */
private def getAllConfigs(
    inputs: Seq[(String, State)]
)(implicit extracted: Extracted, logger: Logger): Seq[Config] = {
  val keysPerCommand = inputs.map { case (cmd, st) => st -> parseCommand(cmd, st) }
  val compiledMap = InputGraph.compile(extracted.structure)
  keysPerCommand.flatMap {
    case (st, scopedKeys) => scopedKeys.map(getConfig(st, _, compiledMap))
  }
}
|
||||
|
||||
/**
 * Bundles the aggregated callback functions that drive the watch state machine
 * (see [[aggregate]] for how these are built from the individual [[Config]]s).
 *
 * @param nextEvent polls input and file events and returns the next [[Watch.Action]]
 * @param onEnter invoked once before the watch starts
 * @param onExit invoked once after the watch terminates
 * @param onStart invoked at the start of each iteration
 * @param onTermination maps the terminating action, command, count and state to the final state
 */
private class Callbacks(
    val nextEvent: () => Watch.Action,
    val onEnter: () => Unit,
    val onExit: () => Unit,
    val onStart: () => Watch.Action,
    val onTermination: (Watch.Action, String, Int, State) => State
)
|
||||
|
||||
/**
 * Aggregates a collection of [[Config]] instances into a single instance of [[Callbacks]].
 * This allows us to monitor and respond to changes for all of
 * the inputs and triggers for each of the tasks that we are monitoring in the continuous build.
 * To monitor all of the inputs and triggers, it creates a [[FileEventMonitor]] for each task
 * and then aggregates each of the individual [[FileEventMonitor]] instances into an aggregated
 * instance. It aggregates all of the event callbacks into a single callback that delegates
 * to each of the individual callbacks. For the callbacks that return a [[Watch.Action]],
 * the aggregated callback will select the minimum [[Watch.Action]] returned where the ordering
 * is such that the highest priority [[Watch.Action]] have the lowest values. Finally, to
 * handle user input, we read from the provided input stream and buffer the result. Each
 * task's input parser is then applied to the buffered result and, again, we return the minimum
 * [[Watch.Action]] returned by the parsers (when the parsers fail, they just return
 * [[Watch.Ignore]], which is the lowest priority [[Watch.Action]]).
 *
 * @param configs the [[Config]] instances
 * @param rawLogger the default sbt logger instance
 * @param inputStream the stream (duped System.in) polled for user input
 * @param state the current state
 * @param count the mutable iteration counter shared across callbacks
 * @param isCommand whether the watch was started by the `~` command (vs. the watch task)
 * @param commands the command strings being watched (used in start/trigger messages)
 * @param extracted the [[Extracted]] instance for the current build
 * @return the [[Callbacks]] to pass into [[Watch.apply]]
 */
private def aggregate(
    configs: Seq[Config],
    rawLogger: Logger,
    inputStream: InputStream,
    state: State,
    count: AtomicInteger,
    isCommand: Boolean,
    commands: Seq[String]
)(
    implicit extracted: Extracted
): Callbacks = {
  val project = extracted.currentRef.project
  // Use the most verbose configured level so that any task's debug setting takes effect.
  val logger = setLevel(rawLogger, configs.map(_.watchSettings.logLevel).min, state)
  val onEnter = () => configs.foreach(_.watchSettings.onEnter())
  val onStart: () => Watch.Action = getOnStart(project, commands, configs, rawLogger, count)
  val nextInputEvent: () => Watch.Action = parseInputEvents(configs, state, inputStream, logger)
  val (nextFileEvent, cleanupFileMonitor): (() => Option[(Event, Watch.Action)], () => Unit) =
    getFileEvents(configs, rawLogger, state, count, commands)
  val nextEvent: () => Watch.Action =
    combineInputAndFileEvents(nextInputEvent, nextFileEvent, logger)
  val onExit = () => {
    cleanupFileMonitor()
    configs.foreach(_.watchSettings.onExit())
  }
  val onTermination = getOnTermination(configs, isCommand)
  new Callbacks(nextEvent, onEnter, onExit, onStart, onTermination)
}
|
||||
|
||||
/**
 * Combines the user-provided onTermination callbacks of all configs into one. When several
 * are defined, they are chained so each callback receives the state produced by the previous
 * one (applied in config order). When none are defined, falls back to the default handler
 * for the command (`~`) or task (`watch`) entry point.
 *
 * @param configs the [[Config]] instances
 * @param isCommand selects the default handler when no callbacks are configured
 * @return the combined termination handler
 */
private def getOnTermination(
    configs: Seq[Config],
    isCommand: Boolean
): (Watch.Action, String, Int, State) => State = {
  configs.flatMap(_.watchSettings.onTermination).distinct match {
    case Seq(head, tail @ _*) =>
      tail.foldLeft(head) {
        case (onTermination, configOnTermination) =>
          (action, cmd, count, state) =>
            configOnTermination(action, cmd, count, onTermination(action, cmd, count, state))
      }
    case _ =>
      if (isCommand) Watch.defaultCommandOnTermination else Watch.defaultTaskOnTermination
  }
}
|
||||
|
||||
/**
 * Builds the aggregated onStart callback: each config contributes either its user-provided
 * onStart/onIteration callback or a default that logs the start-of-watch message. The
 * aggregate returns the minimum (highest-priority) action across all configs.
 *
 * @param project the current project name (used in the default start message)
 * @param commands the watched command strings (used in the default start message)
 * @param configs the [[Config]] instances
 * @param logger the logger for start messages
 * @param count the shared iteration counter
 * @return the aggregated start callback
 */
private def getOnStart(
    project: String,
    commands: Seq[String],
    configs: Seq[Config],
    logger: Logger,
    count: AtomicInteger
): () => Watch.Action = {
  val f = configs.map { params =>
    val ws = params.watchSettings
    ws.onStart.map(_.apply(params.arguments(logger))).getOrElse { () =>
      ws.onIteration.map(_(count.get)).getOrElse {
        if (configs.size == 1) { // Only allow custom start messages for single tasks
          ws.startMessage match {
            case Some(Left(sm))  => logger.info(sm(params.watchState(count.get())))
            case Some(Right(sm)) => sm(count.get(), project, commands).foreach(logger.info(_))
            case None =>
              Watch.defaultStartWatch(count.get(), project, commands).foreach(logger.info(_))
          }
        }
        Watch.Ignore
      }
    }
  }
  () => {
    val res = f.view.map(_()).min
    // Print the default watch message if there are multiple tasks
    if (configs.size > 1)
      Watch.defaultStartWatch(count.get(), project, commands).foreach(logger.info(_))
    res
  }
}
|
||||
/**
 * Builds the file-event half of the watch: a poll function that returns the next
 * (event, action) pair — selecting the minimum (highest-priority) action across all configs —
 * and a cleanup function that closes the underlying monitors. Optionally also watches the
 * meta-build sources (when every config enables trackMetaBuild) so edits to the build
 * definition can trigger a Reload.
 *
 * @param configs the [[Config]] instances
 * @param logger the logger for event debugging and trigger messages
 * @param state the current state (used to reach the global file tree repository)
 * @param count the shared iteration counter
 * @param commands the watched command strings (used in trigger messages)
 * @return (poll-next-file-event, close-monitors)
 */
private def getFileEvents(
    configs: Seq[Config],
    logger: Logger,
    state: State,
    count: AtomicInteger,
    commands: Seq[String]
)(implicit extracted: Extracted): (() => Option[(Event, Watch.Action)], () => Unit) = {
  val trackMetaBuild = configs.forall(_.watchSettings.trackMetaBuild)
  val buildGlobs =
    if (trackMetaBuild) extracted.getOpt(Keys.fileInputs in Keys.settingsData).getOrElse(Nil)
    else Nil
  val buildFilter = buildGlobs.toEntryFilter

  // On Windows, file events tend to arrive in bursts, so only trigger on actual changes.
  val defaultTrigger = if (Util.isWindows) Watch.ifChanged(Watch.Trigger) else Watch.trigger
  // Maps an event to (event, action), taking the minimum action over all configs. Each
  // config either supplies a custom onEvent or classifies the event as input/trigger/
  // meta-build and delegates to the corresponding callback.
  val onEvent: Event => (Event, Watch.Action) = {
    val f = configs.map { params =>
      val ws = params.watchSettings
      val oe = ws.onEvent
        .map(_.apply(params.arguments(logger)))
        .getOrElse {
          val onInputEvent = ws.onInputEvent.getOrElse(defaultTrigger)
          val onTriggerEvent = ws.onTriggerEvent.getOrElse(defaultTrigger)
          val onMetaBuildEvent = ws.onMetaBuildEvent.getOrElse(Watch.ifChanged(Watch.Reload))
          val triggerFilter = params.triggers.toEntryFilter
          val excludedBuildFilter = buildFilter
          event: Event =>
            // inputs() is re-read per event so dynamically registered globs are honored.
            val inputFilter = params.inputs().toEntryFilter
            val c = count.get()
            val entry = event.entry
            Seq[Watch.Action](
              if (inputFilter(entry)) onInputEvent(c, event) else Watch.Ignore,
              if (triggerFilter(entry)) onTriggerEvent(c, event) else Watch.Ignore,
              if (excludedBuildFilter(entry)) onMetaBuildEvent(c, event) else Watch.Ignore
            ).min
        }
      event: Event =>
        event -> oe(event)
    }
    event: Event =>
      f.view.map(_.apply(event)).minBy(_._2)
  }
  val monitor: FileEventMonitor[FileAttributes] = new FileEventMonitor[FileAttributes] {

    /**
     * Create a filtered monitor that only accepts globs that have been registered for the
     * task at hand.
     * @param monitor the file event monitor to filter
     * @param globs the globs to accept. This must be a function because we want to be able
     *              to accept globs that are added dynamically as part of task evaluation.
     * @return the filtered FileEventMonitor.
     */
    private def filter(
        monitor: FileEventMonitor[FileAttributes],
        globs: () => Seq[Glob]
    ): FileEventMonitor[FileAttributes] = {
      new FileEventMonitor[FileAttributes] {
        override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] =
          monitor.poll(duration).filter(e => globs().toEntryFilter(e.entry))
        override def close(): Unit = monitor.close()
      }
    }
    // TODO make this a normal monitor
    private[this] val monitors: Seq[FileEventMonitor[FileAttributes]] =
      configs.map { config =>
        // Create a logger with a scoped key prefix so that we can tell from which
        // monitor events occurred.
        val l = logger.withPrefix(config.key.show)
        val monitor: FileEventMonitor[FileAttributes] =
          FileManagement.monitor(config.repository, config.watchSettings.antiEntropy, l)
        val allGlobs: () => Seq[Glob] = () => (config.inputs() ++ config.triggers).distinct.sorted
        filter(monitor, allGlobs)
      } ++ (if (trackMetaBuild) {
              // Add one extra monitor over the meta-build sources backed by the global repo.
              val l = logger.withPrefix("meta-build")
              val antiEntropy = configs.map(_.watchSettings.antiEntropy).max
              val repo = getRepository(state)
              buildGlobs.foreach(repo.register)
              val monitor = FileManagement.monitor(repo, antiEntropy, l)
              filter(monitor, () => buildGlobs) :: Nil
            } else Nil)
    override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] = {
      // Poll each delegate without blocking; only sleep when nothing was pending so one
      // busy monitor cannot starve the others.
      val res = monitors.flatMap(_.poll(0.millis)).toSet.toVector
      if (res.isEmpty) Thread.sleep(duration.toMillis)
      res
    }
    override def close(): Unit = monitors.foreach(_.close())
  }
  val watchLogger: WatchLogger = msg => logger.debug(msg.toString)
  // Use the most conservative (max) of each per-config anti-entropy setting.
  val retentionPeriod = configs.map(_.watchSettings.antiEntropyRetentionPeriod).max
  val antiEntropy = configs.map(_.watchSettings.antiEntropy).max
  val quarantinePeriod = configs.map(_.watchSettings.deletionQuarantinePeriod).max
  val antiEntropyMonitor = FileEventMonitor.antiEntropy(
    monitor,
    antiEntropy,
    watchLogger,
    quarantinePeriod,
    retentionPeriod
  )
  /*
   * This is a callback that will be invoked whenever onEvent returns a Trigger action. The
   * motivation is to allow the user to specify this callback via setting so that, for example,
   * they can clear the screen when the build triggers.
   */
  val onTrigger: Event => Unit = { event: Event =>
    configs.foreach { params =>
      params.watchSettings.onTrigger.foreach(ot => ot(params.arguments(logger))(event))
    }
    if (configs.size == 1) {
      val config = configs.head
      config.watchSettings.triggerMessage match {
        case Left(tm)  => logger.info(tm(config.watchState(count.get())))
        case Right(tm) => tm(count.get(), event, commands).foreach(logger.info(_))
      }
    } else {
      Watch.defaultOnTriggerMessage(count.get(), event, commands).foreach(logger.info(_))
    }
  }

  (() => {
    val actions = antiEntropyMonitor.poll(2.milliseconds).map(onEvent)
    if (actions.exists(_._2 != Watch.Ignore)) {
      // Accumulate a "path -> action" summary for debugging while selecting the minimum
      // (highest-priority) action.
      val builder = new StringBuilder
      val min = actions.minBy {
        case (e, a) =>
          if (builder.nonEmpty) builder.append(", ")
          val path = e.entry.typedPath.toPath.toString
          builder.append(path)
          builder.append(" -> ")
          builder.append(a.toString)
          a
      }
      logger.debug(s"Received file event actions: $builder. Returning: $min")
      if (min._2 == Watch.Trigger) onTrigger(min._1)
      Some(min)
    } else None
  }, () => monitor.close())
}
|
||||
|
||||
/**
 * Each task has its own input parser that can be used to modify the watch based on the input
 * read from System.in as well as a custom task-specific input stream that can be used as
 * an alternative source of control. In this method, we create two functions for each task,
 * one from `String => Seq[Watch.Action]` and another from `() => Seq[Watch.Action]`.
 * Each of these functions is invoked to determine the next state transformation for the watch.
 * The first function is a task specific copy of System.in. For each task we keep a mutable
 * buffer of the characters previously seen from System.in. Every time we receive new characters
 * we update the buffer and then try to parse a Watch.Action for each task. Any trailing
 * characters are captured and can be used for the next trigger. Because each task has a local
 * copy of the buffer, we do not have to worry about one task breaking parsing of another. We
 * also provide an alternative per task InputStream that is read in a similar way except that
 * we don't need to copy the custom InputStream which allows the function to be
 * `() => Seq[Watch.Action]` which avoids actually exposing the InputStream anywhere.
 */
private def parseInputEvents(
    configs: Seq[Config],
    state: State,
    inputStream: InputStream,
    logger: Logger
)(
    implicit extracted: Extracted
): () => Watch.Action = {
  /*
   * This parses the buffer until all possible actions are extracted. By draining the input
   * to a state where it does not parse an action, we can wait until we receive new input
   * to attempt to parse again.
   */
  type ActionParser = String => Watch.Action
  // Transform the Config.watchSettings.inputParser instances to functions of type
  // String => Watch.Action. The String that is provided will contain any characters that
  // have been read from stdin. If there are any characters available, then it calls the
  // parse method with the InputStream set to a ByteArrayInputStream that wraps the input
  // string. The parse method then appends those bytes to a mutable buffer and attempts to
  // parse the buffer. To make this work with streaming input, we prefix the parser with any.*.
  // If the Config.watchSettings.inputStream is set, the same process is applied except that
  // instead of passing in the wrapped InputStream for the input string, we directly pass
  // in the inputStream provided by Config.watchSettings.inputStream.
  val inputHandlers: Seq[ActionParser] = configs.map { c =>
    val any = Parsers.any.*
    val inputParser = c.watchSettings.inputParser
    val parser = any ~> inputParser ~ matched(any)
    // Each parser gets its own copy of System.in that it can modify while parsing.
    val systemInBuilder = new StringBuilder
    def inputStream(string: String): InputStream = new ByteArrayInputStream(string.getBytes)
    // This string is provided in the closure below by reading from System.in
    val default: String => Watch.Action =
      string => parse(inputStream(string), systemInBuilder, parser)
    val alternative = c.watchSettings.inputStream
      .map { inputStreamKey =>
        val is = extracted.runTask(inputStreamKey, state)._2
        val handler = c.watchSettings.inputHandler.getOrElse(defaultInputHandler(inputParser))
        () => handler(is)
      }
      .getOrElse(() => Watch.Ignore)
    (string: String) => (default(string) :: alternative() :: Nil).min
  }
  () => {
    // Drain whatever is currently available on the duped System.in into a string that each
    // per-task handler parses against its own buffer.
    val stringBuilder = new StringBuilder
    while (inputStream.available > 0) stringBuilder += inputStream.read().toChar
    val newBytes = stringBuilder.toString
    val parse: ActionParser => Watch.Action = parser => parser(newBytes)
    val allEvents = inputHandlers.map(parse).filterNot(_ == Watch.Ignore)
    // Ignore actions were already filtered out above, so the previous
    // `exists(_ != Watch.Ignore)` check was redundant: nonEmpty is equivalent.
    if (allEvents.nonEmpty) {
      val res = allEvents.min
      logger.debug(s"Received input events: ${allEvents.mkString(",")}. Taking $res")
      res
    } else Watch.Ignore
  }
}
|
||||
|
||||
/**
 * Combines the input-event and file-event pollers into a single callback that returns the
 * minimum (highest-priority) [[Watch.Action]] of the two, logging which event won when both
 * produced something other than [[Watch.Ignore]].
 *
 * @param nextInputAction polls user input for the next action
 * @param nextFileEvent polls the file monitors for the next (event, action) pair
 * @param logger the logger for debug output
 * @return the combined poll function
 */
private def combineInputAndFileEvents(
    nextInputAction: () => Watch.Action,
    nextFileEvent: () => Option[(Event, Watch.Action)],
    logger: Logger
): () => Watch.Action = () => {
  // The original routed both results through a heterogeneous Seq, widening to Any and
  // requiring an inexhaustive, @unchecked pattern match to recover the types. Two direct
  // calls (input first, preserving the original evaluation order) are equivalent and type-safe.
  val inputAction: Watch.Action = nextInputAction()
  val fileEvent: Option[(Event, Watch.Action)] = nextFileEvent()
  val min: Watch.Action = (fileEvent.map(_._2).toSeq :+ inputAction).min
  lazy val inputMessage =
    s"Received input event: $inputAction." +
      (if (inputAction != min) s" Dropping in favor of file event: $min" else "")
  if (inputAction != Watch.Ignore) logger.debug(inputMessage)
  fileEvent
    .collect {
      case (event, action) if action != Watch.Ignore =>
        s"Received file event $action for ${event.entry.typedPath.toPath}." +
          (if (action != min) s" Dropping in favor of input event: $min" else "")
    }
    .foreach(logger.debug(_))
  min
}
|
||||
|
||||
/**
 * Incrementally parses a [[Watch.Action]] from a stream. One available character is appended
 * to the shared buffer per iteration and the buffer is re-parsed; on success the unconsumed
 * remainder is kept in the buffer for the next call. Tail-recurses while more input is
 * available and returns [[Watch.Ignore]] when the buffer cannot (yet) be parsed.
 *
 * @param is the stream to read from (only when bytes are available; never blocks)
 * @param builder the per-caller buffer of previously seen characters
 * @param parser parses an action plus the trailing unconsumed characters
 * @return the parsed action, or [[Watch.Ignore]] if none could be parsed
 */
@tailrec
private final def parse(
    is: InputStream,
    builder: StringBuilder,
    parser: Parser[(Watch.Action, String)]
): Watch.Action = {
  if (is.available > 0) builder += is.read().toChar
  Parser.parse(builder.toString, parser) match {
    case Right((action, rest)) =>
      // Keep the unparsed tail so it can seed the next parse attempt.
      builder.clear()
      builder ++= rest
      action
    case _ if is.available > 0 => parse(is, builder, parser)
    case _                     => Watch.Ignore
  }
}
|
||||
|
||||
/**
 * Generates a custom logger for the watch process that is able to log at a different level
 * from the provided logger.
 *
 * @param logger the delegate logger.
 * @param logLevel the log level for watch events
 * @param state the current state (used to read the delegate's configured log level)
 * @return the wrapped logger.
 */
private def setLevel(logger: Logger, logLevel: Level.Value, state: State): Logger = {
  val delegateLevel: Level.Value = state.get(Keys.logLevel.key).getOrElse(Level.Info)
  /*
   * The delegate logger may be set to, say, info level, but we want it to print out debug
   * messages if the logLevel variable above is Debug. To do this, we promote Debug messages
   * to the Info level (or Warn or Error if that's what the input logger is set to).
   */
  new Logger {
    override def trace(t: => Throwable): Unit = logger.trace(t)
    override def success(message: => String): Unit = logger.success(message)
    override def log(level: Level.Value, message: => String): Unit = {
      // Preserve the original severity in the message text when promoting the level.
      val levelString = if (level < delegateLevel) s"[$level] " else ""
      val newMessage = s"[watch] $levelString$message"
      val watchLevel = if (level < delegateLevel && level >= logLevel) delegateLevel else level
      logger.log(watchLevel, newMessage)
    }
  }
}
|
||||
|
||||
// Callback shape shared by the onInputEvent/onTriggerEvent/onMetaBuildEvent settings:
// (current build count, triggering event) => next watch action.
private type WatchOnEvent = (Int, Event) => Watch.Action
|
||||
|
||||
/**
|
||||
* Contains all of the user defined settings that will be used to build a [[Callbacks]]
|
||||
* instance that is used to produce the arguments to [[Watch.apply]]. The
|
||||
* callback settings (e.g. onEvent or onInputEvent) come in two forms: those that return a
|
||||
* function from [[Arguments]] => F for some function type `F` and those that directly return a function, e.g.
|
||||
* `(Int, Boolean) => Watch.Action`. The former are a low level interface that will usually
|
||||
* be unspecified and automatically filled in by [[Continuous.aggregate]]. The latter are
|
||||
* intended to be user configurable and will be scoped to the input [[ScopedKey]]. To ensure
|
||||
* that the scoping makes sense, we first try and extract the setting from the [[ScopedKey]]
|
||||
* instance's task scope, which is the scope with the task axis set to the task key. If that
|
||||
* fails, we fall back on the task axis. To make this concrete, to get the logLevel for
|
||||
* `foo / Compile / compile` (which is a TaskKey with scope `foo / Compile`), we first try and
|
||||
* get the setting in the `foo / Compile / compile` scope. If logLevel is not set at the task
|
||||
* level, then we fall back to the `foo / Compile` scope.
|
||||
*
|
||||
* This has to be done by manually extracting the settings via [[Extracted]] because there is
|
||||
* no good way to automatically add a [[WatchSettings]] setting to every task in the build.
|
||||
* Thankfully these map retrievals are reasonably fast so there is not a significant runtime
|
||||
* performance penalty for creating the [[WatchSettings]] this way. The drawback is that we
|
||||
* have to manually resolve the settings in multiple scopes which may lead to inconsistencies
|
||||
* with scope resolution elsewhere in sbt.
|
||||
*
|
||||
* @param key the [[ScopedKey]] instance that sets the [[Scope]] for the settings we're extracting
|
||||
* @param extracted the [[Extracted]] instance for the build
|
||||
*/
|
||||
private final class WatchSettings private[Continuous] (val key: ScopedKey[_])(
|
||||
implicit extracted: Extracted
|
||||
) {
|
||||
val antiEntropy: FiniteDuration =
|
||||
key.get(Keys.watchAntiEntropy).getOrElse(Watch.defaultAntiEntropy)
|
||||
val antiEntropyRetentionPeriod: FiniteDuration =
|
||||
key
|
||||
.get(Keys.watchAntiEntropyRetentionPeriod)
|
||||
.getOrElse(Watch.defaultAntiEntropyRetentionPeriod)
|
||||
val deletionQuarantinePeriod: FiniteDuration =
|
||||
key.get(Keys.watchDeletionQuarantinePeriod).getOrElse(Watch.defaultDeletionQuarantinePeriod)
|
||||
val inputHandler: Option[InputStream => Watch.Action] = key.get(Keys.watchInputHandler)
|
||||
val inputParser: Parser[Watch.Action] =
|
||||
key.get(Keys.watchInputParser).getOrElse(Watch.defaultInputParser)
|
||||
val logLevel: Level.Value = key.get(Keys.watchLogLevel).getOrElse(Level.Info)
|
||||
val onEnter: () => Unit = key.get(Keys.watchOnEnter).getOrElse(() => {})
|
||||
val onEvent: Option[Arguments => Event => Watch.Action] = key.get(Keys.watchOnEvent)
|
||||
val onExit: () => Unit = key.get(Keys.watchOnExit).getOrElse(() => {})
|
||||
val onInputEvent: Option[WatchOnEvent] = key.get(Keys.watchOnInputEvent)
|
||||
val onIteration: Option[Int => Watch.Action] = key.get(Keys.watchOnIteration)
|
||||
val onMetaBuildEvent: Option[WatchOnEvent] = key.get(Keys.watchOnMetaBuildEvent)
|
||||
val onStart: Option[Arguments => () => Watch.Action] = key.get(Keys.watchOnStart)
|
||||
val onTermination: Option[(Watch.Action, String, Int, State) => State] =
|
||||
key.get(Keys.watchOnTermination)
|
||||
val onTrigger: Option[Arguments => Event => Unit] = key.get(Keys.watchOnTrigger)
|
||||
val onTriggerEvent: Option[WatchOnEvent] = key.get(Keys.watchOnTriggerEvent)
|
||||
val startMessage: StartMessage = getStartMessage(key)
|
||||
val trackMetaBuild: Boolean = key.get(Keys.watchTrackMetaBuild).getOrElse(true)
|
||||
val triggerMessage: TriggerMessage = getTriggerMessage(key)
|
||||
|
||||
// Unlike the rest of the settings, InputStream is a TaskKey which means that if it is set,
|
||||
// we have to use Extracted.runTask to get the value. The reason for this is because it is
|
||||
// logical that users may want to use a different InputStream on each task invocation. The
|
||||
// alternative would be SettingKey[() => InputStream], but that doesn't feel right because
|
||||
// one might want the InputStream to depend on other tasks.
|
||||
val inputStream: Option[TaskKey[InputStream]] = key.get(Keys.watchInputStream)
|
||||
}
|
||||
|
||||
/**
|
||||
* Container class for all of the components we need to setup a watch for a particular task or
|
||||
* input task.
|
||||
* @param key the [[ScopedKey]] instance for the task we will watch
|
||||
* @param repository the task [[FileTreeRepository]] instance
|
||||
* @param inputs the transitive task inputs (see [[InputGraph]])
|
||||
* @param triggers the transitive triggers (see [[InputGraph]])
|
||||
* @param watchSettings the [[WatchSettings]] instance for the task
|
||||
*/
|
||||
private final class Config private[internal] (
|
||||
val key: ScopedKey[_],
|
||||
val repository: FileTreeRepository[FileAttributes],
|
||||
val inputs: () => Seq[Glob],
|
||||
val triggers: Seq[Glob],
|
||||
val watchSettings: WatchSettings
|
||||
) {
|
||||
private[sbt] def watchState(count: Int): DeprecatedWatchState =
|
||||
WatchState.empty(inputs() ++ triggers).withCount(count)
|
||||
def arguments(logger: Logger): Arguments = new Arguments(logger, inputs(), triggers)
|
||||
}
|
||||
private def getStartMessage(key: ScopedKey[_])(implicit e: Extracted): StartMessage = Some {
|
||||
lazy val default = key.get(Keys.watchStartMessage).getOrElse(Watch.defaultStartWatch)
|
||||
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
|
||||
}
|
||||
private def getTriggerMessage(key: ScopedKey[_])(implicit e: Extracted): TriggerMessage = {
|
||||
lazy val default =
|
||||
key.get(Keys.watchTriggeredMessage).getOrElse(Watch.defaultOnTriggerMessage)
|
||||
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
|
||||
}
|
||||
|
||||
private implicit class ScopeOps(val scope: Scope) {
|
||||
|
||||
/**
|
||||
* This shows the [[Scope]] in the format that a user would likely type it in a build
|
||||
* or in the sbt console. For example, the key corresponding to the command
|
||||
* foo/Compile/compile will pretty print as "foo / Compile / compile", not
|
||||
* "ProjectRef($URI, foo) / compile / compile", where the ProjectRef part is just noise that
|
||||
* is rarely relevant for debugging.
|
||||
* @return the pretty printed output.
|
||||
*/
|
||||
def show: String = {
|
||||
val mask = ScopeMask(
|
||||
config = scope.config.toOption.isDefined,
|
||||
task = scope.task.toOption.isDefined,
|
||||
extra = scope.extra.toOption.isDefined
|
||||
)
|
||||
Scope
|
||||
.displayMasked(scope, " ", (_: Reference) match {
|
||||
case p: ProjectRef => s"${p.project.trim} /"
|
||||
case _ => "Global /"
|
||||
}, mask)
|
||||
.dropRight(3) // delete trailing "/"
|
||||
.trim
|
||||
}
|
||||
}
|
||||
|
||||
private implicit class ScopedKeyOps(val scopedKey: ScopedKey[_]) extends AnyVal {
|
||||
|
||||
/**
|
||||
* Gets the value for a setting key scoped to the wrapped [[ScopedKey]]. If the task axis is not
|
||||
* set in the [[ScopedKey]], then we first set the task axis and try to extract the setting
|
||||
* from that scope otherwise we fallback on the [[ScopedKey]] instance's scope. We use the
|
||||
* reverse order if the task is set.
|
||||
*
|
||||
* @param settingKey the [[SettingKey]] to extract
|
||||
* @param extracted the provided [[Extracted]] instance
|
||||
* @tparam T the type of the [[SettingKey]]
|
||||
* @return the optional value of the [[SettingKey]] if it is defined at the input
|
||||
* [[ScopedKey]] instance's scope or task scope.
|
||||
*/
|
||||
def get[T](settingKey: SettingKey[T])(implicit extracted: Extracted): Option[T] = {
|
||||
lazy val taskScope = Project.fillTaskAxis(scopedKey).scope
|
||||
scopedKey.scope match {
|
||||
case scope if scope.task.toOption.isDefined =>
|
||||
extracted.getOpt(settingKey in scope) orElse extracted.getOpt(settingKey in taskScope)
|
||||
case scope =>
|
||||
extracted.getOpt(settingKey in taskScope) orElse extracted.getOpt(settingKey in scope)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the [[ScopedKey]] for a task scoped to the wrapped [[ScopedKey]]. If the task axis is
|
||||
* not set in the [[ScopedKey]], then we first set the task axis and try to extract the tak
|
||||
* from that scope otherwise we fallback on the [[ScopedKey]] instance's scope. We use the
|
||||
* reverse order if the task is set.
|
||||
*
|
||||
* @param taskKey the [[TaskKey]] to extract
|
||||
* @param extracted the provided [[Extracted]] instance
|
||||
* @tparam T the type of the [[SettingKey]]
|
||||
* @return the optional value of the [[SettingKey]] if it is defined at the input
|
||||
* [[ScopedKey]] instance's scope or task scope.
|
||||
*/
|
||||
def get[T](taskKey: TaskKey[T])(implicit extracted: Extracted): Option[TaskKey[T]] = {
|
||||
lazy val taskScope = Project.fillTaskAxis(scopedKey).scope
|
||||
scopedKey.scope match {
|
||||
case scope if scope.task.toOption.isDefined =>
|
||||
if (extracted.getOpt(taskKey in scope).isDefined) Some(taskKey in scope)
|
||||
else if (extracted.getOpt(taskKey in taskScope).isDefined) Some(taskKey in taskScope)
|
||||
else None
|
||||
case scope =>
|
||||
if (extracted.getOpt(taskKey in taskScope).isDefined) Some(taskKey in taskScope)
|
||||
else if (extracted.getOpt(taskKey in scope).isDefined) Some(taskKey in scope)
|
||||
else None
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This shows the [[ScopedKey[_]] in the format that a user would likely type it in a build
|
||||
* or in the sbt console. For example, the key corresponding to the command
|
||||
* foo/Compile/compile will pretty print as "foo / Compile / compile", not
|
||||
* "ProjectRef($URI, foo) / compile / compile", where the ProjectRef part is just noise that
|
||||
* is rarely relevant for debugging.
|
||||
* @return the pretty printed output.
|
||||
*/
|
||||
def show: String = s"${scopedKey.scope.show} / ${scopedKey.key}"
|
||||
}
|
||||
|
||||
private implicit class LoggerOps(val logger: Logger) extends AnyVal {
|
||||
|
||||
/**
|
||||
* Creates a logger that adds a prefix to the messages that it logs. The motivation is so that
|
||||
* we can tell from which FileEventMonitor an event originated.
|
||||
* @param prefix the string to prefix the message with
|
||||
* @return the wrapped Logger.
|
||||
*/
|
||||
def withPrefix(prefix: String): Logger = new Logger {
|
||||
override def trace(t: => Throwable): Unit = logger.trace(t)
|
||||
override def success(message: => String): Unit = logger.success(message)
|
||||
override def log(level: Level.Value, message: => String): Unit =
|
||||
logger.log(level, s"$prefix - $message")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,20 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
|
||||
import sbt.internal.io.{ WatchState => WS }
|
||||
|
||||
private[internal] trait DeprecatedContinuous {
|
||||
protected type Event = sbt.io.FileEventMonitor.Event[FileAttributes]
|
||||
protected type StartMessage =
|
||||
Option[Either[WS => String, (Int, String, Seq[String]) => Option[String]]]
|
||||
protected type TriggerMessage = Either[WS => String, (Int, Event, Seq[String]) => Option[String]]
|
||||
protected type DeprecatedWatchState = WS
|
||||
protected val deprecatedWatchingMessage = sbt.Keys.watchingMessage
|
||||
protected val deprecatedTriggeredMessage = sbt.Keys.triggeredMessage
|
||||
}
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
|
||||
import java.io.{ InputStream, PipedInputStream, PipedOutputStream }
|
||||
import java.util.concurrent.LinkedBlockingQueue
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.collection.JavaConverters._
|
||||
|
||||
/**
|
||||
* Creates a copy of the provided [[InputStream]] that forwards its contents to an arbitrary
|
||||
* number of connected [[InputStream]] instances via pipe.
|
||||
* @param in the [[InputStream]] to wrap.
|
||||
*/
|
||||
private[internal] class DupedInputStream(val in: InputStream)
|
||||
extends InputStream
|
||||
with AutoCloseable {
|
||||
|
||||
/**
|
||||
* Returns a copied [[InputStream]] that will receive the same bytes as System.in.
|
||||
* @return
|
||||
*/
|
||||
def duped: InputStream = {
|
||||
val pipedOutputStream = new PipedOutputStream()
|
||||
pipes += pipedOutputStream
|
||||
val res = new PollingInputStream(new PipedInputStream(pipedOutputStream))
|
||||
buffer.forEach(pipedOutputStream.write(_))
|
||||
res
|
||||
}
|
||||
|
||||
private[this] val pipes = new java.util.Vector[PipedOutputStream].asScala
|
||||
private[this] val buffer = new LinkedBlockingQueue[Int]
|
||||
private class PollingInputStream(val pipedInputStream: PipedInputStream) extends InputStream {
|
||||
override def available(): Int = {
|
||||
fillBuffer()
|
||||
pipedInputStream.available()
|
||||
}
|
||||
override def read(): Int = {
|
||||
fillBuffer()
|
||||
pipedInputStream.read
|
||||
}
|
||||
}
|
||||
override def available(): Int = {
|
||||
fillBuffer()
|
||||
buffer.size
|
||||
}
|
||||
override def read(): Int = {
|
||||
fillBuffer()
|
||||
buffer.take()
|
||||
}
|
||||
|
||||
private[this] def fillBuffer(): Unit = synchronized {
|
||||
@tailrec
|
||||
def impl(): Unit = in.available match {
|
||||
case i if i > 0 =>
|
||||
val res = in.read()
|
||||
buffer.add(res)
|
||||
pipes.foreach { p =>
|
||||
p.write(res)
|
||||
p.flush()
|
||||
}
|
||||
impl()
|
||||
case _ =>
|
||||
}
|
||||
impl()
|
||||
}
|
||||
}
|
||||
|
|
@ -7,13 +7,13 @@
|
|||
|
||||
package sbt.internal
|
||||
|
||||
import java.nio.file.{ Path, Paths }
|
||||
import java.nio.file.Paths
|
||||
import java.util.Optional
|
||||
|
||||
import sbt.Stamped
|
||||
import sbt.internal.inc.ExternalLookup
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ AllPassFilter, Glob, TypedPath }
|
||||
import sbt.Stamped
|
||||
import sbt.io.{ AllPassFilter, TypedPath }
|
||||
import xsbti.compile._
|
||||
import xsbti.compile.analysis.Stamp
|
||||
|
||||
|
|
@ -22,7 +22,6 @@ import scala.collection.mutable
|
|||
private[sbt] object ExternalHooks {
|
||||
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
|
||||
def apply(options: CompileOptions, repo: FileTree.Repository): DefaultExternalHooks = {
|
||||
def listEntries(glob: Glob): Seq[(Path, FileAttributes)] = repo.get(glob)
|
||||
import scala.collection.JavaConverters._
|
||||
val sources = options.sources()
|
||||
val cachedSources = new java.util.HashMap[File, Stamp]
|
||||
|
|
@ -34,13 +33,9 @@ private[sbt] object ExternalHooks {
|
|||
val allBinaries = new java.util.HashMap[File, Stamp]
|
||||
options.classpath.foreach {
|
||||
case f if f.getName.endsWith(".jar") =>
|
||||
// This gives us the entry for the path itself, which is necessary if the path is a jar file
|
||||
// rather than a directory.
|
||||
listEntries(f.toGlob) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
|
||||
repo.get(f.toGlob) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
|
||||
case f =>
|
||||
listEntries(f ** AllPassFilter) foreach {
|
||||
case (p, a) => allBinaries.put(p.toFile, a.stamp)
|
||||
}
|
||||
repo.get(f ** AllPassFilter) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
|
||||
}
|
||||
|
||||
val lookup = new ExternalLookup {
|
||||
|
|
|
|||
|
|
@ -9,60 +9,15 @@ package sbt
|
|||
package internal
|
||||
|
||||
import java.io.IOException
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
import sbt.BasicCommandStrings.ContinuousExecutePrefix
|
||||
import sbt.Keys._
|
||||
import sbt.internal.io.HybridPollingFileTreeRepository
|
||||
import sbt.internal.util.Util
|
||||
import sbt.io.FileTreeDataView.{ Entry, Observable, Observer, Observers }
|
||||
import sbt.io.{ FileTreeRepository, _ }
|
||||
import sbt.util.{ Level, Logger }
|
||||
import sbt.util.Logger
|
||||
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.mutable
|
||||
import scala.concurrent.duration._
|
||||
|
||||
private[sbt] object FileManagement {
|
||||
private[sbt] def defaultFileTreeRepository(
|
||||
state: State,
|
||||
extracted: Extracted
|
||||
): FileTreeRepository[FileAttributes] = {
|
||||
val pollingGlobs = extracted.getOpt(Keys.pollingGlobs).getOrElse(Nil)
|
||||
val remaining = state.remainingCommands.map(_.commandLine)
|
||||
// If the session is interactive or if the commands include a continuous build, then use
|
||||
// the default configuration. Otherwise, use the sbt1_2_compat config, which does not cache
|
||||
// anything, which makes it less likely to cause issues with CI.
|
||||
val interactive =
|
||||
remaining.contains("shell") || remaining.lastOption.contains("iflast shell")
|
||||
val scripted = remaining.contains("setUpScripted")
|
||||
val continuous = remaining.lastOption.exists(_.startsWith(ContinuousExecutePrefix))
|
||||
val enableCache = extracted
|
||||
.getOpt(Keys.enableGlobalCachingFileTreeRepository)
|
||||
.getOrElse(!scripted && (interactive || continuous))
|
||||
val pollInterval = extracted.getOpt(Keys.pollInterval).getOrElse(500.milliseconds)
|
||||
val watchLogger: WatchLogger = extracted.getOpt(Keys.logLevel) match {
|
||||
case Level.Debug =>
|
||||
new WatchLogger { override def debug(msg: => Any): Unit = println(s"[watch-debug] $msg") }
|
||||
case _ => new WatchLogger { override def debug(msg: => Any): Unit = {} }
|
||||
}
|
||||
if (enableCache) {
|
||||
if (pollingGlobs.isEmpty) FileTreeRepository.default(FileAttributes.default)
|
||||
else
|
||||
new HybridMonitoringRepository[FileAttributes](
|
||||
FileTreeRepository.hybrid(FileAttributes.default, pollingGlobs: _*),
|
||||
pollInterval,
|
||||
watchLogger
|
||||
)
|
||||
} else {
|
||||
if (Util.isWindows) new PollingFileRepository(FileAttributes.default)
|
||||
else {
|
||||
val service = Watched.createWatchService(pollInterval)
|
||||
FileTreeRepository.legacy(FileAttributes.default _, (_: Any) => {}, service)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def monitor(
|
||||
repository: FileTreeRepository[FileAttributes],
|
||||
antiEntropy: FiniteDuration,
|
||||
|
|
@ -101,41 +56,7 @@ private[sbt] object FileManagement {
|
|||
}
|
||||
}
|
||||
|
||||
private[sbt] def repo: Def.Initialize[Task[FileTreeRepository[FileAttributes]]] = Def.task {
|
||||
lazy val msg = s"Tried to get FileTreeRepository for uninitialized state."
|
||||
state.value.get(Keys.globalFileTreeRepository).getOrElse(throw new IllegalStateException(msg))
|
||||
}
|
||||
|
||||
private[sbt] class HybridMonitoringRepository[T](
|
||||
underlying: HybridPollingFileTreeRepository[T],
|
||||
delay: FiniteDuration,
|
||||
logger: WatchLogger
|
||||
) extends FileTreeRepository[T] {
|
||||
private val registered: mutable.Set[Glob] = ConcurrentHashMap.newKeySet[Glob].asScala
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = underlying.listEntries(glob)
|
||||
override def list(glob: Glob): Seq[TypedPath] = underlying.list(glob)
|
||||
override def addObserver(observer: Observer[T]): Int = underlying.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = underlying.removeObserver(handle)
|
||||
override def close(): Unit = underlying.close()
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = {
|
||||
registered.add(glob)
|
||||
underlying.register(glob)
|
||||
}
|
||||
override def unregister(glob: Glob): Unit = underlying.unregister(glob)
|
||||
private[sbt] def toMonitoringRepository: FileTreeRepository[T] = {
|
||||
val polling = underlying.toPollingRepository(delay, logger)
|
||||
registered.foreach(polling.register)
|
||||
polling
|
||||
}
|
||||
}
|
||||
private[sbt] def toMonitoringRepository[T](
|
||||
repository: FileTreeRepository[T]
|
||||
): FileTreeRepository[T] = repository match {
|
||||
case p: PollingFileRepository[T] => p.toMonitoringRepository
|
||||
case h: HybridMonitoringRepository[T] => h.toMonitoringRepository
|
||||
case r: FileTreeRepository[T] => new CopiedFileRepository(r)
|
||||
}
|
||||
private class CopiedFileRepository[T](underlying: FileTreeRepository[T])
|
||||
private[sbt] class CopiedFileTreeRepository[T](underlying: FileTreeRepository[T])
|
||||
extends FileTreeRepository[T] {
|
||||
def addObserver(observer: Observer[T]) = underlying.addObserver(observer)
|
||||
def close(): Unit = {} // Don't close the underlying observable
|
||||
|
|
@ -145,41 +66,4 @@ private[sbt] object FileManagement {
|
|||
def register(glob: Glob): Either[IOException, Boolean] = underlying.register(glob)
|
||||
def unregister(glob: Glob): Unit = underlying.unregister(glob)
|
||||
}
|
||||
private[sbt] class PollingFileRepository[T](converter: TypedPath => T)
|
||||
extends FileTreeRepository[T] { self =>
|
||||
private val registered: mutable.Set[Glob] = ConcurrentHashMap.newKeySet[Glob].asScala
|
||||
private[this] val view = FileTreeView.DEFAULT
|
||||
private[this] val dataView = view.asDataView(converter)
|
||||
private[this] val handles: mutable.Map[FileTreeRepository[T], Int] =
|
||||
new ConcurrentHashMap[FileTreeRepository[T], Int].asScala
|
||||
private val observers: Observers[T] = new Observers
|
||||
override def addObserver(observer: Observer[T]): Int = observers.addObserver(observer)
|
||||
override def close(): Unit = {
|
||||
handles.foreach { case (repo, handle) => repo.removeObserver(handle) }
|
||||
observers.close()
|
||||
}
|
||||
override def list(glob: Glob): Seq[TypedPath] = view.list(glob)
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = dataView.listEntries(glob)
|
||||
override def removeObserver(handle: Int): Unit = observers.removeObserver(handle)
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = Right(registered.add(glob))
|
||||
override def unregister(glob: Glob): Unit = registered -= glob
|
||||
|
||||
private[sbt] def toMonitoringRepository: FileTreeRepository[T] = {
|
||||
val legacy = FileTreeRepository.legacy(converter)
|
||||
registered.foreach(legacy.register)
|
||||
handles += legacy -> legacy.addObserver(observers)
|
||||
new FileTreeRepository[T] {
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = legacy.listEntries(glob)
|
||||
override def list(glob: Glob): Seq[TypedPath] = legacy.list(glob)
|
||||
def addObserver(observer: Observer[T]): Int = legacy.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = legacy.removeObserver(handle)
|
||||
override def close(): Unit = legacy.close()
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = {
|
||||
self.register(glob)
|
||||
legacy.register(glob)
|
||||
}
|
||||
override def unregister(glob: Glob): Unit = legacy.unregister(glob)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,12 +14,28 @@ import sbt.internal.util.appmacro.MacroDefaults
|
|||
import sbt.io.FileTreeDataView.Entry
|
||||
import sbt.io._
|
||||
|
||||
import scala.collection.mutable
|
||||
import scala.language.experimental.macros
|
||||
|
||||
private[sbt] object FileTree {
|
||||
private def toPair(e: Entry[FileAttributes]): Option[(Path, FileAttributes)] =
|
||||
e.value.toOption.map(a => e.typedPath.toPath -> a)
|
||||
object FileTree {
|
||||
private sealed trait CacheOptions
|
||||
private case object NoCache extends CacheOptions
|
||||
private case object UseCache extends CacheOptions
|
||||
private case object LogDifferences extends CacheOptions
|
||||
private def toPair(
|
||||
filter: Entry[FileAttributes] => Boolean
|
||||
)(e: Entry[FileAttributes]): Option[(Path, FileAttributes)] =
|
||||
e.value.toOption.flatMap(a => if (filter(e)) Some(e.typedPath.toPath -> a) else None)
|
||||
trait Repository extends sbt.internal.Repository[Seq, Glob, (Path, FileAttributes)]
|
||||
private[sbt] trait DynamicInputs {
|
||||
def value: Option[mutable.Set[Glob]]
|
||||
}
|
||||
private[sbt] object DynamicInputs {
|
||||
def empty: DynamicInputs = new impl(Some(mutable.Set.empty[Glob]))
|
||||
final val none: DynamicInputs = new impl(None)
|
||||
private final class impl(override val value: Option[mutable.Set[Glob]]) extends DynamicInputs
|
||||
implicit def default: DynamicInputs = macro MacroDefaults.dynamicInputs
|
||||
}
|
||||
private[sbt] object Repository {
|
||||
|
||||
/**
|
||||
|
|
@ -32,26 +48,57 @@ private[sbt] object FileTree {
|
|||
private[sbt] object polling extends Repository {
|
||||
val view = FileTreeView.DEFAULT.asDataView(FileAttributes.default)
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] =
|
||||
view.listEntries(key).flatMap(toPair)
|
||||
view.listEntries(key).flatMap(toPair(key.toEntryFilter))
|
||||
override def close(): Unit = {}
|
||||
}
|
||||
}
|
||||
private class ViewRepository(underlying: FileTreeDataView[FileAttributes]) extends Repository {
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] =
|
||||
underlying.listEntries(key).flatMap(toPair)
|
||||
override def close(): Unit = {}
|
||||
}
|
||||
private class CachingRepository(underlying: FileTreeRepository[FileAttributes])
|
||||
extends Repository {
|
||||
lazy val cacheOptions = System.getProperty("sbt.io.filecache") match {
|
||||
case "false" => NoCache
|
||||
case "true" => UseCache
|
||||
case _ => LogDifferences
|
||||
}
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] = {
|
||||
underlying.register(key)
|
||||
underlying.listEntries(key).flatMap(toPair)
|
||||
cacheOptions match {
|
||||
case LogDifferences =>
|
||||
val res = Repository.polling.get(key)
|
||||
val filter = key.toEntryFilter
|
||||
val cacheRes = underlying
|
||||
.listEntries(key)
|
||||
.flatMap(e => if (filter(e)) Some(e.typedPath.toPath) else None)
|
||||
.toSet
|
||||
val resSet = res.map(_._1).toSet
|
||||
if (cacheRes != resSet) {
|
||||
val msg = "Warning: got different files when using the internal file cache compared " +
|
||||
s"to polling the file system for key: $key.\n"
|
||||
val fileDiff = cacheRes diff resSet match {
|
||||
case d if d.nonEmpty =>
|
||||
new Exception("hmm").printStackTrace()
|
||||
s"Cache had files not found in the file system:\n${d.mkString("\n")}.\n"
|
||||
case _ => ""
|
||||
}
|
||||
val cacheDiff = resSet diff cacheRes match {
|
||||
case d if d.nonEmpty =>
|
||||
(if (fileDiff.isEmpty) "" else " ") +
|
||||
s"File system had files not in the cache:\n${d.mkString("\n")}.\n"
|
||||
case _ => ""
|
||||
}
|
||||
val diff = fileDiff + cacheDiff
|
||||
val instructions = "Please open an issue at https://github.com/sbt/sbt. To disable " +
|
||||
"this warning, run sbt with -Dsbt.io.filecache=false"
|
||||
System.err.println(msg + diff + instructions)
|
||||
}
|
||||
res
|
||||
case UseCache =>
|
||||
underlying.listEntries(key).flatMap(toPair(key.toEntryFilter))
|
||||
case NoCache =>
|
||||
Repository.polling.get(key)
|
||||
}
|
||||
}
|
||||
override def close(): Unit = underlying.close()
|
||||
}
|
||||
private[sbt] def repository(underlying: FileTreeDataView[FileAttributes]): Repository =
|
||||
underlying match {
|
||||
case r: FileTreeRepository[FileAttributes] => new CachingRepository(r)
|
||||
case v => new ViewRepository(v)
|
||||
}
|
||||
private[sbt] def repository(underlying: FileTreeRepository[FileAttributes]): Repository =
|
||||
new CachingRepository(underlying)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,9 +8,14 @@
|
|||
package sbt
|
||||
package internal
|
||||
|
||||
import java.io.File
|
||||
import java.nio.file.Path
|
||||
import java.util.concurrent.ConcurrentSkipListMap
|
||||
|
||||
import sbt.io.Glob
|
||||
import sbt.io.{ FileFilter, Glob, SimpleFileFilter }
|
||||
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.mutable
|
||||
|
||||
/**
|
||||
* Retrieve files from a repository. This should usually be an extension class for
|
||||
|
|
@ -19,48 +24,45 @@ import sbt.io.Glob
|
|||
*/
|
||||
private[sbt] sealed trait GlobLister extends Any {
|
||||
|
||||
/**
|
||||
* Get the sources described this [[GlobLister]].
|
||||
*
|
||||
* @param repository the [[FileTree.Repository]] to delegate file i/o.
|
||||
* @return the files described by this [[GlobLister]].
|
||||
*/
|
||||
def all(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)]
|
||||
final def all(repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
|
||||
all(repository, FileTree.DynamicInputs.empty)
|
||||
|
||||
/**
|
||||
* Get the unique sources described this [[GlobLister]].
|
||||
* Get the sources described this `GlobLister`. The results should not return any duplicate
|
||||
* entries for each path in the result set.
|
||||
*
|
||||
* @param repository the [[FileTree.Repository]] to delegate file i/o.
|
||||
* @return the files described by this [[GlobLister]] with any duplicates removed.
|
||||
* @param repository the file tree repository for retrieving the files for a given glob.
|
||||
* @param dynamicInputs the task dynamic inputs to track for watch.
|
||||
* @return the files described by this `GlobLister`.
|
||||
*/
|
||||
def unique(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)]
|
||||
def all(
|
||||
implicit repository: FileTree.Repository,
|
||||
dynamicInputs: FileTree.DynamicInputs
|
||||
): Seq[(Path, FileAttributes)]
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides implicit definitions to provide a [[GlobLister]] given a Glob or
|
||||
* Provides implicit definitions to provide a `GlobLister` given a Glob or
|
||||
* Traversable[Glob].
|
||||
*/
|
||||
object GlobLister extends GlobListers
|
||||
private[sbt] object GlobLister extends GlobListers
|
||||
|
||||
/**
|
||||
* Provides implicit definitions to provide a [[GlobLister]] given a Glob or
|
||||
* Provides implicit definitions to provide a `GlobLister` given a Glob or
|
||||
* Traversable[Glob].
|
||||
*/
|
||||
private[sbt] trait GlobListers {
|
||||
import GlobListers._
|
||||
|
||||
/**
|
||||
* Generate a [[GlobLister]] given a particular [[Glob]]s.
|
||||
* Generate a GlobLister given a particular [[Glob]]s.
|
||||
*
|
||||
* @param source the input Glob
|
||||
*/
|
||||
implicit def fromGlob(source: Glob): GlobLister = new impl(source :: Nil)
|
||||
|
||||
/**
|
||||
* Generate a [[GlobLister]] given a collection of Globs. If the input collection type
|
||||
* preserves uniqueness, e.g. `Set[Glob]`, then the output of [[GlobLister.all]] will be
|
||||
* the unique source list. Otherwise duplicates are possible in all and it is necessary to call
|
||||
* [[GlobLister.unique]] to de-duplicate the files.
|
||||
* Generate a GlobLister given a collection of Globs.
|
||||
*
|
||||
* @param sources the collection of sources
|
||||
* @tparam T the source collection type
|
||||
|
|
@ -69,9 +71,37 @@ private[sbt] trait GlobListers {
|
|||
new impl(sources)
|
||||
}
|
||||
private[internal] object GlobListers {
|
||||
private def covers(left: Glob, right: Glob): Boolean = {
|
||||
right.base.startsWith(left.base) && {
|
||||
left.depth == Int.MaxValue || {
|
||||
val depth = left.base.relativize(right.base).getNameCount
|
||||
depth < left.depth - right.depth
|
||||
}
|
||||
}
|
||||
}
|
||||
private def aggregate(globs: Traversable[Glob]): Seq[(Glob, Traversable[Glob])] = {
|
||||
val sorted = globs.toSeq.sorted
|
||||
val map = new ConcurrentSkipListMap[Path, (Glob, mutable.Set[Glob])]
|
||||
if (sorted.size > 1) {
|
||||
sorted.foreach { glob =>
|
||||
map.subMap(glob.base.getRoot, glob.base.resolve(Char.MaxValue.toString)).asScala.find {
|
||||
case (_, (g, _)) => covers(g, glob)
|
||||
} match {
|
||||
case Some((_, (_, globs))) => globs += glob
|
||||
case None =>
|
||||
val globs = mutable.Set(glob)
|
||||
val filter: FileFilter = new SimpleFileFilter((file: File) => {
|
||||
globs.exists(_.toFileFilter.accept(file))
|
||||
})
|
||||
map.put(glob.base, (Glob(glob.base, filter, glob.depth), globs))
|
||||
}
|
||||
}
|
||||
map.asScala.values.toIndexedSeq
|
||||
} else sorted.map(g => g -> (g :: Nil))
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements [[GlobLister]] given a collection of Globs. If the input collection type
|
||||
* Implements `GlobLister` given a collection of Globs. If the input collection type
|
||||
* preserves uniqueness, e.g. `Set[Glob]`, then the output will be the unique source list.
|
||||
* Otherwise duplicates are possible.
|
||||
*
|
||||
|
|
@ -79,18 +109,15 @@ private[internal] object GlobListers {
|
|||
* @tparam T the collection type
|
||||
*/
|
||||
private class impl[T <: Traversable[Glob]](val globs: T) extends AnyVal with GlobLister {
|
||||
private def get[T0 <: Traversable[Glob]](
|
||||
traversable: T0,
|
||||
repository: FileTree.Repository
|
||||
): Seq[(Path, FileAttributes)] =
|
||||
traversable.flatMap { glob =>
|
||||
val sourceFilter = glob.toFileFilter
|
||||
repository.get(glob).filter { case (p, _) => sourceFilter.accept(p.toFile) }
|
||||
override def all(
|
||||
implicit repository: FileTree.Repository,
|
||||
dynamicInputs: FileTree.DynamicInputs
|
||||
): Seq[(Path, FileAttributes)] = {
|
||||
aggregate(globs).flatMap {
|
||||
case (glob, allGlobs) =>
|
||||
dynamicInputs.value.foreach(_ ++= allGlobs)
|
||||
repository.get(glob)
|
||||
}.toIndexedSeq
|
||||
|
||||
override def all(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
|
||||
get(globs, repository)
|
||||
override def unique(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
|
||||
get(globs.toSet[Glob], repository)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,216 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
|
||||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.Project.richInitializeTask
|
||||
import sbt._
|
||||
import sbt.internal.io.Source
|
||||
import sbt.internal.util.AttributeMap
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.io.Glob
|
||||
|
||||
import scala.annotation.tailrec
|
||||
|
||||
object TransitiveGlobs {
|
||||
val transitiveTriggers = Def.taskKey[Seq[Glob]]("The transitive triggers for a key")
|
||||
val transitiveInputs = Def.taskKey[Seq[Glob]]("The transitive inputs for a key")
|
||||
val transitiveGlobs =
|
||||
Def.taskKey[(Seq[Glob], Seq[Glob])]("The transitive inputs and triggers for a key")
|
||||
}
|
||||
private[sbt] object InputGraph {
|
||||
@deprecated("Source is also deprecated.", "1.3.0")
|
||||
private implicit class SourceOps(val source: Source) {
|
||||
def toGlob: Glob =
|
||||
Glob(
|
||||
source.base,
|
||||
source.includeFilter -- source.excludeFilter,
|
||||
if (source.recursive) Int.MaxValue else 0
|
||||
)
|
||||
}
|
||||
private[sbt] def inputsTask: Def.Initialize[Task[Seq[Glob]]] =
|
||||
Def.task(transitiveGlobs(arguments.value)._1.sorted)
|
||||
private[sbt] def inputsTask(key: ScopedKey[_]): Def.Initialize[Task[Seq[Glob]]] =
|
||||
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)._1.sorted))
|
||||
private[sbt] def triggersTask: Def.Initialize[Task[Seq[Glob]]] =
|
||||
Def.task(transitiveGlobs(arguments.value)._2.sorted)
|
||||
private[sbt] def triggersTask(key: ScopedKey[_]): Def.Initialize[Task[Seq[Glob]]] =
|
||||
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)._2.sorted))
|
||||
private[sbt] def task: Def.Initialize[Task[(Seq[Glob], Seq[Glob])]] =
|
||||
Def.task(transitiveGlobs(arguments.value))
|
||||
private[sbt] def task(key: ScopedKey[_]): Def.Initialize[Task[(Seq[Glob], Seq[Glob])]] =
|
||||
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)))
|
||||
private def withParams[R](
|
||||
f: (Extracted, CompiledMap) => Def.Initialize[Task[R]]
|
||||
): Def.Initialize[Task[R]] = Def.taskDyn {
|
||||
val extracted = Project.extract(state.value)
|
||||
f(extracted, compile(extracted.structure))
|
||||
}
|
||||
|
||||
private[sbt] def compile(structure: BuildStructure): CompiledMap =
|
||||
compiled(structure.settings)(structure.delegates, structure.scopeLocal, (_: ScopedKey[_]) => "")
|
||||
private[sbt] final class Arguments(
|
||||
val scopedKey: ScopedKey[_],
|
||||
val extracted: Extracted,
|
||||
val compiledMap: CompiledMap,
|
||||
val log: sbt.util.Logger,
|
||||
val dependencyConfigurations: Seq[(ProjectRef, Set[String])],
|
||||
val state: State
|
||||
) {
|
||||
def structure: BuildStructure = extracted.structure
|
||||
def data: Map[Scope, AttributeMap] = extracted.structure.data.data
|
||||
}
|
||||
private def argumentsImpl(
|
||||
scopedKey: ScopedKey[_],
|
||||
extracted: Extracted,
|
||||
compiledMap: CompiledMap
|
||||
): Def.Initialize[Task[Arguments]] = Def.task {
|
||||
val log = (streamsManager map { mgr =>
|
||||
val stream = mgr(scopedKey)
|
||||
stream.open()
|
||||
stream
|
||||
}).value.log
|
||||
val configs = (internalDependencyConfigurations in scopedKey.scope).value
|
||||
new Arguments(
|
||||
scopedKey,
|
||||
extracted,
|
||||
compiledMap,
|
||||
log,
|
||||
configs,
|
||||
state.value
|
||||
)
|
||||
}
|
||||
private val ShowTransitive = "(?:show)?(?:[ ]*)(.*)/(?:[ ]*)transitive(?:Inputs|Globs|Triggers)".r
|
||||
private def arguments: Def.Initialize[Task[Arguments]] = Def.taskDyn {
|
||||
Def.task {
|
||||
val extracted = Project.extract(state.value)
|
||||
val compiledMap = compile(extracted.structure)
|
||||
state.value.currentCommand.map(_.commandLine) match {
|
||||
case Some(ShowTransitive(key)) =>
|
||||
Parser.parse(key.trim, Act.scopedKeyParser(state.value)) match {
|
||||
case Right(scopedKey) => argumentsImpl(scopedKey, extracted, compiledMap)
|
||||
case _ => argumentsImpl(Keys.resolvedScoped.value, extracted, compiledMap)
|
||||
}
|
||||
case Some(_) => argumentsImpl(Keys.resolvedScoped.value, extracted, compiledMap)
|
||||
}
|
||||
}.value
|
||||
}
|
||||
private[sbt] def transitiveGlobs(args: Arguments): (Seq[Glob], Seq[Glob]) = {
|
||||
import args._
|
||||
val taskScope = Project.fillTaskAxis(scopedKey).scope
|
||||
def delegates(sk: ScopedKey[_]): Seq[ScopedKey[_]] =
|
||||
Project.delegates(structure, sk.scope, sk.key)
|
||||
// We add the triggers to the delegate scopes to make it possible for the user to do something
|
||||
// like: Compile / compile / watchTriggers += baseDirectory.value ** "*.proto". We do not do the
|
||||
// same for inputs because inputs are expected to be explicitly used as part of the task.
|
||||
val allKeys: Seq[ScopedKey[_]] =
|
||||
(delegates(scopedKey).toSet ++ delegates(ScopedKey(taskScope, watchTriggers.key))).toSeq
|
||||
val keys = collectKeys(args, allKeys, Set.empty, Set.empty)
|
||||
def getGlobs(scopedKey: ScopedKey[Seq[Glob]]): Seq[Glob] =
|
||||
data.get(scopedKey.scope).flatMap(_.get(scopedKey.key)).getOrElse(Nil)
|
||||
val (inputGlobs, triggerGlobs) = keys.partition(_.key == fileInputs.key) match {
|
||||
case (i, t) => (i.flatMap(getGlobs), t.flatMap(getGlobs))
|
||||
}
|
||||
(inputGlobs.distinct, (triggerGlobs ++ legacy(keys :+ scopedKey, args)).distinct)
|
||||
}
|
||||
|
||||
private def legacy(keys: Seq[ScopedKey[_]], args: Arguments): Seq[Glob] = {
|
||||
import args._
|
||||
val projectScopes =
|
||||
keys.view
|
||||
.map(_.scope.copy(task = Zero, extra = Zero))
|
||||
.distinct
|
||||
.toIndexedSeq
|
||||
val projects = projectScopes.flatMap(_.project.toOption).distinct.toSet
|
||||
val scopes: Seq[Either[Scope, Seq[Glob]]] =
|
||||
data.flatMap {
|
||||
case (s, am) =>
|
||||
if (s == Scope.Global || s.project.toOption.exists(projects.contains))
|
||||
am.get(Keys.watchSources.key) match {
|
||||
case Some(k) =>
|
||||
k.work match {
|
||||
// Avoid extracted.runTask if possible.
|
||||
case Pure(w, _) => Some(Right(w().map(_.toGlob)))
|
||||
case _ => Some(Left(s))
|
||||
}
|
||||
case _ => None
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}.toSeq
|
||||
scopes.flatMap {
|
||||
case Left(scope) =>
|
||||
extracted.runTask(Keys.watchSources in scope, state)._2.map(_.toGlob)
|
||||
case Right(globs) => globs
|
||||
}
|
||||
}
|
||||
@tailrec
|
||||
private def collectKeys(
|
||||
arguments: Arguments,
|
||||
dependencies: Seq[ScopedKey[_]],
|
||||
accumulator: Set[ScopedKey[Seq[Glob]]],
|
||||
visited: Set[ScopedKey[_]]
|
||||
): Seq[ScopedKey[Seq[Glob]]] = dependencies match {
|
||||
// Iterates until the dependency list is empty. The visited parameter prevents the graph
|
||||
// traversal from getting stuck in a cycle.
|
||||
case Seq(dependency, rest @ _*) =>
|
||||
(if (!visited(dependency)) arguments.compiledMap.get(dependency) else None) match {
|
||||
case Some(compiled) =>
|
||||
val newVisited = visited + compiled.key
|
||||
val baseGlobs: Seq[ScopedKey[Seq[Glob]]] = compiled.key match {
|
||||
case key: ScopedKey[Seq[Glob]] @unchecked if isGlobKey(key) => key :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
val base: (Seq[ScopedKey[_]], Seq[ScopedKey[Seq[Glob]]]) = (Nil, baseGlobs)
|
||||
val (newDependencies, newScopes) =
|
||||
(compiled.dependencies.filterNot(newVisited) ++ compiled.settings.map(_.key))
|
||||
.foldLeft(base) {
|
||||
case ((d, s), key: ScopedKey[Seq[Glob]] @unchecked)
|
||||
if isGlobKey(key) && !newVisited(key) =>
|
||||
(d, s :+ key)
|
||||
case ((d, s), key) if key.key == dynamicDependency.key =>
|
||||
key.scope.task.toOption
|
||||
.map { k =>
|
||||
val newKey = ScopedKey(key.scope.copy(task = Zero), k)
|
||||
if (newVisited(newKey)) (d, s) else (d :+ newKey, s)
|
||||
}
|
||||
.getOrElse((d, s))
|
||||
case ((d, s), key) if key.key == transitiveClasspathDependency.key =>
|
||||
key.scope.task.toOption
|
||||
.map { task =>
|
||||
val zeroedTaskScope = key.scope.copy(task = Zero)
|
||||
val transitiveKeys = arguments.dependencyConfigurations.flatMap {
|
||||
case (p, configs) =>
|
||||
configs.map(c => ScopedKey(zeroedTaskScope in (p, ConfigKey(c)), task))
|
||||
}
|
||||
|
||||
(d ++ transitiveKeys.filterNot(newVisited), s)
|
||||
}
|
||||
.getOrElse((d, s))
|
||||
case ((d, s), key) =>
|
||||
(d ++ (if (!newVisited(key)) Some(key) else None), s)
|
||||
}
|
||||
// Append the Keys.triggers key in case there are no other references to Keys.triggers.
|
||||
val transitiveTrigger = compiled.key.scope.task.toOption match {
|
||||
case _: Some[_] => ScopedKey(compiled.key.scope, watchTriggers.key)
|
||||
case None => ScopedKey(Project.fillTaskAxis(compiled.key).scope, watchTriggers.key)
|
||||
}
|
||||
val newRest = rest ++ newDependencies ++ (if (newVisited(transitiveTrigger)) Nil
|
||||
else Some(transitiveTrigger))
|
||||
collectKeys(arguments, newRest, accumulator ++ newScopes, newVisited)
|
||||
case _ if rest.nonEmpty => collectKeys(arguments, rest, accumulator, visited)
|
||||
case _ => accumulator.toIndexedSeq
|
||||
}
|
||||
case _ => accumulator.toIndexedSeq
|
||||
}
|
||||
private[this] def isGlobKey(key: ScopedKey[_]): Boolean = key.key match {
|
||||
case fileInputs.key | watchTriggers.key => true
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.Keys._
|
||||
|
||||
private[sbt] object InternalDependencies {
|
||||
def configurations: Def.Initialize[Seq[(ProjectRef, Set[String])]] = Def.setting {
|
||||
val allConfigs = Classpaths.allConfigs(configuration.value).map(_.name).toSet
|
||||
val ref = thisProjectRef.value
|
||||
val applicableConfigs = allConfigs + "*"
|
||||
((ref -> allConfigs) +: buildDependencies.value.classpath
|
||||
.get(ref)
|
||||
.toSeq
|
||||
.flatMap(_.flatMap {
|
||||
case ResolvedClasspathDependency(p, rawConfigs) =>
|
||||
val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config =>
|
||||
config.split("->") match {
|
||||
case Array(n, c) if applicableConfigs.contains(n) => Some(c)
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
if (configs.isEmpty) None else Some(p -> configs.toSet)
|
||||
})).distinct
|
||||
}
|
||||
}
|
||||
|
|
@ -293,9 +293,9 @@ private[sbt] object Load {
|
|||
def finalTransforms(ss: Seq[Setting[_]]): Seq[Setting[_]] = {
|
||||
def mapSpecial(to: ScopedKey[_]) = λ[ScopedKey ~> ScopedKey](
|
||||
(key: ScopedKey[_]) =>
|
||||
if (key.key == streams.key)
|
||||
if (key.key == streams.key) {
|
||||
ScopedKey(Scope.fillTaskAxis(Scope.replaceThis(to.scope)(key.scope), to.key), key.key)
|
||||
else key
|
||||
} else key
|
||||
)
|
||||
def setDefining[T] =
|
||||
(key: ScopedKey[T], value: T) =>
|
||||
|
|
|
|||
|
|
@ -110,7 +110,7 @@ private[sbt] final class TaskProgress
|
|||
|
||||
private[this] def deleteConsoleLines(n: Int): Unit = {
|
||||
(1 to n) foreach { _ =>
|
||||
console.println(s"$DeleteLine")
|
||||
console.println(DeleteLine)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,175 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.nio.file.{ Files, Path }
|
||||
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }
|
||||
|
||||
import org.scalatest.{ FlatSpec, Matchers }
|
||||
import sbt.Watch.{ NullLogger, _ }
|
||||
import sbt.WatchSpec._
|
||||
import sbt.internal.FileAttributes
|
||||
import sbt.io.FileEventMonitor.Event
|
||||
import sbt.io._
|
||||
import sbt.io.syntax._
|
||||
import sbt.util.Logger
|
||||
|
||||
import scala.collection.mutable
|
||||
import scala.concurrent.duration._
|
||||
|
||||
class WatchSpec extends FlatSpec with Matchers {
|
||||
private type NextAction = () => Watch.Action
|
||||
private def watch(task: Task, callbacks: (NextAction, NextAction)): Watch.Action =
|
||||
Watch(task, callbacks._1, callbacks._2)
|
||||
object TestDefaults {
|
||||
def callbacks(
|
||||
inputs: Seq[Glob],
|
||||
fileEventMonitor: Option[FileEventMonitor[FileAttributes]] = None,
|
||||
logger: Logger = NullLogger,
|
||||
parseEvent: () => Watch.Action = () => Ignore,
|
||||
onStartWatch: () => Watch.Action = () => CancelWatch: Watch.Action,
|
||||
onWatchEvent: Event[FileAttributes] => Watch.Action = _ => Ignore,
|
||||
triggeredMessage: Event[FileAttributes] => Option[String] = _ => None,
|
||||
watchingMessage: () => Option[String] = () => None
|
||||
): (NextAction, NextAction) = {
|
||||
val monitor = fileEventMonitor.getOrElse {
|
||||
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
|
||||
inputs.foreach(fileTreeRepository.register)
|
||||
val m =
|
||||
FileEventMonitor.antiEntropy(
|
||||
fileTreeRepository,
|
||||
50.millis,
|
||||
m => logger.debug(m.toString),
|
||||
50.millis,
|
||||
10.minutes
|
||||
)
|
||||
new FileEventMonitor[FileAttributes] {
|
||||
override def poll(duration: Duration): Seq[Event[FileAttributes]] = m.poll(duration)
|
||||
override def close(): Unit = m.close()
|
||||
}
|
||||
}
|
||||
val onTrigger: Event[FileAttributes] => Unit = event => {
|
||||
triggeredMessage(event).foreach(logger.info(_))
|
||||
}
|
||||
val onStart: () => Watch.Action = () => {
|
||||
watchingMessage().foreach(logger.info(_))
|
||||
onStartWatch()
|
||||
}
|
||||
val nextAction: NextAction = () => {
|
||||
val inputAction = parseEvent()
|
||||
val fileActions = monitor.poll(10.millis).map { e: Event[FileAttributes] =>
|
||||
onWatchEvent(e) match {
|
||||
case Trigger => onTrigger(e); Trigger
|
||||
case action => action
|
||||
}
|
||||
}
|
||||
(inputAction +: fileActions).min
|
||||
}
|
||||
(onStart, nextAction)
|
||||
}
|
||||
}
|
||||
object NullInputStream extends InputStream {
|
||||
override def available(): Int = 0
|
||||
override def read(): Int = -1
|
||||
}
|
||||
private class Task extends (() => Unit) {
|
||||
private val count = new AtomicInteger(0)
|
||||
override def apply(): Unit = {
|
||||
count.incrementAndGet()
|
||||
()
|
||||
}
|
||||
def getCount: Int = count.get()
|
||||
}
|
||||
"Watch" should "stop" in IO.withTemporaryDirectory { dir =>
|
||||
val task = new Task
|
||||
watch(task, TestDefaults.callbacks(inputs = Seq(dir.toRealPath ** AllPassFilter))) shouldBe CancelWatch
|
||||
}
|
||||
it should "trigger" in IO.withTemporaryDirectory { dir =>
|
||||
val triggered = new AtomicBoolean(false)
|
||||
val task = new Task
|
||||
val callbacks = TestDefaults.callbacks(
|
||||
inputs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
onStartWatch = () => if (task.getCount == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = _ => { triggered.set(true); Trigger },
|
||||
watchingMessage = () => {
|
||||
new File(dir, "file").createNewFile; None
|
||||
}
|
||||
)
|
||||
watch(task, callbacks) shouldBe CancelWatch
|
||||
assert(triggered.get())
|
||||
}
|
||||
it should "filter events" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.toPath.resolve("foo")
|
||||
val bar = realDir.toPath.resolve("bar")
|
||||
val task = new Task
|
||||
val callbacks = TestDefaults.callbacks(
|
||||
inputs = Seq(realDir ** AllPassFilter),
|
||||
onStartWatch = () => if (task.getCount == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = e => if (e.entry.typedPath.toPath == foo) Trigger else Ignore,
|
||||
triggeredMessage = e => { queue += e.entry.typedPath.toPath; None },
|
||||
watchingMessage = () => {
|
||||
IO.touch(bar.toFile); Thread.sleep(5); IO.touch(foo.toFile)
|
||||
None
|
||||
}
|
||||
)
|
||||
watch(task, callbacks) shouldBe CancelWatch
|
||||
queue.toIndexedSeq shouldBe Seq(foo)
|
||||
}
|
||||
it should "enforce anti-entropy" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.toPath.resolve("foo")
|
||||
val bar = realDir.toPath.resolve("bar")
|
||||
val task = new Task
|
||||
val callbacks = TestDefaults.callbacks(
|
||||
inputs = Seq(realDir ** AllPassFilter),
|
||||
onStartWatch = () => if (task.getCount == 3) CancelWatch else Ignore,
|
||||
onWatchEvent = _ => Trigger,
|
||||
triggeredMessage = e => { queue += e.entry.typedPath.toPath; None },
|
||||
watchingMessage = () => {
|
||||
task.getCount match {
|
||||
case 1 => Files.createFile(bar)
|
||||
case 2 =>
|
||||
bar.toFile.setLastModified(5000)
|
||||
Files.createFile(foo)
|
||||
case _ =>
|
||||
}
|
||||
None
|
||||
}
|
||||
)
|
||||
watch(task, callbacks) shouldBe CancelWatch
|
||||
queue.toIndexedSeq shouldBe Seq(bar, foo)
|
||||
}
|
||||
it should "halt on error" in IO.withTemporaryDirectory { dir =>
|
||||
val exception = new IllegalStateException("halt")
|
||||
val task = new Task { override def apply(): Unit = throw exception }
|
||||
val callbacks = TestDefaults.callbacks(
|
||||
Seq(dir.toRealPath ** AllPassFilter),
|
||||
)
|
||||
watch(task, callbacks) shouldBe new HandleError(exception)
|
||||
}
|
||||
it should "reload" in IO.withTemporaryDirectory { dir =>
|
||||
val task = new Task
|
||||
val callbacks = TestDefaults.callbacks(
|
||||
inputs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
onStartWatch = () => Ignore,
|
||||
onWatchEvent = _ => Reload,
|
||||
watchingMessage = () => { new File(dir, "file").createNewFile(); None }
|
||||
)
|
||||
watch(task, callbacks) shouldBe Reload
|
||||
}
|
||||
}
|
||||
|
||||
object WatchSpec {
|
||||
implicit class FileOps(val f: File) {
|
||||
def toRealPath: File = f.toPath.toRealPath().toFile
|
||||
}
|
||||
}
|
||||
|
|
@ -21,7 +21,8 @@ object SbtLauncherPlugin extends AutoPlugin {
|
|||
case Some(jar) => jar.data
|
||||
case None =>
|
||||
sys.error(
|
||||
s"Could not resolve sbt launcher!, dependencies := ${libraryDependencies.value}")
|
||||
s"Could not resolve sbt launcher!, dependencies := ${libraryDependencies.value}"
|
||||
)
|
||||
}
|
||||
},
|
||||
sbtLaunchJar := {
|
||||
|
|
|
|||
|
|
@ -42,6 +42,7 @@ trait Import {
|
|||
val ExistsFileFilter = sbt.io.ExistsFileFilter
|
||||
val FileFilter = sbt.io.FileFilter
|
||||
type FileFilter = sbt.io.FileFilter
|
||||
type Glob = sbt.io.Glob
|
||||
val GlobFilter = sbt.io.GlobFilter
|
||||
val Hash = sbt.io.Hash
|
||||
val HiddenFileFilter = sbt.io.HiddenFileFilter
|
||||
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
package a
|
||||
|
||||
object A {}
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
package b
|
||||
|
||||
object B {
|
||||
println(a.A.toString)
|
||||
}
|
||||
|
|
@ -0,0 +1,53 @@
|
|||
lazy val root = (project in file(".")).
|
||||
aggregate(a, b, c, d).
|
||||
settings(
|
||||
inThisBuild(Seq(
|
||||
scalaVersion := "2.11.7",
|
||||
trackInternalDependencies := TrackLevel.NoTracking
|
||||
))
|
||||
)
|
||||
|
||||
lazy val a = project in file("a")
|
||||
|
||||
lazy val b = (project in file("b")).dependsOn(a % "*->compile")
|
||||
|
||||
lazy val c = (project in file("c")).settings(exportToInternal := TrackLevel.NoTracking)
|
||||
|
||||
lazy val d = (project in file("d"))
|
||||
.dependsOn(c % "test->test;compile->compile")
|
||||
.settings(trackInternalDependencies := TrackLevel.TrackIfMissing)
|
||||
|
||||
def getConfigs(key: SettingKey[Seq[(ProjectRef, Set[String])]]):
|
||||
Def.Initialize[Map[String, Set[String]]] =
|
||||
Def.setting(key.value.map { case (p, c) => p.project -> c }.toMap)
|
||||
val checkA = taskKey[Unit]("Verify that project a's internal dependencies are as expected")
|
||||
checkA := {
|
||||
val compileDeps = getConfigs(a / Compile / internalDependencyConfigurations).value
|
||||
assert(compileDeps == Map("a" -> Set("compile")))
|
||||
val testDeps = getConfigs(a / Test / internalDependencyConfigurations).value
|
||||
assert(testDeps == Map("a" -> Set("compile", "runtime", "test")))
|
||||
}
|
||||
|
||||
val checkB = taskKey[Unit]("Verify that project b's internal dependencies are as expected")
|
||||
checkB := {
|
||||
val compileDeps = getConfigs(b / Compile / internalDependencyConfigurations).value
|
||||
assert(compileDeps == Map("b" -> Set("compile"), "a" -> Set("compile")))
|
||||
val testDeps = getConfigs(b / Test / internalDependencyConfigurations).value
|
||||
assert(testDeps == Map("b" -> Set("compile", "runtime", "test"), "a" -> Set("compile")))
|
||||
}
|
||||
|
||||
val checkC = taskKey[Unit]("Verify that project c's internal dependencies are as expected")
|
||||
checkC := {
|
||||
val compileDeps = getConfigs(c / Compile / internalDependencyConfigurations).value
|
||||
assert(compileDeps == Map("c" -> Set("compile")))
|
||||
val testDeps = getConfigs(c / Test / internalDependencyConfigurations).value
|
||||
assert(testDeps == Map("c" -> Set("compile", "runtime", "test")))
|
||||
}
|
||||
|
||||
val checkD = taskKey[Unit]("Verify that project d's internal dependencies are as expected")
|
||||
checkD := {
|
||||
val compileDeps = getConfigs(d / Compile / internalDependencyConfigurations).value
|
||||
assert(compileDeps == Map("d" -> Set("compile"), "c" -> Set("compile")))
|
||||
val testDeps = getConfigs(d / Test / internalDependencyConfigurations).value
|
||||
assert(testDeps == Map("d" -> Set("compile", "runtime", "test"), "c" -> Set("compile", "test")))
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package c
|
||||
|
||||
object C {}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package d
|
||||
|
||||
object D { println(c.C.toString) }
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
> checkA
|
||||
|
||||
> checkB
|
||||
|
||||
> checkC
|
||||
|
||||
> checkD
|
||||
|
|
@ -0,0 +1,54 @@
|
|||
// The project contains two files: { Foo.txt, Bar.md } in the subdirector base/subdir/nested-subdir
|
||||
|
||||
// Check that we can correctly extract Foo.txt with a recursive source
|
||||
val foo = taskKey[Seq[File]]("Retrieve Foo.txt")
|
||||
|
||||
foo / fileInputs += baseDirectory.value ** "*.txt"
|
||||
|
||||
foo := (foo / fileInputs).value.all.map(_._1.toFile)
|
||||
|
||||
val checkFoo = taskKey[Unit]("Check that the Foo.txt file is retrieved")
|
||||
|
||||
checkFoo := assert(foo.value == Seq(baseDirectory.value / "base/subdir/nested-subdir/Foo.txt"))
|
||||
|
||||
// Check that we can correctly extract Bar.md with a non-recursive source
|
||||
val bar = taskKey[Seq[File]]("Retrieve Bar.md")
|
||||
|
||||
bar / fileInputs += baseDirectory.value / "base/subdir/nested-subdir" * "*.md"
|
||||
|
||||
bar := (bar / fileInputs).value.all.map(_._1.toFile)
|
||||
|
||||
val checkBar = taskKey[Unit]("Check that the Bar.md file is retrieved")
|
||||
|
||||
checkBar := assert(bar.value == Seq(baseDirectory.value / "base/subdir/nested-subdir/Bar.md"))
|
||||
|
||||
// Check that we can correctly extract Bar.md and Foo.md with a non-recursive source
|
||||
val all = taskKey[Seq[File]]("Retrieve all files")
|
||||
|
||||
all / fileInputs += baseDirectory.value / "base" / "subdir" / "nested-subdir" * AllPassFilter
|
||||
|
||||
val checkAll = taskKey[Unit]("Check that the Bar.md file is retrieved")
|
||||
|
||||
checkAll := {
|
||||
import sbt.dsl.LinterLevel.Ignore
|
||||
val expected = Set("Foo.txt", "Bar.md").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
|
||||
assert((all / fileInputs).value.all.map(_._1.toFile).toSet == expected)
|
||||
}
|
||||
|
||||
val set = taskKey[Seq[File]]("Specify redundant sources in a set")
|
||||
|
||||
set / fileInputs ++= Seq(
|
||||
baseDirectory.value / "base" ** -DirectoryFilter,
|
||||
baseDirectory.value / "base" / "subdir" / "nested-subdir" * -DirectoryFilter
|
||||
)
|
||||
|
||||
val checkSet = taskKey[Unit]("Verify that redundant sources are handled")
|
||||
|
||||
checkSet := {
|
||||
val redundant = (set / fileInputs).value.all.map(_._1.toFile)
|
||||
assert(redundant.size == 2)
|
||||
|
||||
val deduped = (set / fileInputs).value.toSet[Glob].all.map(_._1.toFile)
|
||||
val expected = Seq("Bar.md", "Foo.txt").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
|
||||
assert(deduped.sorted == expected)
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
> checkFoo
|
||||
|
||||
> checkBar
|
||||
|
||||
> checkAll
|
||||
|
||||
> checkSet
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
import java.nio.file.Path
|
||||
|
||||
import sbt.internal.{FileAttributes, FileTree}
|
||||
|
||||
val allInputs = taskKey[Seq[File]]("")
|
||||
val allInputsExplicit = taskKey[Seq[File]]("")
|
||||
|
||||
val checkInputs = inputKey[Unit]("")
|
||||
val checkInputsExplicit = inputKey[Unit]("")
|
||||
|
||||
allInputs := (Compile / unmanagedSources / fileInputs).value.all.map(_._1.toFile)
|
||||
|
||||
checkInputs := {
|
||||
val res = allInputs.value
|
||||
val scala = (Compile / scalaSource).value
|
||||
val expected = Def.spaceDelimited("<args>").parsed.map(scala / _).toSet
|
||||
assert(res.toSet == expected)
|
||||
}
|
||||
|
||||
// In this test we override the FileTree.Repository used by the all method.
|
||||
allInputsExplicit := {
|
||||
val files = scala.collection.mutable.Set.empty[File]
|
||||
val underlying = implicitly[FileTree.Repository]
|
||||
val repo = new FileTree.Repository {
|
||||
override def get(glob: Glob): Seq[(Path, FileAttributes)] = {
|
||||
val res = underlying.get(glob)
|
||||
files ++= res.map(_._1.toFile)
|
||||
res
|
||||
}
|
||||
override def close(): Unit = {}
|
||||
}
|
||||
val include = (Compile / unmanagedSources / includeFilter).value
|
||||
val _ = (Compile / unmanagedSources / fileInputs).value.all(repo).map(_._1.toFile).toSet
|
||||
files.filter(include.accept).toSeq
|
||||
}
|
||||
|
||||
checkInputsExplicit := {
|
||||
val res = allInputsExplicit.value
|
||||
val scala = (Compile / scalaSource).value
|
||||
val expected = Def.spaceDelimited("<args>").parsed.map(scala / _).toSet
|
||||
assert(res.toSet == expected)
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package bar
|
||||
|
||||
object Bar
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package foo
|
||||
|
||||
object Foo
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
> checkInputs foo/Foo.scala bar/Bar.scala
|
||||
|
||||
> checkInputsExplicit foo/Foo.scala bar/Bar.scala
|
||||
|
|
@ -0,0 +1,58 @@
|
|||
import sbt.internal.TransitiveGlobs._
|
||||
val cached = settingKey[Unit]("")
|
||||
val newInputs = settingKey[Unit]("")
|
||||
Compile / cached / fileInputs := (Compile / unmanagedSources / fileInputs).value ++
|
||||
(Compile / unmanagedResources / fileInputs).value
|
||||
Test / cached / fileInputs := (Test / unmanagedSources / fileInputs).value ++
|
||||
(Test / unmanagedResources / fileInputs).value
|
||||
Compile / newInputs / fileInputs += baseDirectory.value * "*.sc"
|
||||
|
||||
Compile / unmanagedSources / fileInputs ++= (Compile / newInputs / fileInputs).value
|
||||
|
||||
val checkCompile = taskKey[Unit]("check compile inputs")
|
||||
checkCompile := {
|
||||
val actual = (Compile / compile / transitiveInputs).value.toSet
|
||||
val expected = ((Compile / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value).toSet
|
||||
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
|
||||
if (actual != expected) {
|
||||
val actualExtra = actual diff expected
|
||||
val expectedExtra = expected diff actual
|
||||
throw new IllegalStateException(
|
||||
s"$actual did not equal $expected\n" +
|
||||
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields $actualExtra" else ""}" +
|
||||
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
val checkRun = taskKey[Unit]("check runtime inputs")
|
||||
checkRun := {
|
||||
val actual = (Runtime / run / transitiveInputs).value.toSet
|
||||
// Runtime doesn't add any new inputs, but it should correctly find the Compile inputs via
|
||||
// delegation.
|
||||
val expected = ((Compile / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value).toSet
|
||||
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
|
||||
if (actual != expected) {
|
||||
val actualExtra = actual diff expected
|
||||
val expectedExtra = expected diff actual
|
||||
throw new IllegalStateException(
|
||||
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields: $actualExtra" else ""}" +
|
||||
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
|
||||
}
|
||||
}
|
||||
|
||||
val checkTest = taskKey[Unit]("check test inputs")
|
||||
checkTest := {
|
||||
val actual = (Test / compile / transitiveInputs).value.toSet
|
||||
val expected = ((Test / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value ++
|
||||
(Compile / cached / fileInputs).value).toSet
|
||||
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
|
||||
if (actual != expected) {
|
||||
val actualExtra = actual diff expected
|
||||
val expectedExtra = expected diff actual
|
||||
throw new IllegalStateException(
|
||||
s"$actual did not equal $expected\n" +
|
||||
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields $actualExtra" else ""}" +
|
||||
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package bar
|
||||
|
||||
object Bar
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
package foo
|
||||
|
||||
object Foo
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
> checkCompile
|
||||
|
||||
> checkRun
|
||||
|
||||
> checkTest
|
||||
|
|
@ -8,6 +8,6 @@ setStringValue := setStringValueImpl.evaluated
|
|||
|
||||
checkStringValue := checkStringValueImpl.evaluated
|
||||
|
||||
watchSources += file("string.txt")
|
||||
setStringValue / watchTriggers := baseDirectory.value * "string.txt" :: Nil
|
||||
|
||||
watchOnEvent := { _ => Watched.CancelWatch }
|
||||
watchOnEvent := { _ => _ => Watch.CancelWatch }
|
||||
|
|
@ -1,16 +1,16 @@
|
|||
import sbt._
|
||||
|
||||
import Keys.baseDirectory
|
||||
|
||||
object Build {
|
||||
private[this] var string: String = ""
|
||||
private[this] val stringFile = file("string.txt")
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
string = Def.spaceDelimited().parsed.mkString(" ").trim
|
||||
IO.write(stringFile, string)
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
IO.write(baseDirectory.value / stringFile, string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
assert(string == Def.spaceDelimited().parsed.mkString(" ").trim)
|
||||
assert(IO.read(stringFile) == string)
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(baseDirectory.value / stringFile) == string)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
> ~; setStringValue string.txt foo; setStringValue string.txt bar
|
||||
|
||||
> checkStringValue string.txt bar
|
||||
|
||||
> ~;setStringValue string.txt foo;setStringValue string.txt bar; checkStringValue string.txt bar
|
||||
|
||||
> ~; setStringValue string.txt foo;setStringValue string.txt bar; checkStringValue string.txt bar
|
||||
|
||||
> ~; setStringValue string.txt foo; setStringValue string.txt bar; checkStringValue string.txt bar
|
||||
|
||||
# no leading semicolon
|
||||
> ~ setStringValue string.txt foo; setStringValue string.txt bar; checkStringValue string.txt bar
|
||||
|
||||
> ~ setStringValue string.txt foo
|
||||
|
||||
> checkStringValue string.txt foo
|
||||
|
||||
# All of the other tests have involved input tasks, so include commands with regular tasks as well.
|
||||
> ~; compile; setStringValue string.txt baz; checkStringValue string.txt baz
|
||||
# Ensure that trailing semi colons work
|
||||
> ~ compile; setStringValue string.txt baz; checkStringValue string.txt baz;
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
import sbt.input.aggregation.Build
|
||||
|
||||
val root = Build.root
|
||||
val foo = Build.foo
|
||||
val bar = Build.bar
|
||||
|
|
@ -0,0 +1,40 @@
|
|||
package sbt.input.aggregation
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
lazy val foo = project.settings(
|
||||
watchStartMessage := { (count: Int, _, _) => Some(s"FOO $count") },
|
||||
Compile / compile / watchTriggers += baseDirectory.value * "foo.txt",
|
||||
Compile / compile / watchStartMessage := { (count: Int, _, _) =>
|
||||
// this checks that Compile / compile / watchStartMessage
|
||||
// is preferred to Compile / watchStartMessage
|
||||
val outputFile = baseDirectory.value / "foo.txt"
|
||||
IO.write(outputFile, "compile")
|
||||
Some(s"compile $count")
|
||||
},
|
||||
Compile / watchStartMessage := { (count: Int, _, _) => Some(s"Compile $count") },
|
||||
Runtime / watchStartMessage := { (count: Int, _, _) => Some(s"Runtime $count") },
|
||||
setStringValue := {
|
||||
val _ = (fileInputs in (bar, setStringValue)).value
|
||||
setStringValueImpl.evaluated
|
||||
},
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchOnEvent := { _ => _ => Watch.CancelWatch }
|
||||
)
|
||||
lazy val bar = project.settings(fileInputs in setStringValue += baseDirectory.value * "foo.txt")
|
||||
lazy val root = (project in file(".")).aggregate(foo, bar).settings(
|
||||
watchOnEvent := { _ => _ => Watch.CancelWatch }
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
> ~ foo/Runtime/setStringValue bar/foo.txt foo
|
||||
|
||||
> checkStringValue bar/foo.txt foo
|
||||
|
||||
> ~ foo/compile
|
||||
|
||||
> checkStringValue foo/foo.txt compile
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
import sbt.watch.task.Build
|
||||
|
||||
val root = Build.root
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
package sbt.watch.task
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
import sbt.internal.FileTree
|
||||
|
||||
object Build {
|
||||
val reloadFile = settingKey[File]("file to toggle whether or not to reload")
|
||||
val setStringValue = taskKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
val foo = taskKey[Unit]("foo")
|
||||
def setStringValueImpl: Def.Initialize[Task[Unit]] = Def.task {
|
||||
val i = (setStringValue / fileInputs).value
|
||||
val (stringFile, string) = ("foo.txt", "bar")
|
||||
val absoluteFile = baseDirectory.value.toPath.resolve(stringFile).toFile
|
||||
IO.write(absoluteFile, string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
lazy val root = (project in file(".")).settings(
|
||||
reloadFile := baseDirectory.value / "reload",
|
||||
foo / fileInputs += baseDirectory.value * "foo.txt",
|
||||
setStringValue := Def.taskDyn {
|
||||
// This hides foo / fileInputs from the input graph
|
||||
Def.taskDyn {
|
||||
val _ = (foo / fileInputs).value.all
|
||||
// By putting setStringValueImpl.value inside a Def.task, we ensure that
|
||||
// (foo / fileInputs).value is registered with the file repository before modifying the file.
|
||||
Def.task(setStringValueImpl.value)
|
||||
}
|
||||
}.value,
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchOnInputEvent := { (_, _) => Watch.CancelWatch },
|
||||
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch },
|
||||
watchTasks := Def.inputTask {
|
||||
val prev = watchTasks.evaluated
|
||||
new StateTransform(prev.state.fail)
|
||||
}.evaluated
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
# This tests that we can override the state transformation in the watch task
|
||||
# In the build, watchOnEvent should return CancelWatch which should be successful, but we
|
||||
# override watchTasks to fail the state instead
|
||||
|
||||
-> watch root / setStringValue
|
||||
|
||||
> checkStringValue foo.txt bar
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
import sbt.input.aggregation.Build
|
||||
|
||||
val root = Build.root
|
||||
val foo = Build.foo
|
||||
val bar = Build.bar
|
||||
|
||||
Global / watchTriggers += baseDirectory.value * "baz.txt"
|
||||
|
|
@ -0,0 +1,94 @@
|
|||
package sbt.input.aggregation
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
import sbt.internal.TransitiveGlobs._
|
||||
|
||||
object Build {
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
val checkTriggers = taskKey[Unit]("Check that the triggers are correctly aggregated.")
|
||||
val checkGlobs = taskKey[Unit]("Check that the globs are correctly aggregated and that the globs are the union of the inputs and the triggers")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
def checkGlobsImpl: Def.Initialize[Task[Unit]] = Def.task {
|
||||
val (globInputs, globTriggers) = (Compile / compile / transitiveGlobs).value
|
||||
val inputs = (Compile / compile / transitiveInputs).value.toSet
|
||||
val triggers = (Compile / compile / transitiveTriggers).value.toSet
|
||||
assert(globInputs.toSet == inputs)
|
||||
assert(globTriggers.toSet == triggers)
|
||||
}
|
||||
lazy val foo = project.settings(
|
||||
setStringValue := {
|
||||
val _ = (fileInputs in (bar, setStringValue)).value
|
||||
setStringValueImpl.evaluated
|
||||
},
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch },
|
||||
watchOnInputEvent := { (_, _) => Watch.CancelWatch },
|
||||
Compile / compile / watchOnStart := { _ => () => Watch.CancelWatch },
|
||||
checkTriggers := {
|
||||
val actual = (Compile / compile / transitiveTriggers).value.toSet
|
||||
val base = baseDirectory.value.getParentFile
|
||||
// This checks that since foo depends on bar there is a transitive trigger generated
|
||||
// for the "bar.txt" trigger added to bar / Compile / unmanagedResources (which is a
|
||||
// transitive dependency of
|
||||
val expected: Set[Glob] = Set(base * "baz.txt", (base / "bar") * "bar.txt")
|
||||
assert(actual == expected)
|
||||
},
|
||||
Test / test / watchTriggers += baseDirectory.value * "test.txt",
|
||||
Test / checkTriggers := {
|
||||
val testTriggers = (Test / test / transitiveTriggers).value.toSet
|
||||
// This validates that since the "test.txt" trigger is only added to the Test / test task,
|
||||
// that the Test / compile does not pick it up. Both of them pick up the the triggers that
|
||||
// are found in the test above for the compile configuration because of the transitive
|
||||
// classpath dependency that is added in Defaults.internalDependencies.
|
||||
val compileTriggers = (Test / compile / transitiveTriggers).value.toSet
|
||||
val base = baseDirectory.value.getParentFile
|
||||
val expected: Set[Glob] = Set(
|
||||
base * "baz.txt", (base / "bar") * "bar.txt", (base / "foo") * "test.txt")
|
||||
assert(testTriggers == expected)
|
||||
assert((testTriggers - ((base / "foo") * "test.txt")) == compileTriggers)
|
||||
},
|
||||
checkGlobs := checkGlobsImpl.value
|
||||
).dependsOn(bar)
|
||||
lazy val bar = project.settings(
|
||||
fileInputs in setStringValue += baseDirectory.value * "foo.txt",
|
||||
setStringValue / watchTriggers += baseDirectory.value * "bar.txt",
|
||||
// This trigger should transitively propagate to foo / compile and foo / Test / compile
|
||||
Compile / unmanagedResources / watchTriggers += baseDirectory.value * "bar.txt",
|
||||
checkTriggers := {
|
||||
val base = baseDirectory.value.getParentFile
|
||||
val actual = (Compile / compile / transitiveTriggers).value
|
||||
val expected: Set[Glob] = Set((base / "bar") * "bar.txt", base * "baz.txt")
|
||||
assert(actual.toSet == expected)
|
||||
},
|
||||
// This trigger should not transitively propagate to any foo task
|
||||
Test / unmanagedResources / watchTriggers += baseDirectory.value * "bar-test.txt",
|
||||
Test / checkTriggers := {
|
||||
val testTriggers = (Test / test / transitiveTriggers).value.toSet
|
||||
val compileTriggers = (Test / compile / transitiveTriggers).value.toSet
|
||||
val base = baseDirectory.value.getParentFile
|
||||
val expected: Set[Glob] = Set(
|
||||
base * "baz.txt", (base / "bar") * "bar.txt", (base / "bar") * "bar-test.txt")
|
||||
assert(testTriggers == expected)
|
||||
assert(testTriggers == compileTriggers)
|
||||
},
|
||||
checkGlobs := checkGlobsImpl.value
|
||||
)
|
||||
lazy val root = (project in file(".")).aggregate(foo, bar).settings(
|
||||
watchOnEvent := { _ => _ => Watch.CancelWatch },
|
||||
checkTriggers := {
|
||||
val actual = (Compile / compile / transitiveTriggers).value
|
||||
val expected: Seq[Glob] = baseDirectory.value * "baz.txt" :: Nil
|
||||
assert(actual == expected)
|
||||
},
|
||||
checkGlobs := checkGlobsImpl.value
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
> checkTriggers
|
||||
|
||||
> Test / checkTriggers
|
||||
|
||||
> checkGlobs
|
||||
|
||||
# do not set the project here to ensure the bar/bar.txt trigger is captured by aggregation
|
||||
# also add random spaces and multiple commands to ensure the parser is sane.
|
||||
> ~ setStringValue bar/bar.txt bar; root / setStringValue bar/bar.txt baz
|
||||
|
||||
> checkStringValue bar/bar.txt baz
|
||||
|
|
@ -0,0 +1 @@
|
|||
val root = sbt.input.parser.Build.root
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
package sbt
|
||||
package input.parser
|
||||
|
||||
import complete.Parser
|
||||
import complete.Parser._
|
||||
|
||||
import java.io.{ PipedInputStream, PipedOutputStream }
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val root = (project in file(".")).settings(
|
||||
useSuperShell := false,
|
||||
watchInputStream := inputStream,
|
||||
watchStartMessage := { (_, _, _) =>
|
||||
Build.outputStream.write('\n'.toByte)
|
||||
Build.outputStream.flush()
|
||||
Some("default start message")
|
||||
}
|
||||
)
|
||||
val outputStream = new PipedOutputStream()
|
||||
val inputStream = new PipedInputStream(outputStream)
|
||||
val byeParser: Parser[Watch.Action] = "bye" ^^^ Watch.CancelWatch
|
||||
val helloParser: Parser[Watch.Action] = "hello" ^^^ Watch.Ignore
|
||||
// Note that the order is byeParser | helloParser. In general, we want the higher priority
|
||||
// action to come first because otherwise we would potentially scan past it.
|
||||
val helloOrByeParser: Parser[Watch.Action] = byeParser | helloParser
|
||||
val alternativeStartMessage: (Int, String, Seq[String]) => Option[String] = { (_, _, _) =>
|
||||
outputStream.write("xybyexyblahxyhelloxy".getBytes)
|
||||
outputStream.flush()
|
||||
Some("alternative start message")
|
||||
}
|
||||
val otherAlternativeStartMessage: (Int, String, Seq[String]) => Option[String] = { (_, _, _) =>
|
||||
outputStream.write("xyhellobyexyblahx".getBytes)
|
||||
outputStream.flush()
|
||||
Some("other alternative start message")
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
# this should exit because watchStartMessage writes "\n" to Build.outputStream, which in turn
|
||||
# triggers a CancelWatch
|
||||
> ~ compile
|
||||
|
||||
> set watchStartMessage := sbt.input.parser.Build.alternativeStartMessage
|
||||
|
||||
> set watchInputParser := sbt.input.parser.Build.helloOrByeParser
|
||||
|
||||
# this should exit because we write "xybyexyblahxyhelloxy" to Build.outputStream. The
|
||||
# helloOrByeParser will produce Watch.Ignore and Watch.CancelWatch but the
|
||||
# Watch.CancelWatch event should win.
|
||||
> ~ compile
|
||||
|
||||
> set watchStartMessage := sbt.input.parser.Build.otherAlternativeStartMessage
|
||||
|
||||
# this is the same as above except that hello appears before bye in the string
|
||||
> ~ compile
|
||||
|
|
@ -0,0 +1,13 @@
|
|||
import sbt.legacy.sources.Build._
|
||||
|
||||
Global / watchSources += new sbt.internal.io.Source(baseDirectory.value, "global.txt", NothingFilter, false)
|
||||
|
||||
watchSources in setStringValue += new sbt.internal.io.Source(baseDirectory.value, "foo.txt", NothingFilter, false)
|
||||
|
||||
setStringValue := setStringValueImpl.evaluated
|
||||
|
||||
checkStringValue := checkStringValueImpl.evaluated
|
||||
|
||||
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch }
|
||||
watchOnInputEvent := { (_, _) => Watch.CancelWatch }
|
||||
watchOnMetaBuildEvent := { (_, _) => Watch.CancelWatch }
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
package sbt.legacy.sources
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
> ~ setStringValue foo.txt foo
|
||||
|
||||
> checkStringValue foo.txt foo
|
||||
|
|
@ -1,11 +1,6 @@
|
|||
import scala.util.Try
|
||||
|
||||
val checkCount = inputKey[Unit]("check that compile has run a specified number of times")
|
||||
val checkReloadCount = inputKey[Unit]("check whether the project was reloaded")
|
||||
val failingTask = taskKey[Unit]("should always fail")
|
||||
val maybeReload = settingKey[(Int, Boolean) => Watched.Action]("possibly reload")
|
||||
val resetCount = taskKey[Unit]("reset compile count")
|
||||
val reloadFile = settingKey[File]("get the current reload file")
|
||||
|
||||
checkCount := {
|
||||
val expected = Def.spaceDelimited().parsed.head.toInt
|
||||
|
|
@ -13,12 +8,6 @@ checkCount := {
|
|||
throw new IllegalStateException(s"Expected ${expected} compilation runs, got ${Count.get}")
|
||||
}
|
||||
|
||||
maybeReload := { (_, _) =>
|
||||
if (Count.reloadCount(reloadFile.value) == 0) Watched.Reload else Watched.CancelWatch
|
||||
}
|
||||
|
||||
reloadFile := baseDirectory.value / "reload-count"
|
||||
|
||||
resetCount := {
|
||||
Count.reset()
|
||||
}
|
||||
|
|
@ -27,24 +16,6 @@ failingTask := {
|
|||
throw new IllegalStateException("failed")
|
||||
}
|
||||
|
||||
watchPreWatch := maybeReload.value
|
||||
|
||||
checkReloadCount := {
|
||||
val expected = Def.spaceDelimited().parsed.head.toInt
|
||||
assert(Count.reloadCount(reloadFile.value) == expected)
|
||||
}
|
||||
|
||||
val addReloadShutdownHook = Command.command("addReloadShutdownHook") { state =>
|
||||
state.addExitHook {
|
||||
val base = Project.extract(state).get(baseDirectory)
|
||||
val file = base / "reload-count"
|
||||
val currentCount = Try(Count.reloadCount(file)).getOrElse(0)
|
||||
IO.write(file, s"${currentCount + 1}".getBytes)
|
||||
}
|
||||
}
|
||||
|
||||
commands += addReloadShutdownHook
|
||||
|
||||
Compile / compile := {
|
||||
Count.increment()
|
||||
// Trigger a new build by updating the last modified time
|
||||
|
|
|
|||
|
|
@ -0,0 +1,4 @@
|
|||
val checkReloaded = taskKey[Unit]("Asserts that the build was reloaded")
|
||||
checkReloaded := { () }
|
||||
|
||||
watchOnIteration := { _ => Watch.CancelWatch }
|
||||
|
|
@ -0,0 +1 @@
|
|||
watchOnStart := { _ => () => Watch.Reload }
|
||||
|
|
@ -1,28 +1,24 @@
|
|||
# verify that reloading occurs if watchPreWatch returns Watched.Reload
|
||||
> addReloadShutdownHook
|
||||
> checkReloadCount 0
|
||||
# verify that reloading occurs if watchOnStart returns Watch.Reload
|
||||
$ copy-file changes/extra.sbt extra.sbt
|
||||
|
||||
> ~compile
|
||||
> checkReloadCount 1
|
||||
> checkReloaded
|
||||
|
||||
# verify that the watch terminates when we reach the specified count
|
||||
> resetCount
|
||||
> set watchPreWatch := { (count: Int, _) => if (count == 2) Watched.CancelWatch else Watched.Ignore }
|
||||
> set watchOnIteration := { (count: Int) => if (count == 2) Watch.CancelWatch else Watch.Ignore }
|
||||
> ~compile
|
||||
> checkCount 2
|
||||
|
||||
# verify that the watch terminates and returns an error when we reach the specified count
|
||||
> resetCount
|
||||
> set watchPreWatch := { (count: Int, _) => if (count == 2) Watched.HandleError else Watched.Ignore }
|
||||
# Returning Watched.HandleError causes the '~' command to fail
|
||||
> set watchOnIteration := { (count: Int) => if (count == 2) new Watch.HandleError(new Exception("")) else Watch.Ignore }
|
||||
# Returning Watch.HandleError causes the '~' command to fail
|
||||
-> ~compile
|
||||
> checkCount 2
|
||||
|
||||
# verify that a re-build is triggered when we reach the specified count
|
||||
> resetCount
|
||||
> set watchPreWatch := { (count: Int, _) => if (count == 2) Watched.Trigger else if (count == 3) Watched.CancelWatch else Watched.Ignore }
|
||||
> set watchOnIteration := { (count: Int) => if (count == 2) Watch.Trigger else if (count == 3) Watch.CancelWatch else Watch.Ignore }
|
||||
> ~compile
|
||||
> checkCount 3
|
||||
|
||||
# verify that the watch exits and returns an error if the task fails
|
||||
> set watchPreWatch := { (_, lastStatus: Boolean) => if (lastStatus) Watched.Ignore else Watched.HandleError }
|
||||
-> ~failingTask
|
||||
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
import sbt.watch.task.Build
|
||||
|
||||
val root = Build.root
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
package sbt.watch.task
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val reloadFile = settingKey[File]("file to toggle whether or not to reload")
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
lazy val root = (project in file(".")).settings(
|
||||
reloadFile := baseDirectory.value / "reload",
|
||||
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
|
||||
setStringValue := setStringValueImpl.evaluated,
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch },
|
||||
watchTasks := Def.inputTask {
|
||||
val prev = watchTasks.evaluated
|
||||
new StateTransform(prev.state.fail)
|
||||
}.evaluated
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
# This tests that we can override the state transformation in the watch task
|
||||
# In the build, watchOnEvent should return CancelWatch which should be successful, but we
|
||||
# override watchTasks to fail the state instead
|
||||
|
||||
-> watch root / setStringValue foo.txt bar
|
||||
|
||||
> checkStringValue foo.txt bar
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
import sbt.watch.task.Build
|
||||
|
||||
val root = Build.root
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
package sbt.watch.task
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
lazy val root = (project in file(".")).settings(
|
||||
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
|
||||
setStringValue := setStringValueImpl.evaluated,
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchStartMessage := { (_, _, _) =>
|
||||
IO.touch(baseDirectory.value / "foo.txt", true)
|
||||
Some("watching")
|
||||
},
|
||||
watchOnStart := { _ => () => Watch.CancelWatch }
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
package sbt.watch.task
|
||||
|
||||
import sbt._
|
||||
import Keys._
|
||||
|
||||
object Build {
|
||||
val reloadFile = settingKey[File]("file to toggle whether or not to reload")
|
||||
val setStringValue = inputKey[Unit]("set a global string to a value")
|
||||
val checkStringValue = inputKey[Unit]("check the value of a global")
|
||||
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
|
||||
IO.write(file(stringFile), string)
|
||||
}
|
||||
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val Seq(stringFile, string) = Def.spaceDelimited().parsed
|
||||
assert(IO.read(file(stringFile)) == string)
|
||||
}
|
||||
lazy val root = (project in file(".")).settings(
|
||||
reloadFile := baseDirectory.value / "reload",
|
||||
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
|
||||
setStringValue := setStringValueImpl.evaluated,
|
||||
checkStringValue := checkStringValueImpl.evaluated,
|
||||
watchStartMessage := { (_, _, _) =>
|
||||
IO.touch(baseDirectory.value / "foo.txt", true)
|
||||
Some("watching")
|
||||
},
|
||||
watchOnTriggerEvent := { (f, e) =>
|
||||
if (reloadFile.value.exists) Watch.CancelWatch else {
|
||||
IO.touch(reloadFile.value, true)
|
||||
Watch.Reload
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
# this tests that if the watch _task_ is able to reload the project
|
||||
|
||||
# the original version of the build will only return Watch.Reload for trigger events while the
|
||||
# updated version will return Watch.CancelWatch. If this test exits, it more or less works.
|
||||
$ copy-file changes/Build.scala project/Build.scala
|
||||
|
||||
# setStringValue has foo.txt as a watch source so running that command should first trigger a
|
||||
# reload. After the project has been reloaded, the next write to setStringValue will also
|
||||
# trigger a CancelWatch event, hence we exit.
|
||||
> watch root / setStringValue foo.txt bar
|
||||
|
||||
> checkStringValue foo.txt bar
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
> ~; setStringValue foo; setStringValue bar
|
||||
|
||||
> checkStringValue bar
|
||||
|
||||
> ~;setStringValue foo;setStringValue bar; checkStringValue bar
|
||||
|
||||
> ~; setStringValue foo;setStringValue bar; checkStringValue bar
|
||||
|
||||
> ~; setStringValue foo; setStringValue bar; checkStringValue bar
|
||||
|
||||
# no leading semicolon
|
||||
> ~ setStringValue foo; setStringValue bar; checkStringValue bar
|
||||
|
||||
> ~ setStringValue foo
|
||||
|
||||
> checkStringValue foo
|
||||
|
||||
# All of the other tests have involved input tasks, so include commands with regular tasks as well.
|
||||
> ~; compile; setStringValue baz; checkStringValue baz
|
||||
# Ensure that trailing semi colons work
|
||||
> ~ compile; setStringValue baz; checkStringValue baz;
|
||||
Loading…
Reference in New Issue