Merge pull request #4664 from eatkins/m3-io-fixes

M3 io fixes
eugene yokota 2019-05-12 18:24:00 -04:00 committed by GitHub
commit c1d2e703b6
53 changed files with 602 additions and 250 deletions

View File

@ -23,7 +23,7 @@ install:
- SET PATH=C:\sbt\sbt\bin;%PATH%
- SET SBT_OPTS=-XX:MaxPermSize=2g -Xmx4g -Dsbt.supershell=never -Dfile.encoding=UTF8
test_script:
- sbt "scripted actions/*" "testOnly sbt.ServerSpec"
- sbt "scripted actions/* classloader-cache/* nio/* watch/*" "testOnly sbt.ServerSpec"
cache:
- '%USERPROFILE%\.ivy2\cache'

View File

@ -20,6 +20,9 @@ final case class Reboot(
) extends xsbti.Reboot {
def arguments = argsList.toArray
}
case object Reload extends Exception
final case class ApplicationID(
groupID: String,
name: String,

View File

@ -37,6 +37,7 @@ import sbt.internal.librarymanagement.mavenint.{
SbtPomExtraProperties
}
import sbt.internal.librarymanagement.{ CustomHttp => _, _ }
import sbt.internal.nio.{ CheckBuildSources, Globs }
import sbt.internal.server.{
Definition,
LanguageServerProtocol,
@ -67,7 +68,7 @@ import sbt.librarymanagement.ivy._
import sbt.librarymanagement.syntax._
import sbt.nio.Watch
import sbt.nio.Keys._
import sbt.nio.file.FileTreeView
import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob }
import sbt.nio.file.syntax._
import sbt.std.TaskExtra._
import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint }
@ -151,13 +152,15 @@ object Defaults extends BuildCommon {
fileInputs :== Nil,
inputFileStamper :== sbt.nio.FileStamper.Hash,
outputFileStamper :== sbt.nio.FileStamper.LastModified,
watchForceTriggerOnAnyChange :== true,
onChangedBuildSource :== sbt.nio.Keys.WarnOnSourceChanges,
watchForceTriggerOnAnyChange :== false,
watchPersistFileStamps :== true,
watchTriggers :== Nil,
clean := { () },
sbt.nio.Keys.fileAttributeMap := {
sbt.nio.Keys.fileStampCache := {
state.value
.get(sbt.nio.Keys.persistentFileAttributeMap)
.getOrElse(new sbt.nio.Keys.FileAttributeMap)
.get(sbt.nio.Keys.persistentFileStampCache)
.getOrElse(new sbt.nio.FileStamp.Cache)
},
) ++ TaskRepository
.proxy(GlobalScope / classLoaderCache, ClassLoaderCache(4)) ++ globalIvyCore ++ globalJvmCore
@ -253,18 +256,10 @@ object Defaults extends BuildCommon {
outputStrategy :== None, // TODO - This might belong elsewhere.
buildStructure := Project.structure(state.value),
settingsData := buildStructure.value.data,
settingsData / fileInputs := {
val baseDir = file(".").getCanonicalFile
val sourceFilter = ("*.sbt" || "*.scala" || "*.java")
val projectDir = baseDir / "project"
Seq(
baseDir * "*.sbt",
projectDir * sourceFilter,
// We only want to recursively look in source because otherwise we have to search
// the project target directories which is expensive.
projectDir / "src" ** sourceFilter,
)
},
aggregate in checkBuildSources :== false,
checkBuildSources / Continuous.dynamicInputs := None,
checkBuildSources / fileInputs := CheckBuildSources.buildSourceFileInputs.value,
checkBuildSources := CheckBuildSources.needReloadImpl.value,
trapExit :== true,
connectInput :== false,
cancelable :== true,
@ -360,7 +355,6 @@ object Defaults extends BuildCommon {
watchStartMessage :== Watch.defaultStartWatch,
watchTasks := Continuous.continuousTask.evaluated,
aggregate in watchTasks :== false,
watchTrackMetaBuild :== true,
watchTriggeredMessage :== Watch.defaultOnTriggerMessage,
)
)
@ -419,8 +413,11 @@ object Defaults extends BuildCommon {
case NothingFilter | HiddenFileFilter => include
case exclude => include -- exclude
}
val baseSources = if (sourcesInBase.value) baseDirectory.value * filter :: Nil else Nil
unmanagedSourceDirectories.value.map(_ ** filter) ++ baseSources
val baseSources =
if (sourcesInBase.value) Globs(baseDirectory.value.toPath, recursive = false, filter) :: Nil
else Nil
unmanagedSourceDirectories.value
.map(d => Globs(d.toPath, recursive = true, filter)) ++ baseSources
},
unmanagedSources := (unmanagedSources / inputFileStamps).value.map(_._1.toFile),
managedSourceDirectories := Seq(sourceManaged.value),
@ -451,9 +448,9 @@ object Defaults extends BuildCommon {
case NothingFilter | HiddenFileFilter => include
case exclude => include -- exclude
}
unmanagedResourceDirectories.value.map(_ ** filter)
unmanagedResourceDirectories.value.map(d => Globs(d.toPath, recursive = true, filter))
},
unmanagedResources := (unmanagedResources / allInputFiles).value.map(_.toFile),
unmanagedResources := (unmanagedResources / inputFileStamps).value.map(_._1.toFile),
resourceGenerators :== Nil,
resourceGenerators += Def.task {
PluginDiscovery.writeDescriptors(discoveredSbtPlugins.value, resourceManaged.value)
@ -596,7 +593,7 @@ object Defaults extends BuildCommon {
compileInputsSettings
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
clean := Clean.task(ThisScope, full = false).value,
fileOutputs := Seq(classDirectory.value ** "*.class"),
fileOutputs := Seq(Glob(classDirectory.value, RecursiveGlob / "*.class")),
compile := compileTask.value,
internalDependencyConfigurations := InternalDependencies.configurations.value,
manipulateBytecode := compileIncremental.value,
@ -651,10 +648,8 @@ object Defaults extends BuildCommon {
cleanKeepGlobs := historyPath.value.map(_.toGlob).toSeq,
clean := Def.taskDyn(Clean.task(resolvedScoped.value.scope, full = true)).value,
consoleProject := consoleProjectTask.value,
watchTransitiveSources := watchTransitiveSourcesTask.value,
watch := watchSetting.value,
transitiveDynamicInputs := SettingsGraph.task.value,
)
) ++ sbt.internal.DeprecatedContinuous.taskDefinitions
def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] =
generators { _.join.map(_.flatten) }
@ -1251,7 +1246,7 @@ object Defaults extends BuildCommon {
): Initialize[Task[Seq[File]]] = Def.task {
val filter = include.toTask.value -- exclude.toTask.value
val view = fileTreeView.value
view.list(dirs.toTask.value.map(_ ** filter)).collect {
view.list(dirs.toTask.value.map(f => Globs(f.toPath, recursive = true, filter))).collect {
case (p, a) if !a.isDirectory => p.toFile
}
}
@ -1619,7 +1614,7 @@ object Defaults extends BuildCommon {
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
}
val map = sbt.nio.Keys.fileAttributeMap.value
val map = sbt.nio.Keys.fileStampCache.value
val analysis = analysisResult.analysis
import scala.collection.JavaConverters._
analysis.readStamps.getAllProductStamps.asScala.foreach {
@ -2574,16 +2569,16 @@ object Classpaths {
}
},
ivyConfiguration := InlineIvyConfiguration(
paths = ivyPaths.value,
lock = Option(lock(appConfiguration.value)),
log = Option(streams.value.log),
updateOptions = UpdateOptions(),
paths = Option(ivyPaths.value),
resolvers = externalResolvers.value.toVector,
otherResolvers = Vector.empty,
moduleConfigurations = Vector.empty,
lock = Option(lock(appConfiguration.value)),
checksums = checksums.value.toVector,
managedChecksums = false,
resolutionCacheDir = Some(crossTarget.value / "resolution-cache"),
updateOptions = UpdateOptions(),
log = streams.value.log
),
ivySbt := ivySbt0.value,
classifiersModule := classifiersModuleTask.value,
@ -3065,7 +3060,8 @@ object Classpaths {
Def.taskDyn {
val dirs = productDirectories.value
val view = fileTreeView.value
def containsClassFile(): Boolean = view.list(dirs.map(_ ** "*.class")).nonEmpty
def containsClassFile(): Boolean =
view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).nonEmpty
TrackLevel.intersection(track, exportToInternal.value) match {
case TrackLevel.TrackAlways =>
Def.task {

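Note: the hunks above migrate unmanaged source/resource collection from sbt.io filters to the new Glob API. A minimal build.sbt sketch, not part of this commit, showing how file inputs are declared against that API; the task name mdFiles and the docs/ directory are hypothetical:

import sbt.nio.Keys._
import sbt.nio.file.{ Glob, RecursiveGlob }

val mdFiles = taskKey[Seq[File]]("Collect markdown files")

// A non-recursive glob in the project base plus a recursive glob under docs/.
mdFiles / fileInputs ++= Seq(
  Glob(baseDirectory.value, "*.md"),
  Glob(baseDirectory.value / "docs", RecursiveGlob / "*.md")
)

// allInputFiles resolves the declared globs to concrete paths.
mdFiles := (mdFiles / allInputFiles).value.map(_.toFile)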
View File

@ -16,9 +16,10 @@ import sbt.Project.richInitializeTask
import sbt.Scope.Global
import sbt.internal.Aggregation.KeyValue
import sbt.internal.TaskName._
import sbt.internal.util._
import sbt.internal._
import sbt.internal.util._
import sbt.librarymanagement.{ Resolver, UpdateReport }
import sbt.nio.Keys.IgnoreSourceChanges
import sbt.std.Transform.DummyTaskMap
import sbt.util.{ Logger, Show }
@ -274,7 +275,7 @@ object EvaluateTask {
def injectSettings: Seq[Setting[_]] = Seq(
(state in Global) ::= dummyState,
(streamsManager in Global) ::= Def.dummyStreamsManager,
(executionRoots in Global) ::= dummyRoots
(executionRoots in Global) ::= dummyRoots,
)
@deprecated("Use variant which doesn't take a logger", "1.1.1")
@ -346,7 +347,7 @@ object EvaluateTask {
ExceptionCategory(ex) match {
case AlreadyHandled => ()
case m: MessageOnly => if (msg.isEmpty) log.error(m.message)
case f: Full => log.trace(f.exception)
case f: Full => if (f.exception != Reload) log.trace(f.exception)
}
}
@ -439,7 +440,7 @@ object EvaluateTask {
case Some(t: Task[_]) => transformNode(t).isEmpty
case _ => true
}
def run() = {
def run[R](s: State, toRun: Task[R], doShutdown: Boolean) = {
val x = new Execute[Task](
Execute.config(config.checkCycles, overwriteNode),
triggers,
@ -447,12 +448,12 @@ object EvaluateTask {
)(taskToNode)
val (newState, result) =
try {
val results = x.runKeep(root)(service)
storeValuesForPrevious(results, state, streams)
applyResults(results, state, root)
} catch { case inc: Incomplete => (state, Inc(inc)) } finally shutdown()
val results = x.runKeep(toRun)(service)
storeValuesForPrevious(results, s, streams)
applyResults(results, s, toRun)
} catch { case inc: Incomplete => (s, Inc(inc)) } finally if (doShutdown) shutdown()
val replaced = transformInc(result)
logIncResult(replaced, state, streams)
logIncResult(replaced, s, streams)
(newState, replaced)
}
object runningEngine extends RunningTaskEngine {
@ -466,8 +467,24 @@ object EvaluateTask {
// Register with our cancel handler we're about to start.
val strat = config.cancelStrategy
val cancelState = strat.onTaskEngineStart(runningEngine)
try run()
finally {
try {
(state.get(stateBuildStructure), state.get(sessionSettings)) match {
case (Some(structure), Some(settings)) =>
val extracted: Extracted = Project.extract(settings, structure)
if (extracted.get(sbt.nio.Keys.onChangedBuildSource) == IgnoreSourceChanges) {
run(state, root, doShutdown = true)
} else {
run(state, extracted.get(sbt.nio.Keys.checkBuildSources), doShutdown = false) match {
case (newState, r) =>
r.toEither match {
case Left(i) => (newState, Result.fromEither(Left(i)))
case _ => run(newState, root, doShutdown = true)
}
}
}
case _ => run(state, root, doShutdown = true)
}
} finally {
strat.onTaskEngineFinish(cancelState)
currentlyRunningEngine.set(null)
lastEvaluatedState.set(SafeState(state))

View File

@ -69,6 +69,7 @@ final class xMain extends xsbti.AppMain {
override def scalaProvider(): ScalaProvider = appProvider.scalaProvider
override def id(): xsbti.ApplicationID = appProvider.id()
override def loader(): ClassLoader = metaLoader
@deprecated("Implements deprecated api", "1.3.0")
override def mainClass(): Class[_ <: AppMain] = appProvider.mainClass()
override def entryPoint(): Class[_] = appProvider.entryPoint()
override def newMain(): AppMain = appProvider.newMain()
@ -266,7 +267,7 @@ object BuiltinCommands {
BasicCommands.multi,
act,
continuous,
clearCaches
clearCaches,
) ++ allBasicCommands
def DefaultBootCommands: Seq[String] =
@ -879,6 +880,7 @@ object BuiltinCommands {
val session = Load.initialSession(structure, eval, s0)
SessionSettings.checkSession(session, s)
registerGlobalCaches(Project.setProject(session, structure, s))
.put(sbt.nio.Keys.hasCheckedMetaBuild, new AtomicBoolean(false))
}
def registerCompilerCache(s: State): State = {

View File

@ -11,16 +11,15 @@ import java.io.PrintWriter
import java.util.Properties
import jline.TerminalFactory
import sbt.internal.langserver.ErrorCodes
import sbt.internal.util.{ ErrorHandling, GlobalLogBacking }
import sbt.io.{ IO, Using }
import sbt.protocol._
import sbt.util.Logger
import scala.annotation.tailrec
import scala.util.control.NonFatal
import sbt.io.{ IO, Using }
import sbt.internal.util.{ ErrorHandling, GlobalLogBacking }
import sbt.internal.langserver.ErrorCodes
import sbt.util.Logger
import sbt.protocol._
object MainLoop {
/** Entry point to run the remaining commands in State with managed global logging.*/
@ -140,7 +139,10 @@ object MainLoop {
case Right(s) => s
case Left(t: xsbti.FullReload) => throw t
case Left(t: RebootCurrent) => throw t
case Left(t) => state.handleError(t)
case Left(Reload) =>
val remaining = state.currentCommand.toList ::: state.remainingCommands
state.copy(remainingCommands = Exec("reload", None, None) :: remaining)
case Left(t) => state.handleError(t)
}
/** This is the main function State transfer function of the sbt command processing. */

View File

@ -29,7 +29,6 @@ import Keys.{
serverConnectionType,
fullServerHandlers,
logLevel,
watch
}
import Scope.{ Global, ThisScope }
import Def.{ Flattened, Initialize, ScopedKey, Setting }
@ -509,7 +508,6 @@ object Project extends ProjectExtra {
val history = get(historyPath) flatMap idFun
val prompt = get(shellPrompt)
val trs = (templateResolverInfos in Global get structure.data).toList.flatten
val watched = get(watch)
val startSvr: Option[Boolean] = get(autoStartServer)
val host: Option[String] = get(serverHost)
val port: Option[Int] = get(serverPort)
@ -524,7 +522,6 @@ object Project extends ProjectExtra {
)
val newAttrs =
s.attributes
.setCond(Watched.Configuration, watched)
.put(historyPath.key, history)
.setCond(autoStartServer.key, startSvr)
.setCond(serverPort.key, port)

View File

@ -10,20 +10,17 @@ package sbt
import java.io.File
import java.lang.reflect.Method
import sbt.Def._
import sbt.Keys._
import sbt.Project._
import sbt.internal.inc.ModuleUtilities
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.util.complete.{ DefaultParsers, Parser }
import sbt.io._
import sbt.io.syntax._
import sbt.internal.util.complete.{ Parser, DefaultParsers }
import sbt.librarymanagement._
import sbt.librarymanagement.syntax._
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.internal.inc.ModuleUtilities
import Def._
import Keys._
import Project._
import sbt.nio.file.{ Glob, RecursiveGlob }
object ScriptedPlugin extends AutoPlugin {
@ -89,7 +86,7 @@ object ScriptedPlugin extends AutoPlugin {
use(analysis, pub)
},
scripted := scriptedTask.evaluated,
watchTriggers in scripted += sbtTestDirectory.value ** AllPassFilter
watchTriggers in scripted += Glob(sbtTestDirectory.value, RecursiveGlob)
)
private[sbt] def scriptedTestsTask: Initialize[Task[AnyRef]] =

View File

@ -111,6 +111,9 @@ object Aggregation {
val complete = timedRun[T](s, ts, extra)
showRun(complete, show)
complete.results match {
case Inc(i) if i.directCause.contains(Reload) =>
val remaining = s.currentCommand.toList ::: s.remainingCommands
complete.state.copy(remainingCommands = Exec("reload", None, None) :: remaining)
case Inc(i) => complete.state.handleError(i)
case Value(_) => complete.state
}

View File

@ -14,7 +14,6 @@ import java.nio.file.{ DirectoryNotEmptyException, Files, Path }
import sbt.Def._
import sbt.Keys._
import sbt.Project.richInitializeTask
import sbt.io.AllPassFilter
import sbt.io.syntax._
import sbt.nio.Keys._
import sbt.nio.file._
@ -54,7 +53,7 @@ private[sbt] object Clean {
private[this] def cleanFilter(scope: Scope): Def.Initialize[Task[Path => Boolean]] = Def.task {
val excludes = (cleanKeepFiles in scope).value.map {
// This mimics the legacy behavior of cleanFilesTask
case f if f.isDirectory => f * AllPassFilter
case f if f.isDirectory => Glob(f, AnyPath)
case f => f.toGlob
} ++ (cleanKeepGlobs in scope).value
p: Path => excludes.exists(_.matches(p))

View File

@ -9,7 +9,7 @@ package sbt
package internal
import java.io.{ ByteArrayInputStream, InputStream, File => _ }
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }
import sbt.BasicCommandStrings.{
ContinuousExecutePrefix,
@ -125,8 +125,10 @@ private[sbt] object Continuous extends DeprecatedContinuous {
private[sbt] val dynamicInputs = taskKey[Option[mutable.Set[DynamicInput]]](
"The input globs found during task evaluation that are used in watch."
)
private[sbt] def dynamicInputsImpl: Def.Initialize[Task[Option[mutable.Set[DynamicInput]]]] =
Def.task(Keys.state.value.get(DynamicInputs))
private[sbt] val DynamicInputs =
AttributeKey[mutable.Set[DynamicInput]](
"dynamic-inputs",
@ -136,6 +138,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
private[this] val continuousParser: State => Parser[(Int, String)] = {
def toInt(s: String): Int = Try(s.toInt).getOrElse(0)
// This allows us to re-enter the watch with the previous count.
val digitParser: Parser[Int] =
(Parsers.Space.* ~> matched(Parsers.Digit.+) <~ Parsers.Space.*).map(toInt)
@ -189,6 +192,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
watchSettings
)
}
private def getRepository(state: State): FileTreeRepository[FileAttributes] = {
lazy val exception =
new IllegalStateException("Tried to access FileTreeRepository for uninitialized state")
@ -265,13 +269,45 @@ private[sbt] object Continuous extends DeprecatedContinuous {
f(commands, s, valid, invalid)
}
private[this] def withCharBufferedStdIn[R](f: InputStream => R): R =
if (!Util.isWindows) {
val terminal = JLine.terminal
terminal.init()
terminal.setEchoEnabled(true)
f(terminal.wrapInIfNeeded(System.in))
} else f(System.in)
private[this] def withCharBufferedStdIn[R](f: InputStream => R): R = {
val terminal = JLine.terminal
terminal.init()
terminal.setEchoEnabled(true)
val wrapped = terminal.wrapInIfNeeded(System.in)
if (Util.isNonCygwinWindows) {
val inputStream: InputStream with AutoCloseable = new InputStream with AutoCloseable {
private[this] val buffer = new java.util.LinkedList[Int]
private[this] val closed = new AtomicBoolean(false)
private[this] val thread = new Thread("Continuous-input-stream-reader") {
setDaemon(true)
start()
@tailrec
override def run(): Unit = {
try {
if (!closed.get()) {
buffer.add(wrapped.read())
}
} catch {
case _: InterruptedException =>
}
if (!closed.get()) run()
}
}
override def available(): Int = buffer.size()
override def read(): Int = buffer.poll()
override def close(): Unit = if (closed.compareAndSet(false, true)) {
thread.interrupt()
}
}
try {
f(inputStream)
} finally {
inputStream.close()
}
} else {
f(wrapped)
}
}
private[sbt] def runToTermination(
state: State,
@ -287,17 +323,22 @@ private[sbt] object Continuous extends DeprecatedContinuous {
} else {
FileTreeRepository.default
}
val fileStampCache = new FileStamp.Cache
repo.addObserver(t => fileStampCache.invalidate(t.path))
try {
val stateWithRepo = state
.put(globalFileTreeRepository, repo)
.put(persistentFileAttributeMap, new sbt.nio.Keys.FileAttributeMap)
setup(stateWithRepo, command) { (commands, s, valid, invalid) =>
val stateWithRepo = state.put(globalFileTreeRepository, repo)
val fullState =
if (extracted.get(watchPersistFileStamps))
stateWithRepo.put(persistentFileStampCache, fileStampCache)
else stateWithRepo
setup(fullState, command) { (commands, s, valid, invalid) =>
EvaluateTask.withStreams(extracted.structure, s)(_.use(streams in Global) { streams =>
implicit val logger: Logger = streams.log
if (invalid.isEmpty) {
val currentCount = new AtomicInteger(count)
val configs = getAllConfigs(valid.map(v => v._1 -> v._2))
val callbacks = aggregate(configs, logger, in, s, currentCount, isCommand, commands)
val callbacks =
aggregate(configs, logger, in, s, currentCount, isCommand, commands, fileStampCache)
val task = () => {
currentCount.getAndIncrement()
// abort as soon as one of the tasks fails
@ -307,10 +348,16 @@ private[sbt] object Continuous extends DeprecatedContinuous {
callbacks.onEnter()
// Here we enter the Watched.watch state machine. We will not return until one of the
// state machine callbacks returns Watched.CancelWatch, Watched.Custom, Watched.HandleError
// or Watched.Reload. The task defined above will be run at least once. It will be run
// or Watched.ReloadException. The task defined above will be run at least once. It will be run
// additional times whenever the state transition callbacks return Watched.Trigger.
try {
val terminationAction = Watch(task, callbacks.onStart, callbacks.nextEvent)
terminationAction match {
case e: Watch.HandleUnexpectedError =>
System.err.println("Caught unexpected error running continuous build:")
e.throwable.printStackTrace(System.err)
case _ =>
}
callbacks.onTermination(terminationAction, command, currentCount.get(), state)
} finally {
callbacks.onExit()
@ -340,6 +387,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
case _ => Nil: Seq[ScopedKey[_]]
}
}
private def getAllConfigs(
inputs: Seq[(String, State)]
)(implicit extracted: Extracted, logger: Logger): Seq[Config] = {
@ -386,7 +434,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
state: State,
count: AtomicInteger,
isCommand: Boolean,
commands: Seq[String]
commands: Seq[String],
fileStampCache: FileStamp.Cache
)(
implicit extracted: Extracted
): Callbacks = {
@ -396,7 +445,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
val onStart: () => Watch.Action = getOnStart(project, commands, configs, rawLogger, count)
val nextInputEvent: () => Watch.Action = parseInputEvents(configs, state, inputStream, logger)
val (nextFileEvent, cleanupFileMonitor): (() => Option[(Watch.Event, Watch.Action)], () => Unit) =
getFileEvents(configs, rawLogger, state, count, commands)
getFileEvents(configs, rawLogger, state, count, commands, fileStampCache)
val nextEvent: () => Watch.Action =
combineInputAndFileEvents(nextInputEvent, nextFileEvent, logger)
val onExit = () => {
@ -454,41 +503,56 @@ private[sbt] object Continuous extends DeprecatedContinuous {
res
}
}
private def getFileEvents(
configs: Seq[Config],
logger: Logger,
state: State,
count: AtomicInteger,
commands: Seq[String]
commands: Seq[String],
fileStampCache: FileStamp.Cache
)(implicit extracted: Extracted): (() => Option[(Watch.Event, Watch.Action)], () => Unit) = {
val attributeMap = state.get(persistentFileAttributeMap).get
val trackMetaBuild = configs.forall(_.watchSettings.trackMetaBuild)
val buildGlobs =
if (trackMetaBuild) extracted.getOpt(fileInputs in settingsData).getOrElse(Nil)
if (trackMetaBuild) extracted.getOpt(fileInputs in checkBuildSources).getOrElse(Nil)
else Nil
val retentionPeriod = configs.map(_.watchSettings.antiEntropyRetentionPeriod).max
val quarantinePeriod = configs.map(_.watchSettings.deletionQuarantinePeriod).max
val onEvent: Event => Seq[(Watch.Event, Watch.Action)] = event => {
val path = event.path
def watchEvent(stamper: FileStamper, forceTrigger: Boolean): Option[Watch.Event] = {
val stamp = FileStamp(path, stamper)
if (!event.exists) {
attributeMap.remove(event.path) match {
Some(Deletion(event))
fileStampCache.remove(event.path) match {
case null => None
case _ => Some(Deletion(event))
}
} else {
import sbt.internal.inc.Stamp.equivStamp
attributeMap.put(path, stamp) match {
case null => Some(Creation(event))
case s =>
if (forceTrigger || !equivStamp.equiv(s.stamp, stamp.stamp))
fileStampCache.update(path, stamper) match {
case (None, Some(_)) => Some(Creation(event))
case (Some(_), None) => Some(Deletion(event))
case (Some(p), Some(c)) =>
if (forceTrigger) {
val msg =
s"Creating forced update event for path $path (previous stamp: $p, current stamp: $c)"
logger.debug(msg)
Some(Update(event))
else None
} else if (p == c) {
logger.debug(s"Dropping event for unmodified path $path")
None
} else {
val msg =
s"Creating update event for modified $path (previous stamp: $p, current stamp: $c)"
logger.debug(msg)
Some(Update(event))
}
case _ => None
}
}
}
if (buildGlobs.exists(_.matches(path))) {
watchEvent(FileStamper.Hash, forceTrigger = false).map(e => e -> Watch.Reload).toSeq
} else {
@ -519,13 +583,27 @@ private[sbt] object Continuous extends DeprecatedContinuous {
private implicit class WatchLogger(val l: Logger) extends sbt.internal.nio.WatchLogger {
override def debug(msg: Any): Unit = l.debug(msg.toString)
}
// TODO make this a normal monitor
private[this] val monitors: Seq[FileEventMonitor[Event]] =
configs.map { config =>
// Create a logger with a scoped key prefix so that we can tell from which
// monitor events occurred.
FileEventMonitor.antiEntropy(
getRepository(state),
new Observable[Event] {
private[this] val repo = getRepository(state)
private[this] val observers = new Observers[Event] {
override def onNext(t: Event): Unit =
if (config.inputs().exists(_.glob.matches(t.path))) super.onNext(t)
}
private[this] val handle = repo.addObserver(observers)
override def addObserver(observer: Observer[Event]): AutoCloseable =
observers.addObserver(observer)
override def close(): Unit = {
handle.close()
observers.close()
}
},
config.watchSettings.antiEntropy,
logger.withPrefix(config.key.show),
config.watchSettings.deletionQuarantinePeriod,
@ -543,11 +621,13 @@ private[sbt] object Continuous extends DeprecatedContinuous {
retentionPeriod
) :: Nil
} else Nil)
override def poll(duration: Duration, filter: Event => Boolean): Seq[Event] = {
val res = monitors.flatMap(_.poll(0.millis, filter)).toSet.toVector
if (res.isEmpty) Thread.sleep(duration.toMillis)
res
}
override def close(): Unit = monitors.foreach(_.close())
}
val watchLogger: WatchLogger = msg => logger.debug(msg.toString)
@ -640,7 +720,9 @@ private[sbt] object Continuous extends DeprecatedContinuous {
val parser = any ~> inputParser ~ matched(any)
// Each parser gets its own copy of System.in that it can modify while parsing.
val systemInBuilder = new StringBuilder
def inputStream(string: String): InputStream = new ByteArrayInputStream(string.getBytes)
// This string is provided in the closure below by reading from System.in
val default: String => Watch.Action =
string => parse(inputStream(string), systemInBuilder, parser)
@ -726,7 +808,9 @@ private[sbt] object Continuous extends DeprecatedContinuous {
*/
new Logger {
override def trace(t: => Throwable): Unit = logger.trace(t)
override def success(message: => String): Unit = logger.success(message)
override def log(level: Level.Value, message: => String): Unit = {
val levelString = if (level < delegateLevel) s"[$level] " else ""
val newMessage = s"[watch] $levelString$message"
@ -786,7 +870,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
val onTermination: Option[(Watch.Action, String, Int, State) => State] =
key.get(watchOnTermination)
val startMessage: StartMessage = getStartMessage(key)
val trackMetaBuild: Boolean = key.get(watchTrackMetaBuild).getOrElse(true)
val trackMetaBuild: Boolean =
key.get(onChangedBuildSource).fold(false)(_ == ReloadOnSourceChanges)
val triggerMessage: TriggerMessage = getTriggerMessage(key)
// Unlike the rest of the settings, InputStream is a TaskKey which means that if it is set,
@ -812,12 +897,15 @@ private[sbt] object Continuous extends DeprecatedContinuous {
) {
private[sbt] def watchState(count: Int): DeprecatedWatchState =
WatchState.empty(inputs().map(_.glob)).withCount(count)
def arguments(logger: Logger): Arguments = new Arguments(logger, inputs())
}
private def getStartMessage(key: ScopedKey[_])(implicit e: Extracted): StartMessage = Some {
lazy val default = key.get(watchStartMessage).getOrElse(Watch.defaultStartWatch)
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
}
private def getTriggerMessage(
key: ScopedKey[_]
)(implicit e: Extracted): TriggerMessage = {
@ -926,9 +1014,12 @@ private[sbt] object Continuous extends DeprecatedContinuous {
*/
def withPrefix(prefix: String): Logger = new Logger {
override def trace(t: => Throwable): Unit = logger.trace(t)
override def success(message: => String): Unit = logger.success(message)
override def log(level: Level.Value, message: => String): Unit =
logger.log(level, s"$prefix - $message")
}
}
}

View File

@ -19,3 +19,9 @@ private[internal] trait DeprecatedContinuous {
protected val deprecatedWatchingMessage = sbt.Keys.watchingMessage
protected val deprecatedTriggeredMessage = sbt.Keys.triggeredMessage
}
private[sbt] object DeprecatedContinuous {
private[sbt] val taskDefinitions = Seq(
sbt.Keys.watchTransitiveSources := sbt.Defaults.watchTransitiveSourcesTask.value,
sbt.Keys.watch := sbt.Defaults.watchSetting.value,
)
}

View File

@ -12,11 +12,13 @@ import java.util.Optional
import sbt.Def
import sbt.Keys._
import sbt.internal.inc.{ EmptyStamp, ExternalLookup, Stamper }
import sbt.internal.inc.ExternalLookup
import sbt.internal.inc.Stamp.equivStamp.equiv
import sbt.io.syntax._
import sbt.nio.Keys._
import sbt.nio.file.RecursiveGlob
import sbt.nio.file.syntax._
import sbt.nio.{ FileStamp, FileStamper }
import xsbti.compile._
import xsbti.compile.analysis.Stamp
@ -25,37 +27,22 @@ import scala.collection.mutable
private[sbt] object ExternalHooks {
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
private[this] implicit class StampOps(val s: Stamp) extends AnyVal {
def hash: String = s.getHash.orElse("")
def lastModified: Long = s.getLastModified.orElse(-1L)
}
def default: Def.Initialize[sbt.Task[ExternalHooks]] = Def.task {
val attributeMap = fileAttributeMap.value
val cache = fileStampCache.value
val cp = dependencyClasspath.value.map(_.data)
cp.foreach { file =>
val path = file.toPath
attributeMap.get(path) match {
case null => attributeMap.put(path, sbt.nio.FileStamp.lastModified(path))
case _ =>
}
cache.getOrElseUpdate(path, FileStamper.LastModified)
}
val classGlob = classDirectory.value.toGlob / RecursiveGlob / "*.class"
fileTreeView.value.list(classGlob).foreach {
case (path, _) => attributeMap.put(path, sbt.nio.FileStamp.lastModified(path))
case (path, _) => cache.update(path, FileStamper.LastModified)
}
apply(
(compileOptions in compile).value,
(file: File) => {
attributeMap.get(file.toPath) match {
case null => EmptyStamp
case s => s.stamp
}
}
)
apply((compileOptions in compile).value, cache)
}
private def apply(
options: CompileOptions,
attributeMap: File => Stamp
fileStampCache: FileStamp.Cache
): DefaultExternalHooks = {
val lookup = new ExternalLookup {
override def changedSources(previousAnalysis: CompileAnalysis): Option[Changes[File]] = Some {
@ -70,16 +57,10 @@ private[sbt] object ExternalHooks {
previousAnalysis.readStamps().getAllSourceStamps.asScala
prevSources.foreach {
case (file: File, s: Stamp) =>
attributeMap(file) match {
case null =>
getRemoved.add(file)
case stamp =>
val hash = (if (stamp.getHash.isPresent) stamp else Stamper.forHash(file)).hash
if (hash == s.hash) {
getUnmodified.add(file)
} else {
getChanged.add(file)
}
fileStampCache.getOrElseUpdate(file.toPath, FileStamper.Hash) match {
case None => getRemoved.add(file)
case Some(stamp) =>
if (equiv(stamp.stamp, s)) getUnmodified.add(file) else getChanged.add(file)
}
}
options.sources.foreach(file => if (!prevSources.contains(file)) getAdded.add(file))
@ -98,8 +79,8 @@ private[sbt] object ExternalHooks {
override def changedBinaries(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllBinaryStamps.asScala.flatMap {
case (file, stamp) =>
attributeMap(file) match {
case cachedStamp if stamp.getLastModified == cachedStamp.getLastModified => None
fileStampCache.get(file.toPath) match {
case Some(cachedStamp) if equiv(cachedStamp.stamp, stamp) => None
case _ =>
javaHome match {
case Some(h) if file.toPath.startsWith(h) => None
@ -112,9 +93,9 @@ private[sbt] object ExternalHooks {
override def removedProducts(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllProductStamps.asScala.flatMap {
case (file, stamp) =>
attributeMap(file) match {
case s if s.getLastModified == stamp.getLastModified => None
case _ => Some(file)
fileStampCache.get(file.toPath) match {
case Some(s) if equiv(s.stamp, stamp) => None
case _ => Some(file)
}
}.toSet)
}

View File

@ -57,7 +57,7 @@ object IvyConsole {
depSettings
)
val newStructure = Load.reapply(session.original ++ append, structure, state.log)
val newStructure = Load.reapply(session.original ++ append, structure)
val newState = state.copy(remainingCommands = Exec(Keys.consoleQuick.key.label, None) :: Nil)
Project.setProject(session, newStructure, newState)
}

View File

@ -13,13 +13,20 @@ import org.apache.logging.log4j.core.{ LogEvent => XLogEvent }
import org.apache.logging.log4j.core.appender.AbstractAppender
import org.apache.logging.log4j.core.layout.PatternLayout
import org.apache.logging.log4j.core.async.RingBufferLogEvent
import org.apache.logging.log4j.core.config.Property
import sbt.util.Level
import sbt.internal.util._
import sbt.protocol.LogEvent
import sbt.internal.util.codec._
class RelayAppender(name: String)
extends AbstractAppender(name, null, PatternLayout.createDefaultLayout(), true) {
extends AbstractAppender(
name,
null,
PatternLayout.createDefaultLayout(),
true,
Property.EMPTY_ARRAY
) {
lazy val exchange = StandardMain.exchange
def append(event: XLogEvent): Unit = {

View File

@ -12,9 +12,9 @@ import sbt.Keys._
import sbt.Project.richInitializeTask
import sbt._
import sbt.internal.io.Source
import sbt.internal.nio.Globs
import sbt.internal.util.AttributeMap
import sbt.internal.util.complete.Parser
import sbt.io.syntax._
import sbt.nio.FileStamper
import sbt.nio.Keys._
import sbt.nio.file.Glob
@ -25,7 +25,7 @@ private[sbt] object SettingsGraph {
private implicit class SourceOps(val source: Source) {
def toGlob: Glob = {
val filter = source.includeFilter -- source.excludeFilter
if (source.recursive) source.base ** filter else source.base * filter
Globs.apply(source.base.toPath, source.recursive, filter)
}
}
private[sbt] def task: Def.Initialize[Task[Seq[DynamicInput]]] =
@ -106,7 +106,7 @@ private[sbt] object SettingsGraph {
.map { am =>
am.get(scopedKey.key) match {
case Some(globs: Seq[Glob]) =>
if (trigger) {
if (!trigger) {
val stamper = am.get(inputFileStamper.key).getOrElse(FileStamper.Hash)
val forceTrigger = am.get(watchForceTriggerOnAnyChange.key).getOrElse(false)
globs.map(g => DynamicInput(g, stamper, forceTrigger))

View File

@ -0,0 +1,65 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal.nio
import sbt.Keys.{ baseDirectory, state, streams }
import sbt.SlashSyntax0._
import sbt.io.syntax._
import sbt.nio.Keys._
import sbt.nio.file.{ ChangedFiles, Glob, RecursiveGlob }
private[sbt] object CheckBuildSources {
private[sbt] def needReloadImpl: Def.Initialize[Task[Unit]] = Def.task {
val logger = streams.value.log
val checkMetaBuildParam = state.value.get(hasCheckedMetaBuild)
val firstTime = checkMetaBuildParam.fold(true)(_.get == false)
(onChangedBuildSource in Scope.Global).value match {
case IgnoreSourceChanges => ()
case o =>
logger.debug("Checking for meta build source updates")
(changedInputFiles in checkBuildSources).value match {
case Some(cf: ChangedFiles) if !firstTime =>
val rawPrefix = s"Meta build source files have changed:\n" +
(if (cf.created.nonEmpty) s"creations: ${cf.created.mkString("\n ", " \n", "\n")}"
else "") +
(if (cf.deleted.nonEmpty) s"deletions: ${cf.deleted.mkString("\n ", " \n", "\n")}"
else "") +
(if (cf.updated.nonEmpty) s"updates: ${cf.updated.mkString("\n ", " \n", "\n")}"
else "")
val prefix = rawPrefix.linesIterator.filterNot(_.trim.isEmpty).mkString("\n")
if (o == ReloadOnSourceChanges) {
logger.info(s"$prefix\nReloading sbt...")
throw Reload
} else {
val tail = "Reload sbt with the 'reload' command to apply these changes. " +
"To automatically reload upon meta build source changed detection, set " +
"`Global / onChangedBuildSource := ReloadOnSourceChanges`. To disable this " +
"warning, set `Global / onChangedBuildSource := IgnoreSourceChanges`"
logger.warn(s"$prefix\n$tail")
}
case _ => ()
}
}
checkMetaBuildParam.foreach(_.set(true))
}
private[sbt] def buildSourceFileInputs: Def.Initialize[Seq[Glob]] = Def.setting {
if (onChangedBuildSource.value != IgnoreSourceChanges) {
val baseDir = (LocalRootProject / baseDirectory).value
val sourceFilter = "*.{sbt,scala,java}"
val projectDir = baseDir / "project"
Seq(
Glob(baseDir, "*.sbt"),
Glob(projectDir, sourceFilter),
// We only want to recursively look in source because otherwise we have to search
// the project target directories which is expensive.
Glob(projectDir / "src", RecursiveGlob / sourceFilter),
)
} else Nil
}
}
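Note: this new file implements the meta build source check wired into Defaults and EvaluateTask above. A minimal, illustrative build.sbt sketch of the three onChangedBuildSource modes it reacts to (WarnOnSourceChanges is the default set in Defaults above):

// Reload the build automatically when *.sbt / project sources change.
Global / onChangedBuildSource := ReloadOnSourceChanges
// Alternatively, warn but keep running (the default), or skip the check entirely:
// Global / onChangedBuildSource := WarnOnSourceChanges
// Global / onChangedBuildSource := IgnoreSourceChanges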

View File

@ -16,8 +16,6 @@ import sbt.nio.file.FileAttributes
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
import xsbti.compile.analysis.{ Stamp => XStamp }
import scala.util.Try
sealed trait FileStamper
object FileStamper {
case object Hash extends FileStamper
@ -36,12 +34,11 @@ private[sbt] object FileStamp {
}
}
private[sbt] val converter: (Path, FileAttributes) => Try[FileStamp] = (p, a) => Try(apply(p, a))
def apply(path: Path, fileStamper: FileStamper): FileStamp = fileStamper match {
def apply(path: Path, fileStamper: FileStamper): Option[FileStamp] = fileStamper match {
case FileStamper.Hash => hash(path)
case FileStamper.LastModified => lastModified(path)
}
def apply(path: Path, fileAttributes: FileAttributes): FileStamp =
def apply(path: Path, fileAttributes: FileAttributes): Option[FileStamp] =
try {
if (fileAttributes.isDirectory) lastModified(path)
else
@ -51,13 +48,21 @@ private[sbt] object FileStamp {
case _ => hash(path)
}
} catch {
case e: IOException => Error(e)
case e: IOException => Some(Error(e))
}
def hash(string: String): Hash = new FileHashImpl(sbt.internal.inc.Hash.unsafeFromString(string))
def hash(path: Path): Hash = new FileHashImpl(Stamper.forHash(path.toFile))
def lastModified(path: Path): LastModified = LastModified(IO.getModifiedTimeOrZero(path.toFile))
def hash(path: Path): Option[Hash] = Stamper.forHash(path.toFile) match {
case EmptyStamp => None
case s => Some(new FileHashImpl(s))
}
def lastModified(path: Path): Option[LastModified] = IO.getModifiedTimeOrZero(path.toFile) match {
case 0 => None
case l => Some(LastModified(l))
}
private[this] class FileHashImpl(val xstamp: XStamp) extends Hash(xstamp.getHash.orElse(""))
sealed abstract case class Hash private[sbt] (hex: String) extends FileStamp
final case class LastModified private[sbt] (time: Long) extends FileStamp
final case class Error(exception: IOException) extends FileStamp
implicit val pathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] {
override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = {
@ -207,6 +212,65 @@ private[sbt] object FileStamp {
}
}
final case class LastModified private[sbt] (time: Long) extends FileStamp
final case class Error(exception: IOException) extends FileStamp
private implicit class EitherOps(val e: Either[FileStamp, FileStamp]) extends AnyVal {
def value: Option[FileStamp] = if (e == null) None else Some(e.fold(identity, identity))
}
private[sbt] class Cache {
private[this] val underlying = new java.util.HashMap[Path, Either[FileStamp, FileStamp]]
/**
* Invalidate the cache entry, but don't re-stamp the file until it's actually used
* in a call to get or update.
*
* @param path the file whose stamp we are invalidating
*/
def invalidate(path: Path): Unit = underlying.get(path) match {
case Right(s) =>
underlying.put(path, Left(s))
()
case _ => ()
}
def get(path: Path): Option[FileStamp] =
underlying.get(path) match {
case null => None
case Left(v) => updateImpl(path, fileStampToStamper(v))
case Right(v) => Some(v)
}
def getOrElseUpdate(path: Path, stamper: FileStamper): Option[FileStamp] =
underlying.get(path) match {
case null => updateImpl(path, stamper)
case Left(v) => updateImpl(path, stamper)
case Right(v) => Some(v)
}
def remove(key: Path): Option[FileStamp] = {
underlying.remove(key).value
}
def put(key: Path, fileStamp: FileStamp): Option[FileStamp] =
underlying.put(key, Right(fileStamp)) match {
case null => None
case e => e.value
}
def update(key: Path, stamper: FileStamper): (Option[FileStamp], Option[FileStamp]) = {
underlying.get(key) match {
case null => (None, updateImpl(key, stamper))
case v => (v.value, updateImpl(key, stamper))
}
}
private def fileStampToStamper(stamp: FileStamp): FileStamper = stamp match {
case _: Hash => FileStamper.Hash
case _ => FileStamper.LastModified
}
private def updateImpl(path: Path, stamper: FileStamper): Option[FileStamp] = {
val stamp = FileStamp(path, stamper)
stamp match {
case None => underlying.remove(path)
case Some(s) => underlying.put(path, Right(s))
}
stamp
}
}
}
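Note: FileStamp.Cache replaces the previous java.util.HashMap-based fileAttributeMap. A minimal sketch of how the cache above is exercised; the class is private[sbt], so this only compiles inside sbt itself, and the path is hypothetical:

import java.nio.file.Paths
import sbt.nio.{ FileStamp, FileStamper }

val cache = new FileStamp.Cache
val path = Paths.get("src/main/scala/Main.scala") // hypothetical file

// Stamp on first use; returns None if the file does not exist.
val initial: Option[FileStamp] = cache.getOrElseUpdate(path, FileStamper.Hash)

// A file event arrives: drop the cached stamp but defer re-stamping until
// the path is next read (this is what the repository observer does above).
cache.invalidate(path)

// update returns (previous, current), which lets callers classify the change:
// (None, Some(_)) => creation, (Some(_), None) => deletion,
// (Some(p), Some(c)) with p != c => modification.
val (previous, current) = cache.update(path, FileStamper.Hash)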

View File

@ -9,6 +9,7 @@ package sbt.nio
import java.io.InputStream
import java.nio.file.Path
import java.util.concurrent.atomic.AtomicBoolean
import sbt.BuildSyntax.{ settingKey, taskKey }
import sbt.KeyRanks.{ BMinusSetting, DSetting, Invisible }
@ -22,6 +23,10 @@ import sbt.{ Def, InputKey, State, StateTransform }
import scala.concurrent.duration.FiniteDuration
object Keys {
sealed trait WatchBuildSourceOption
case object IgnoreSourceChanges extends WatchBuildSourceOption
case object WarnOnSourceChanges extends WatchBuildSourceOption
case object ReloadOnSourceChanges extends WatchBuildSourceOption
val allInputFiles =
taskKey[Seq[Path]]("All of the file inputs for a task excluding directories and hidden files.")
val changedInputFiles = taskKey[Option[ChangedFiles]]("The changed files for a task")
@ -44,10 +49,17 @@ object Keys {
val fileTreeView =
taskKey[FileTreeView.Nio[FileAttributes]]("A view of the local file system tree")
val checkBuildSources =
taskKey[Unit]("Check if any meta build sources have changed").withRank(DSetting)
// watch related settings
val watchAntiEntropyRetentionPeriod = settingKey[FiniteDuration](
"Wall clock Duration for which a FileEventMonitor will store anti-entropy events. This prevents spurious triggers when a task takes a long time to run. Higher values will consume more memory but make spurious triggers less likely."
).withRank(BMinusSetting)
val onChangedBuildSource = settingKey[WatchBuildSourceOption](
"Determines what to do if the sbt meta build sources have changed"
).withRank(DSetting)
val watchDeletionQuarantinePeriod = settingKey[FiniteDuration](
"Period for which deletion events will be quarantined. This is to prevent spurious builds when a file is updated with a rename which manifests as a file deletion followed by a file creation. The higher this value is set, the longer the delay will be between a file deletion and a build trigger but the less likely it is for a spurious trigger."
).withRank(DSetting)
@ -85,6 +97,9 @@ object Keys {
val watchOnTermination = settingKey[(Watch.Action, String, Int, State) => State](
"Transforms the state upon completion of a watch. The String argument is the command that was run during the watch. The Int parameter specifies how many times the command was run during the watch."
).withRank(DSetting)
val watchPersistFileStamps = settingKey[Boolean](
"Toggles whether or not the continuous build will reuse the file stamps computed in previous runs. Setting this to true decrease watch startup latency but could cause inconsistent results if many source files are concurrently modified."
).withRank(DSetting)
val watchStartMessage = settingKey[(Int, String, Seq[String]) => Option[String]](
"The message to show when triggered execution waits for sources to change. The parameters are the current watch iteration count, the current project name and the tasks that are being run with each build."
).withRank(DSetting)
@ -93,9 +108,6 @@ object Keys {
"watch",
"Watch a task (or multiple tasks) and rebuild when its file inputs change or user input is received. The semantics are more or less the same as the `~` command except that it cannot transform the state on exit. This means that it cannot be used to reload the build."
).withRank(DSetting)
val watchTrackMetaBuild = settingKey[Boolean](
s"Toggles whether or not changing the build files (e.g. **/*.sbt, project/**/*.{scala,java}) should automatically trigger a project reload"
).withRank(DSetting)
val watchTriggeredMessage = settingKey[(Int, Path, Seq[String]) => Option[String]](
"The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count."
).withRank(DSetting)
@ -116,8 +128,6 @@ object Keys {
taskKey[Seq[DynamicInput]]("The transitive inputs and triggers for a key").withRank(Invisible)
private[sbt] val dynamicFileOutputs =
taskKey[Seq[Path]]("The outputs of a task").withRank(Invisible)
private[sbt] val autoClean =
taskKey[Unit]("Automatically clean up a task returning file or path").withRank(Invisible)
private[sbt] val inputFileStamps =
taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task input files")
@ -126,16 +136,25 @@ object Keys {
taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task output files")
.withRank(Invisible)
private[sbt] type FileAttributeMap =
java.util.HashMap[Path, FileStamp]
private[sbt] val persistentFileAttributeMap =
AttributeKey[FileAttributeMap]("persistent-file-attribute-map", Int.MaxValue)
java.util.Map[Path, FileStamp]
private[sbt] val persistentFileStampCache =
AttributeKey[FileStamp.Cache]("persistent-file-stamp-cache", Int.MaxValue)
private[sbt] val allInputPathsAndAttributes =
taskKey[Seq[(Path, FileAttributes)]]("Get all of the file inputs for a task")
.withRank(Invisible)
private[sbt] val fileAttributeMap = taskKey[FileAttributeMap](
private[sbt] val fileStampCache = taskKey[FileStamp.Cache](
"Map of file stamps that may be cleared between task evaluation runs."
).withRank(Invisible)
private[sbt] val pathToFileStamp = taskKey[Path => FileStamp](
private[sbt] val pathToFileStamp = taskKey[Path => Option[FileStamp]](
"A function that computes a file stamp for a path. It may have the side effect of updating a cache."
).withRank(Invisible)
private[this] val hasCheckedMetaBuildMsg =
"Indicates whether or not we have called the checkBuildSources task. This is to avoid warning " +
"user about build source changes if the build sources were changed while sbt was shutdown. " +
" When that occurs, the previous cache reflects the state of the old build files, but by " +
" the time the checkBuildSources task has run, the build will have already been loaded with the " +
" new meta build sources so we should neither warn the user nor automatically restart the build"
private[sbt] val hasCheckedMetaBuild =
AttributeKey[AtomicBoolean]("has-checked-meta-build", hasCheckedMetaBuildMsg, Int.MaxValue)
}
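Note: a hedged build.sbt sketch combining the keys touched above with the existing watchTriggers key; the scopes and glob are illustrative only:

import sbt.nio.Keys._
import sbt.nio.file.Glob

// Reuse file stamps computed in previous watch iterations (default true above).
Global / watchPersistFileStamps := true

// Trigger a rebuild even when a changed file's stamp is unchanged (default is now false).
Compile / compile / watchForceTriggerOnAnyChange := true

// Globs that trigger the watch without being task inputs.
Compile / compile / watchTriggers += Glob(baseDirectory.value, "*.txt")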

View File

@ -142,8 +142,6 @@ private[sbt] object Settings {
case dynamicDependency.key => (dynamicDependency in scopedKey.scope := { () }) :: Nil
case transitiveClasspathDependency.key =>
(transitiveClasspathDependency in scopedKey.scope := { () }) :: Nil
case allInputFiles.key => allFilesImpl(scopedKey) :: Nil
case changedInputFiles.key => changedInputFilesImpl(scopedKey)
case changedOutputFiles.key =>
changedFilesImpl(scopedKey, changedOutputFiles, outputFileStamps)
case pathToFileStamp.key => stamper(scopedKey) :: Nil
@ -198,7 +196,7 @@ private[sbt] object Settings {
val inputs = (fileInputs in scopedKey.scope).value
val stamper = (inputFileStamper in scopedKey.scope).value
val forceTrigger = (watchForceTriggerOnAnyChange in scopedKey.scope).value
val dynamicInputs = Continuous.dynamicInputs.value
val dynamicInputs = (Continuous.dynamicInputs in scopedKey.scope).value
// This makes watch work by ensuring that the input glob is registered with the
// repository used by the watch process.
sbt.Keys.state.value.get(globalFileTreeRepository).foreach { repo =>
@ -206,7 +204,8 @@ private[sbt] object Settings {
}
dynamicInputs.foreach(_ ++= inputs.map(g => DynamicInput(g, stamper, forceTrigger)))
view.list(inputs)
}) :: fileStamps(scopedKey) :: allFilesImpl(scopedKey) :: Nil
}) :: fileStamps(scopedKey) :: allFilesImpl(scopedKey) :: Nil ++
changedInputFilesImpl(scopedKey)
}
private[this] val taskClass = classOf[Task[_]]
@ -320,8 +319,9 @@ private[sbt] object Settings {
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
addTaskDefinition(Keys.inputFileStamps in scopedKey.scope := {
val stamper = (Keys.pathToFileStamp in scopedKey.scope).value
(Keys.allInputPathsAndAttributes in scopedKey.scope).value.collect {
case (p, a) if a.isRegularFile && !Files.isHidden(p) => p -> stamper(p)
(Keys.allInputPathsAndAttributes in scopedKey.scope).value.flatMap {
case (p, a) if a.isRegularFile && !Files.isHidden(p) => stamper(p).map(p -> _)
case _ => None
}
})
private[this] def outputsAndStamps[T: JsonFormat: ToSeqPath](
@ -341,11 +341,11 @@ private[sbt] object Settings {
})
private[this] def outputFileStampsImpl(scope: Scope): Def.Setting[_] =
addTaskDefinition(outputFileStamps in scope := {
val stamper: Path => FileStamp = (outputFileStamper in scope).value match {
val stamper: Path => Option[FileStamp] = (outputFileStamper in scope).value match {
case LastModified => FileStamp.lastModified
case Hash => FileStamp.hash
}
(allOutputFiles in scope).value.map(p => p -> stamper(p))
(allOutputFiles in scope).value.flatMap(p => stamper(p).map(p -> _))
})
/**
@ -357,18 +357,8 @@ private[sbt] object Settings {
*/
private[this] def stamper(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
addTaskDefinition((Keys.pathToFileStamp in scopedKey.scope) := {
val attributeMap = Keys.fileAttributeMap.value
val attributeMap = Keys.fileStampCache.value
val stamper = (Keys.inputFileStamper in scopedKey.scope).value
path: Path =>
attributeMap.get(path) match {
case null =>
val stamp = stamper match {
case Hash => FileStamp.hash(path)
case LastModified => FileStamp.lastModified(path)
}
attributeMap.put(path, stamp)
stamp
case s => s
}
path: Path => attributeMap.getOrElseUpdate(path, stamper)
})
}

View File

@ -110,7 +110,7 @@ object Watch {
override val occurredAt: FiniteDuration
) extends Event
with Event.Impl {
override def toString: String = s"Update(path, ${occurredAt.toEpochString})"
override def toString: String = s"Update($path, ${occurredAt.toEpochString})"
}
object Update {
def apply(event: FileEvent[FileAttributes]): Update =
@ -217,7 +217,7 @@ object Watch {
* Action that indicates that an error has occurred. The watch will be terminated when this action
* is produced.
*/
final class HandleError(val throwable: Throwable) extends CancelWatch {
sealed class HandleError(val throwable: Throwable) extends CancelWatch {
override def equals(o: Any): Boolean = o match {
case that: HandleError => this.throwable == that.throwable
case _ => false
@ -226,6 +226,15 @@ object Watch {
override def toString: String = s"HandleError($throwable)"
}
/**
* Action that indicates that an error has occurred. The watch will be terminated when this action
* is produced.
*/
private[sbt] final class HandleUnexpectedError(override val throwable: Throwable)
extends HandleError(throwable) {
override def toString: String = s"HandleUnexpectedError($throwable)"
}
/**
* Action that indicates that the watch should continue as though nothing happened. This may be
* because, for example, no user input was yet available.
@ -236,7 +245,7 @@ object Watch {
* Action that indicates that the watch should pause while the build is reloaded. This is used to
* automatically reload the project when the build files (e.g. build.sbt) are changed.
*/
case object Reload extends CancelWatch
private[sbt] case object Reload extends CancelWatch
/**
* Action that indicates that we should exit and run the provided command.
@ -279,7 +288,12 @@ object Watch {
def apply(task: () => Unit, onStart: NextAction, nextAction: NextAction): Watch.Action = {
def safeNextAction(delegate: NextAction): Watch.Action =
try delegate()
catch { case NonFatal(t) => new HandleError(t) }
catch {
case NonFatal(t) =>
System.err.println(s"Watch caught unexpected error:")
t.printStackTrace(System.err)
new HandleError(t)
}
@tailrec def next(): Watch.Action = safeNextAction(nextAction) match {
// This should never return Ignore due to this condition.
case Ignore => next()
@ -379,11 +393,10 @@ object Watch {
private[this] val options = {
val enter = "<enter>"
val newLine = if (Util.isWindows) enter else ""
val opts = Seq(
s"$enter: return to the shell",
s"'r$newLine': repeat the current command",
s"'x$newLine': exit sbt"
s"'r': repeat the current command",
s"'x': exit sbt"
)
s"Options:\n${opts.mkString(" ", "\n ", "")}"
}

View File

@ -15,10 +15,10 @@ import org.scalatest.{ FlatSpec, Matchers }
import sbt.WatchSpec._
import sbt.internal.nio.{ FileEvent, FileEventMonitor, FileTreeRepository }
import sbt.io._
import sbt.io.syntax._
import sbt.nio.Watch
import sbt.nio.Watch.{ NullLogger, _ }
import sbt.nio.file.{ FileAttributes, Glob }
import sbt.nio.file.{ FileAttributes, Glob, RecursiveGlob }
import sbt.nio.file.syntax._
import sbt.util.Logger
import scala.collection.mutable
@ -84,13 +84,13 @@ class WatchSpec extends FlatSpec with Matchers {
}
"Watch" should "stop" in IO.withTemporaryDirectory { dir =>
val task = new Task
watch(task, TestDefaults.callbacks(inputs = Seq(dir.toRealPath ** AllPassFilter))) shouldBe CancelWatch
watch(task, TestDefaults.callbacks(inputs = Seq(dir.toRealPath.toGlob / RecursiveGlob))) shouldBe CancelWatch
}
it should "trigger" in IO.withTemporaryDirectory { dir =>
val triggered = new AtomicBoolean(false)
val task = new Task
val callbacks = TestDefaults.callbacks(
inputs = Seq(dir.toRealPath ** AllPassFilter),
inputs = Seq(dir.toRealPath.toGlob / RecursiveGlob),
onStartWatch = () => if (task.getCount == 2) CancelWatch else Ignore,
onWatchEvent = _ => { triggered.set(true); Trigger },
watchingMessage = () => {
@ -107,7 +107,7 @@ class WatchSpec extends FlatSpec with Matchers {
val bar = realDir.toPath.resolve("bar")
val task = new Task
val callbacks = TestDefaults.callbacks(
inputs = Seq(realDir ** AllPassFilter),
inputs = Seq(realDir.toGlob / RecursiveGlob),
onStartWatch = () => if (task.getCount == 2) CancelWatch else Ignore,
onWatchEvent = e => if (e.path == foo) Trigger else Ignore,
triggeredMessage = e => { queue += e.path; None },
@ -126,7 +126,7 @@ class WatchSpec extends FlatSpec with Matchers {
val bar = realDir.toPath.resolve("bar")
val task = new Task
val callbacks = TestDefaults.callbacks(
inputs = Seq(realDir ** AllPassFilter),
inputs = Seq(realDir.toGlob / RecursiveGlob),
onStartWatch = () => if (task.getCount == 3) CancelWatch else Ignore,
onWatchEvent = e => if (e.path != realDir.toPath) Trigger else Ignore,
triggeredMessage = e => { queue += e.path; None },
@ -148,19 +148,19 @@ class WatchSpec extends FlatSpec with Matchers {
val exception = new IllegalStateException("halt")
val task = new Task { override def apply(): Unit = throw exception }
val callbacks = TestDefaults.callbacks(
Seq(dir.toRealPath ** AllPassFilter),
Seq(dir.toRealPath.toGlob / RecursiveGlob),
)
watch(task, callbacks) shouldBe new HandleError(exception)
}
it should "reload" in IO.withTemporaryDirectory { dir =>
val task = new Task
val callbacks = TestDefaults.callbacks(
inputs = Seq(dir.toRealPath ** AllPassFilter),
inputs = Seq(dir.toRealPath.toGlob / RecursiveGlob),
onStartWatch = () => Ignore,
onWatchEvent = _ => Reload,
onWatchEvent = _ => Watch.Reload,
watchingMessage = () => { new File(dir, "file").createNewFile(); None }
)
watch(task, callbacks) shouldBe Reload
watch(task, callbacks) shouldBe Watch.Reload
}
}

View File

@ -45,6 +45,6 @@ class FileStampJsonSpec extends FlatSpec {
val both: Seq[(Path, FileStamp)] = hashes ++ lastModifiedTimes
val json = Converter.toJsonUnsafe(both)(fileStampJsonFormatter)
val deserialized = Converter.fromJsonUnsafe(json)(fileStampJsonFormatter)
assert(both.sameElements(deserialized))
assert(both == deserialized)
}
}

View File

@ -9,7 +9,7 @@ object Dependencies {
val baseScalaVersion = scala212
// sbt modules
private val ioVersion = "1.3.0-M9"
private val ioVersion = "1.3.0-M10"
private val utilVersion = "1.3.0-M6"
private val lmVersion =
sys.props.get("sbt.build.lm.version") match {

View File

@ -5,6 +5,6 @@ cleanKeepFiles ++= Seq(
target.value / "keepfile"
)
cleanKeepGlobs += target.value / "keepdir" ** AllPassFilter
cleanKeepGlobs += target.value.toGlob / "keepdir" / **
// This is necessary because recursive globs do not include the base directory.
cleanKeepGlobs += Glob(target.value / "keepdir")

View File

@ -2,8 +2,8 @@ import sbt.nio.Keys._
val foo = taskKey[Unit]("foo")
foo / fileInputs := Seq(
(baseDirectory.value / "base").toGlob / "*.md",
(baseDirectory.value / "base").toGlob / "*.txt",
baseDirectory.value.toGlob / "base" / "*.md",
baseDirectory.value.toGlob / "base" / "*.txt",
)
val checkModified = taskKey[Unit]("check that modified files are returned")

View File

@ -3,7 +3,7 @@
// Check that we can correctly extract Foo.txt with a recursive source
val foo = taskKey[Seq[File]]("Retrieve Foo.txt")
foo / fileInputs += baseDirectory.value ** "*.txt"
foo / fileInputs += baseDirectory.value.toGlob / ** / "*.txt"
foo := (foo / allInputFiles).value.map(_.toFile)
@ -14,24 +14,24 @@ checkFoo := assert(foo.value == Seq(baseDirectory.value / "base/subdir/nested-su
// Check that we can correctly extract Bar.md with a non-recursive source
val bar = taskKey[Seq[File]]("Retrieve Bar.md")
bar / fileInputs += baseDirectory.value / "base/subdir/nested-subdir" * "*.md"
bar / fileInputs += baseDirectory.value.toGlob / "base" / "subdir" / "nested-subdir" / "*.md"
bar := (bar / allInputFiles).value.map(_.toFile)
val checkBar = taskKey[Unit]("Check that the Bar.md file is retrieved")
checkBar := assert(bar.value == Seq(baseDirectory.value / "base/subdir/nested-subdir/Bar.md"))
checkBar := assert(bar.value == Seq(baseDirectory.value / "base" / "subdir" / "nested-subdir" / "Bar.md"))
// Check that we can correctly extract Bar.md and Foo.md with a non-recursive source
val all = taskKey[Seq[File]]("Retrieve all files")
all / fileInputs += baseDirectory.value / "base" / "subdir" / "nested-subdir" * AllPassFilter
all / fileInputs += baseDirectory.value.toGlob / "base" / "subdir" / "nested-subdir" / *
val checkAll = taskKey[Unit]("Check that all of the files are retrieved")
checkAll := {
import sbt.dsl.LinterLevel.Ignore
val expected = Set("Foo.txt", "Bar.md").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
val expected = Set("Foo.txt", "Bar.md").map(baseDirectory.value / "base" / "subdir" / "nested-subdir" / _)
val actual = (all / allInputFiles).value.map(_.toFile).toSet
assert(actual == expected)
}
@ -39,8 +39,8 @@ checkAll := {
val set = taskKey[Seq[File]]("Specify redundant sources in a set")
set / fileInputs ++= Seq(
baseDirectory.value / "base" ** -DirectoryFilter,
baseDirectory.value / "base" / "subdir" / "nested-subdir" * -DirectoryFilter
baseDirectory.value.toGlob / "base" / **,
baseDirectory.value.toGlob / "base" / "subdir" / "nested-subdir" / *
)
val depth = taskKey[Seq[File]]("Specify redundant sources with limited depth")
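The glob shapes used above differ only in how many path levels they span. An illustrative comparison with invented paths, using RecursiveGlob and AnyPath, which as I understand it are the spelled-out forms of ** and *:

import java.io.File
import sbt.nio.file.{ AnyPath, Glob, RecursiveGlob }
import sbt.nio.file.syntax._

val base = new File("base")
val anyDepthTxt: Glob = base.toGlob / RecursiveGlob / "*.txt" // *.txt at any depth under base
val oneLevelMd: Glob  = base.toGlob / "subdir" / "*.md"       // *.md directly inside base/subdir
val allDirect: Glob   = base.toGlob / "subdir" / AnyPath      // every entry directly inside base/subdir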

View File

@ -21,7 +21,7 @@ object Build {
(Compile / unmanagedResources / fileInputs).value,
Test / cached / fileInputs := (Test / unmanagedSources / fileInputs).value ++
(Test / unmanagedResources / fileInputs).value,
Compile / newInputs / fileInputs += baseDirectory.value * "*.sc",
Compile / newInputs / fileInputs += baseDirectory.value.toGlob / "*.sc",
Compile / unmanagedSources / fileInputs ++= (Compile / newInputs / fileInputs).value,
checkCompile := {
val actual = (Compile / compile / transitiveDynamicInputs).value.map(_.glob).toSet

View File

@ -0,0 +1,4 @@
val foo = taskKey[Unit]("working task")
foo := { println("foo") }
Global / onChangedBuildSource := ReloadOnSourceChanges

View File

@ -0,0 +1,4 @@
val foo = taskKey[Unit]("broken task")
foo := { throw new IllegalStateException("foo") }
Global / onChangedBuildSource := ReloadOnSourceChanges

View File

@ -0,0 +1,4 @@
val foo = taskKey[Unit]("working task")
foo := { println("foo") }
Global / onChangedBuildSource := ReloadOnSourceChanges

View File

@ -0,0 +1,9 @@
> foo
$ copy-file changes/broken.sbt build.sbt
-> foo
$ copy-file changes/working.sbt build.sbt
> foo
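This scripted test exercises automatic reloading of changed build sources. As a rough user-facing sketch, the setting accepts three policies; only ReloadOnSourceChanges appears in this change, and the commented alternatives are assumptions based on sbt.nio.Keys:

Global / onChangedBuildSource := ReloadOnSourceChanges // reload the build when *.sbt or project sources change
// Global / onChangedBuildSource := WarnOnSourceChanges   // only warn that the build is stale
// Global / onChangedBuildSource := IgnoreSourceChanges   // take no action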

View File

@ -8,6 +8,6 @@ setStringValue := setStringValueImpl.evaluated
checkStringValue := checkStringValueImpl.evaluated
setStringValue / watchTriggers := baseDirectory.value * "string.txt" :: Nil
setStringValue / watchTriggers := baseDirectory.value.toGlob / "string.txt" :: Nil
watchOnFileInputEvent := { (_, _) => sbt.nio.Watch.CancelWatch }

View File

@ -18,7 +18,7 @@ object Build {
}
lazy val foo = project.settings(
watchStartMessage := { (count: Int, _, _) => Some(s"FOO $count") },
Compile / compile / watchTriggers += baseDirectory.value * "foo.txt",
Compile / compile / watchTriggers += baseDirectory.value.toGlob / "foo.txt",
Compile / compile / watchStartMessage := { (count: Int, _, _) =>
// this checks that Compile / compile / watchStartMessage
// is preferred to Compile / watchStartMessage
@ -35,7 +35,9 @@ object Build {
checkStringValue := checkStringValueImpl.evaluated,
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch }
)
lazy val bar = project.settings(fileInputs in setStringValue += baseDirectory.value * "foo.txt")
lazy val bar = project.settings(
fileInputs in setStringValue += baseDirectory.value.toGlob / "foo.txt"
)
lazy val root = (project in file(".")).aggregate(foo, bar).settings(
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch }
)
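A hypothetical build fragment restating what the comment above verifies, namely that for watch settings the most specifically scoped value (task scope) takes precedence over the configuration-scoped one:

Compile / watchStartMessage := { (iteration: Int, _, _) =>
  Some(s"configuration scope, iteration $iteration")
}
Compile / compile / watchStartMessage := { (iteration: Int, _, _) =>
  Some(s"task scope wins for ~Compile/compile, iteration $iteration")
}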

View File

@ -23,7 +23,7 @@ object Build {
}
lazy val root = (project in file(".")).settings(
reloadFile := baseDirectory.value / "reload",
foo / fileInputs += baseDirectory.value * "foo.txt",
foo / fileInputs += baseDirectory.value.toGlob / "foo.txt",
foo := (foo / allInputFiles).value,
setStringValue := Def.taskDyn {
// This hides foo / fileInputs from the input graph

View File

@ -4,4 +4,4 @@ val root = Build.root
val foo = Build.foo
val bar = Build.bar
Global / watchTriggers += baseDirectory.value * "baz.txt"
Global / watchTriggers += baseDirectory.value.toGlob / "baz.txt"

View File

@ -46,14 +46,14 @@ object Build {
},
checkTriggers := {
val actual = triggers((Compile / compile / transitiveDynamicInputs).value).toSet
val base = baseDirectory.value.getParentFile
val base = baseDirectory.value.getParentFile.toGlob
// This checks that since foo depends on bar there is a transitive trigger generated
// for the "bar.txt" trigger added to bar / Compile / unmanagedResources (which is a
// transitive dependency of
val expected: Set[Glob] = Set(base * "baz.txt", (base / "bar") * "bar.txt")
val expected: Set[Glob] = Set(base / "baz.txt", base / "bar" / "bar.txt")
assert(actual == expected)
},
Test / test / watchTriggers += (baseDirectory.value / "test.txt").toGlob,
Test / test / watchTriggers += baseDirectory.value.toGlob / "test.txt",
Test / checkTriggers := {
val testTriggers = triggers((Test / test / transitiveDynamicInputs).value).toSet
// This validates that since the "test.txt" trigger is only added to the Test / test task,
@ -61,34 +61,34 @@ object Build {
// are found in the test above for the compile configuration because of the transitive
// classpath dependency that is added in Defaults.internalDependencies.
val compileTriggers = triggers((Test / compile / transitiveDynamicInputs).value).toSet
val base = baseDirectory.value.getParentFile
val base = baseDirectory.value.getParentFile.toGlob
val expected: Set[Glob] =
Set(base * "baz.txt", (base / "bar") * "bar.txt", (base / "foo") * "test.txt")
Set(base / "baz.txt", base / "bar" / "bar.txt", base / "foo" / "test.txt")
assert(testTriggers == expected)
assert((testTriggers - ((base / "foo") * "test.txt")) == compileTriggers)
assert((testTriggers - (base / "foo" / "test.txt")) == compileTriggers)
},
)
.dependsOn(bar)
lazy val bar = project.settings(
fileInputs in setStringValue += baseDirectory.value * "foo.txt",
setStringValue / watchTriggers += baseDirectory.value * "bar.txt",
fileInputs in setStringValue += baseDirectory.value.toGlob / "foo.txt",
setStringValue / watchTriggers += baseDirectory.value.toGlob / "bar.txt",
// This trigger should transitively propagate to foo / compile and foo / Test / compile
Compile / unmanagedResources / watchTriggers += baseDirectory.value * "bar.txt",
Compile / unmanagedResources / watchTriggers += baseDirectory.value.toGlob / "bar.txt",
checkTriggers := {
val base = baseDirectory.value.getParentFile
val base = baseDirectory.value.getParentFile.toGlob
val actual = triggers((Compile / compile / transitiveDynamicInputs).value).toSet
val expected: Set[Glob] = Set((base / "bar") * "bar.txt", base * "baz.txt")
val expected: Set[Glob] = Set(base / "bar" / "bar.txt", base / "baz.txt")
assert(actual == expected)
},
// This trigger should not transitively propagate to any foo task
Test / unmanagedResources / watchTriggers += baseDirectory.value * "bar-test.txt",
Test / unmanagedResources / watchTriggers += baseDirectory.value.toGlob / "bar-test.txt",
Test / checkTriggers := {
val testTriggers = triggers((Test / test / transitiveDynamicInputs).value).toSet
val compileTriggers = triggers((Test / compile / transitiveDynamicInputs).value).toSet
val base = baseDirectory.value.getParentFile
val base = baseDirectory.value.getParentFile.toGlob
val expected: Set[Glob] =
Set(base * "baz.txt", (base / "bar") * "bar.txt", (base / "bar") * "bar-test.txt")
Set(base / "baz.txt", base / "bar" / "bar.txt", base / "bar" / "bar-test.txt")
assert(testTriggers == expected)
assert(testTriggers == compileTriggers)
},
@ -101,7 +101,7 @@ object Build {
},
checkTriggers := {
val actual = triggers((Compile / compile / transitiveDynamicInputs).value)
val expected: Seq[Glob] = baseDirectory.value * "baz.txt" :: Nil
val expected: Seq[Glob] = baseDirectory.value.toGlob / "baz.txt" :: Nil
assert(actual == expected)
},
)
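A hedged example of the behaviour these assertions cover (the glob is invented): a trigger attached to a task that compile transitively depends on also re-triggers a continuous compile.

Compile / unmanagedResources / watchTriggers += baseDirectory.value.toGlob / "conf" / "*.conf"
// With the trigger above, ~Compile/compile should also re-run when a matching .conf
// file changes, because unmanagedResources is reached through compile's transitive
// task graph, which is the same propagation the checkTriggers tasks assert on.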

View File

@ -0,0 +1,36 @@
import java.nio.file._
import sbt.nio.Keys._
import sbt.nio._
import scala.concurrent.duration._
import StandardCopyOption.{ REPLACE_EXISTING => replace }
watchTriggeredMessage := { (i, path: Path, c) =>
val prev = watchTriggeredMessage.value
if (path.getFileName.toString == "C.scala")
throw new IllegalStateException("C.scala should not trigger")
prev(i, path, c)
}
watchOnIteration := { i: Int =>
val base = baseDirectory.value.toPath
val src =
base.resolve("src").resolve("main").resolve("scala").resolve("sbt").resolve("test")
val changes = base.resolve("changes")
Files.copy(changes.resolve("C.scala"), src.resolve("C.scala"), replace)
if (i < 5) {
val content =
new String(Files.readAllBytes(changes.resolve("A.scala"))) + "\n" + ("//" * i)
Files.write(src.resolve("A.scala"), content.getBytes)
} else {
Files.copy(changes.resolve("B.scala"), src.resolve("B.scala"), replace)
}
println("Waiting for changes...")
Watch.Ignore
}
watchOnFileInputEvent := { (_, event: Watch.Event) =>
if (event.path.getFileName.toString == "B.scala") Watch.CancelWatch
else Watch.Trigger
}
watchAntiEntropy := 0.millis
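As a simplified, hypothetical counterpart to the configuration above, a watchOnFileInputEvent handler maps each file event to a Watch action such as Ignore, Trigger, CancelWatch, or Reload; the file name below is invented.

import sbt.nio.Keys._
import sbt.nio._

watchOnFileInputEvent := { (_, event: Watch.Event) =>
  if (event.path.getFileName.toString == "stop.txt") Watch.CancelWatch
  else Watch.Trigger
}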

View File

@ -0,0 +1,3 @@
package sbt.test
class A

View File

@ -0,0 +1,3 @@
package sbt.test
class B

View File

@ -0,0 +1,3 @@
package sbt.test
class C

View File

@ -0,0 +1,3 @@
package sbt.test
class A {

View File

@ -0,0 +1,3 @@
package sbt.test
class B {

View File

@ -0,0 +1,3 @@
package sbt.test
class C

View File

@ -0,0 +1,3 @@
> ~compile
> compile

View File

@ -5,7 +5,7 @@ val resetCount = taskKey[Unit]("reset compile count")
checkCount := {
val expected = Def.spaceDelimited().parsed.head.toInt
if (Count.get != expected)
throw new IllegalStateException(s"Expected ${expected} compilation runs, got ${Count.get}")
throw new IllegalStateException(s"Expected $expected compilation runs, got ${Count.get}")
}
resetCount := {
@ -16,9 +16,4 @@ failingTask := {
throw new IllegalStateException("failed")
}
Compile / compile := {
Count.increment()
// Trigger a new build by updating the last modified time
((Compile / scalaSource).value / "A.scala").setLastModified(5000)
(Compile / compile).value
}
onChangedBuildSource := ReloadOnSourceChanges

View File

@ -2,3 +2,11 @@ val checkReloaded = taskKey[Unit]("Asserts that the build was reloaded")
checkReloaded := { () }
watchOnIteration := { _ => sbt.nio.Watch.CancelWatch }
Compile / compile := {
Count.increment()
// Trigger a new build by appending to the source file
val file = (Compile / scalaSource).value / "A.scala"
IO.write(file, IO.read(file) + ("\n" * Count.get))
(Compile / compile).value
}

View File

@ -1 +1,7 @@
watchOnIteration := { _ => sbt.nio.Watch.Reload }
Compile / compile := {
Count.increment()
// Change the build sources by copying an extra .sbt file into the base directory
val extra = baseDirectory.value / "extra.sbt"
IO.copyFile(baseDirectory.value / "changes" / "extra.sbt", extra, CopyOptions().withOverwrite(true))
(Compile / compile).value
}

View File

@ -1,6 +1,3 @@
# verify that reloading occurs if watchOnStart returns Watch.Reload
$ copy-file changes/extra.sbt extra.sbt
> ~compile
> checkReloaded

View File

@ -19,7 +19,7 @@ object Build {
}
lazy val root = (project in file(".")).settings(
reloadFile := baseDirectory.value / "reload",
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
setStringValue / watchTriggers += baseDirectory.value.toGlob / "foo.txt",
setStringValue := setStringValueImpl.evaluated,
checkStringValue := checkStringValueImpl.evaluated,
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch },

View File

@ -17,7 +17,7 @@ object Build {
assert(IO.read(file(stringFile)) == string)
}
lazy val root = (project in file(".")).settings(
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
setStringValue / watchTriggers += baseDirectory.value.toGlob / "foo.txt",
setStringValue := setStringValueImpl.evaluated,
checkStringValue := checkStringValueImpl.evaluated,
watchStartMessage := { (_, _, _) =>

View File

@ -19,7 +19,7 @@ object Build {
}
lazy val root = (project in file(".")).settings(
reloadFile := baseDirectory.value / "reload",
setStringValue / watchTriggers += baseDirectory.value * "foo.txt",
setStringValue / watchTriggers += baseDirectory.value.toGlob / "foo.txt",
setStringValue := setStringValueImpl.evaluated,
checkStringValue := checkStringValueImpl.evaluated,
watchStartMessage := { (_, _, _) =>

View File

@ -129,13 +129,16 @@ final class ScriptedTests(
case s => s
}
val (launcherBasedTests, runFromSourceBasedTests) = labelsAndDirs.partition {
case (testName, _) =>
determineRemoteSbtCreatorKind(testName) match {
case RemoteSbtCreatorKind.LauncherBased => true
case RemoteSbtCreatorKind.RunFromSourceBased => false
}
}
val (launcherBasedTestsUnfiltered, runFromSourceBasedTestsUnfiltered) =
labelsAndDirs.partition {
case (testName, _) =>
determineRemoteSbtCreatorKind(testName) match {
case RemoteSbtCreatorKind.LauncherBased => true
case RemoteSbtCreatorKind.RunFromSourceBased => false
}
}
val launcherBasedTests = launcherBasedTestsUnfiltered.filterNot(windowsExclude)
val runFromSourceBasedTests = runFromSourceBasedTestsUnfiltered.filterNot(windowsExclude)
def logTests(size: Int, how: String) =
log.info(
@ -163,6 +166,17 @@ final class ScriptedTests(
}
}
private[this] val windowsExclude: (((String, String), File)) => Boolean =
if (scala.util.Properties.isWin) {
case (testName, _) =>
testName match {
case ("classloader-cache", "jni") => true // no native lib is built for windows
case ("classloader-cache", "snapshot") =>
true // the test overwrites a jar that is being used which is verboten in windows
case ("nio", "make-clone") => true // uses gcc which isn't set up on all systems
case _ => false
}
} else _ => false
private def determineRemoteSbtCreatorKind(testName: (String, String)): RemoteSbtCreatorKind = {
import RemoteSbtCreatorKind._
val (group, name) = testName