Merge pull request #4627 from eatkins/WIP-file-inputs

Add support for managed task inputs
This commit is contained in:
eugene yokota 2019-05-02 22:03:42 -04:00 committed by GitHub
commit 5aeaa2981a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
114 changed files with 2321 additions and 1468 deletions

View File

@ -718,6 +718,20 @@ lazy val sbtIgnoredProblems = {
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$WatchSource_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.WatchSource"),
exclude[ReversedMissingMethodProblem]("sbt.Import.AnyPath"),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$**_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$*_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$ChangedFiles_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$AnyPath_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$Glob_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$RecursiveGlob_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.sbt$Import$_setter_$RelativeGlob_="),
exclude[ReversedMissingMethodProblem]("sbt.Import.*"),
exclude[ReversedMissingMethodProblem]("sbt.Import.**"),
exclude[ReversedMissingMethodProblem]("sbt.Import.ChangedFiles"),
exclude[ReversedMissingMethodProblem]("sbt.Import.RecursiveGlob"),
exclude[ReversedMissingMethodProblem]("sbt.Import.Glob"),
exclude[ReversedMissingMethodProblem]("sbt.Import.RelativeGlob"),
// Dropped in favour of kind-projector's polymorphic lambda literals
exclude[DirectMissingMethodProblem]("sbt.Import.Param"),
exclude[DirectMissingMethodProblem]("sbt.package.Param"),

View File

@ -1,37 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util.appmacro
import scala.reflect.macros.blackbox
object MacroDefaults {
/**
* Macro to generate the default file tree repository. It must be defined as an untyped tree
* because sbt.Keys is not available in this project. This is meant for internal use only, but
* must be public because it's a macro.
* @param c the macro context
* @return the tree expressing the default file tree repository
* (`sbt.Keys.fileTreeRepository.value`, annotated with `@sbtUnchecked`).
*/
def fileTreeRepository(c: blackbox.Context): c.Tree = {
import c.universe._
q"sbt.Keys.fileTreeRepository.value: @sbtUnchecked"
}
/**
* Macro to generate the default dynamic inputs. It must be defined as an untyped tree because
* sbt.internal.Continuous is not available in this project. This is meant for internal use
* only, but must be public because it's a macro.
* @param c the macro context
* @return the tree expressing the default dynamic inputs
* (`sbt.internal.Continuous.dynamicInputs.value`, annotated with `@sbtUnchecked`).
*/
def dynamicInputs(c: blackbox.Context): c.Tree = {
import c.universe._
q"sbt.internal.Continuous.dynamicInputs.value: @sbtUnchecked"
}
}

View File

@ -12,6 +12,7 @@ import jline.console.history.{ FileHistory, MemoryHistory }
import java.io.{ File, FileDescriptor, FileInputStream, FilterInputStream, InputStream }
import complete.Parser
import jline.Terminal
import scala.concurrent.duration._
import scala.annotation.tailrec
@ -119,7 +120,7 @@ private[sbt] object JLine {
// When calling this, ensure that enableEcho has been or will be called.
// TerminalFactory.get will initialize the terminal to disable echo.
private[sbt] def terminal = jline.TerminalFactory.get
private[sbt] def terminal: Terminal = jline.TerminalFactory.get
private def withTerminal[T](f: jline.Terminal => T): T =
synchronized {

View File

@ -8,8 +8,8 @@
package sbt
import sbt.Tests.{ Output, Summary }
import sbt.util.{ Level, Logger }
import sbt.protocol.testing.TestResult
import sbt.util.{ Level, Logger }
/**
* Logs information about tests after they finish.

View File

@ -1,75 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
import java.io.{ File => JFile }
import java.nio.file.Path
import sbt.internal.FileAttributes
import sbt.internal.inc.{ EmptyStamp, Stamper }
import sbt.io.TypedPath
import xsbti.compile.analysis.Stamp
/**
* A File that has a compile analysis Stamp value associated with it. In general, the stamp method
* should be a cached value that can be read without doing any io. This can be used to improve
* performance anywhere where we need to check if files have changed before doing potentially
* expensive work.
*/
private[sbt] trait Stamped {
// The compile-analysis Stamp for this file. Implementations should return a cached value
// so that reading it performs no io (see the trait-level scaladoc above).
private[sbt] def stamp: Stamp
}
/**
* Provides converter functions from TypedPath to [[Stamped]].
*/
private[sbt] object Stamped {
// A java.io.File that also carries a cached Stamp.
type File = JFile with Stamped
/**
* Converts a (Path, FileAttributes) pair into a java.io.File that caches the Stamp taken
* from the attributes, so later stamp reads require no io.
*/
private[sbt] val file: ((Path, FileAttributes)) => JFile with Stamped = {
case (path: Path, attributes: FileAttributes) =>
new StampedFileImpl(path, attributes.stamp)
}
/**
* Converts a TypedPath instance to a [[xsbti.compile.analysis.Stamp]] by calculating the
* file hash.
*/
private[sbt] val sourceConverter: TypedPath => Stamp = tp => Stamper.forHash(tp.toPath.toFile)
/**
* Converts a TypedPath instance to a [[xsbti.compile.analysis.Stamp]] using the last
* modified time.
*/
private[sbt] val binaryConverter: TypedPath => Stamp = tp =>
Stamper.forLastModified(tp.toPath.toFile)
/**
* A combined converter: non-existent paths map to EmptyStamp; directories and files ending
* in *.jar or *.class are stamped by last modified time; all other files are stamped by
* file hash.
*/
// Note: `(_: TypedPath) match { ... }` is placeholder syntax that expands to a function
// literal `tp => tp match { ... }`.
private[sbt] val converter: TypedPath => Stamp = (_: TypedPath) match {
case typedPath if !typedPath.exists => EmptyStamp
case typedPath if typedPath.isDirectory => binaryConverter(typedPath)
case typedPath =>
typedPath.toPath.toString match {
case s if s.endsWith(".jar") => binaryConverter(typedPath)
case s if s.endsWith(".class") => binaryConverter(typedPath)
case _ => sourceConverter(typedPath)
}
}
/**
* Adds a default ordering that just delegates to the java.io.File.compareTo method.
*/
private[sbt] implicit case object ordering extends Ordering[Stamped.File] {
override def compare(left: Stamped.File, right: Stamped.File): Int = left.compareTo(right)
}
// Carries only a stamp, with no associated path.
private final class StampedImpl(override val stamp: Stamp) extends Stamped
// A java.io.File subclass that pre-caches the stamp computed at construction time.
private final class StampedFileImpl(path: Path, override val stamp: Stamp)
extends java.io.File(path.toString)
with Stamped
}

View File

@ -51,7 +51,6 @@ trait Watched {
object Watched {
@deprecated("WatchSource is replaced by sbt.io.Glob", "1.3.0")
type WatchSource = Source
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
@ -60,7 +59,6 @@ object Watched {
def clearScreen: String = "\u001b[2J\u001b[0;0H"
@deprecated("WatchSource has been replaced by sbt.io.Glob", "1.3.0")
object WatchSource {
/**

View File

@ -1,101 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal
import java.lang
import java.nio.file.Path
import java.util.Optional
import sbt.Stamped
import sbt.internal.inc.{ EmptyStamp, LastModified, Stamp }
import sbt.io.FileEventMonitor.{ Creation, Deletion, Update }
import sbt.io.{ FileEventMonitor, TypedPath }
import xsbti.compile.analysis.{ Stamp => XStamp }
/**
* Represents the FileAttributes of a file. This will be moved to io before 1.3.0 is released.
*/
trait FileAttributes {
// The file content hash, if one has been computed for this file.
def hash: Option[String]
// The last modified time, if known. NOTE(review): units (millis vs seconds) are not
// established by this file — confirm against the producing implementation.
def lastModified: Option[Long]
def isRegularFile: Boolean
def isDirectory: Boolean
def isSymbolicLink: Boolean
}
object FileAttributes {
/** A file system event: the path plus the attributes before and after the change. */
trait Event {
def path: Path
// Attributes prior to the event; None for creations (and unknown event types).
def previous: Option[FileAttributes]
// Attributes after the event; None for deletions (and unknown event types).
def current: Option[FileAttributes]
}
/** Adapts a FileEventMonitor.Event to the [[Event]] trait above. */
private[sbt] class EventImpl(event: FileEventMonitor.Event[FileAttributes]) extends Event {
override def path: Path = event.entry.typedPath.toPath
override def previous: Option[FileAttributes] = event match {
case Deletion(entry, _) => entry.value.toOption
case Update(previous, _, _) => previous.value.toOption
case _ => None
}
override def current: Option[FileAttributes] = event match {
case Creation(entry, _) => entry.value.toOption
case Update(_, current, _) => current.value.toOption
case _ => None
}
// Structural equality over (path, previous, current) so that any Event implementation
// with the same fields compares equal to this one.
override def equals(o: Any): Boolean = o match {
case that: Event =>
this.path == that.path && this.previous == that.previous && this.current == that.current
case _ => false
}
override def hashCode(): Int =
((path.hashCode * 31) ^ previous.hashCode() * 31) ^ current.hashCode()
override def toString: String = s"Event($path, $previous, $current)"
}
/** Builds FileAttributes for a path using the combined Stamped.converter stamping rules. */
private[sbt] def default(typedPath: TypedPath): FileAttributes =
DelegateFileAttributes(Stamped.converter(typedPath), typedPath)
/** Extracts the compile-analysis stamp from arbitrary FileAttributes. */
private[sbt] implicit class FileAttributesOps(val e: FileAttributes) extends AnyVal {
private[sbt] def stamp: XStamp = e match {
// Fast path: the delegate already holds an XStamp.
case DelegateFileAttributes(s, _) => s
case _ =>
// Otherwise reconstruct: prefer the hash, then last modified, else the empty stamp.
e.hash
.map(Stamp.fromString)
.orElse(e.lastModified.map(new LastModified(_)))
.getOrElse(EmptyStamp)
}
}
/** Syntax for comparing stamps with Zinc's equivStamp equivalence. */
private implicit class Equiv(val xstamp: XStamp) extends AnyVal {
def equiv(that: XStamp): Boolean = Stamp.equivStamp.equiv(xstamp, that)
}
/**
* FileAttributes implementation that delegates both the FileAttributes queries (via the
* TypedPath) and the XStamp interface (via the wrapped stamp).
*/
private case class DelegateFileAttributes(
private val stamp: XStamp,
private val typedPath: TypedPath
) extends FileAttributes
with XStamp {
override def getValueId: Int = stamp.getValueId
override def writeStamp(): String = stamp.writeStamp()
override def getHash: Optional[String] = stamp.getHash
override def getLastModified: Optional[lang.Long] = stamp.getLastModified
override def hash: Option[String] = getHash match {
case h if h.isPresent => Some(h.get)
case _ => None
}
override def lastModified: Option[Long] = getLastModified match {
case l if l.isPresent => Some(l.get)
case _ => None
}
// Equality uses stamp equivalence (not stamp ==) plus typedPath equality.
override def equals(o: Any): Boolean = o match {
case DelegateFileAttributes(thatStamp, thatTypedPath) =>
(this.stamp equiv thatStamp) && (this.typedPath == thatTypedPath)
case _ => false
}
// NOTE(review): hashCode ignores typedPath while equals compares it, and equivalent
// stamps are not guaranteed to share a hashCode — verify these instances are not used
// as hash keys across different stamp representations.
override def hashCode: Int = stamp.hashCode
override def toString: String = s"FileAttributes(hash = $hash, lastModified = $lastModified)"
override def isRegularFile: Boolean = typedPath.isFile
override def isDirectory: Boolean = typedPath.isDirectory
override def isSymbolicLink: Boolean = typedPath.isSymbolicLink
}
}

View File

@ -8,12 +8,13 @@
package sbt.internal
import sbt.BasicCommandStrings.{ ClearOnFailure, FailureWall }
import sbt.Watched.ContinuousEventMonitor
import sbt.internal.io.{ EventMonitor, WatchState }
import sbt.internal.nio.{ FileEventMonitor, FileTreeRepository, WatchLogger }
import sbt.{ State, Watched }
import scala.annotation.tailrec
import Watched.ContinuousEventMonitor
import scala.concurrent.duration._
import scala.util.control.NonFatal
private[sbt] object LegacyWatched {
@ -22,43 +23,63 @@ private[sbt] object LegacyWatched {
@tailrec def shouldTerminate: Boolean =
(System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
val log = s.log
val logger = new EventMonitor.Logger {
override def debug(msg: => Any): Unit = log.debug(msg.toString)
}
s get ContinuousEventMonitor match {
case None =>
val watchState = WatchState.empty(watched.watchService(), watched.watchSources(s))
// This is the first iteration, so run the task and create a new EventMonitor
val logger: WatchLogger = (a: Any) => log.debug(a.toString)
val repo = FileTreeRepository.legacy(logger, watched.watchService())
val fileEventMonitor = FileEventMonitor.antiEntropy(
repo,
watched.antiEntropy,
logger,
watched.antiEntropy,
10.minutes
)
val monitor = new EventMonitor {
override def awaitEvent(): Boolean = fileEventMonitor.poll(2.millis).nonEmpty
override def state(): WatchState = watchState
override def close(): Unit = watchState.close()
}
(ClearOnFailure :: next :: FailureWall :: repeat :: s)
.put(
ContinuousEventMonitor,
EventMonitor(
WatchState.empty(watched.watchService(), watched.watchSources(s)),
watched.pollInterval,
watched.antiEntropy,
shouldTerminate,
logger
)
)
.put(ContinuousEventMonitor, monitor)
case Some(eventMonitor) =>
Watched.printIfDefined(watched watchingMessage eventMonitor.state)
val triggered = try eventMonitor.awaitEvent()
catch {
case NonFatal(e) =>
log.error(
"Error occurred obtaining files to watch. Terminating continuous execution..."
)
s.handleError(e)
false
}
if (triggered) {
Watched.printIfDefined(watched triggeredMessage eventMonitor.state)
ClearOnFailure :: next :: FailureWall :: repeat :: s
} else {
while (System.in.available() > 0) System.in.read()
eventMonitor.close()
s.remove(ContinuousEventMonitor)
@tailrec def impl(): State = {
val triggered = try eventMonitor.awaitEvent()
catch {
case NonFatal(e) =>
log.error(
"Error occurred obtaining files to watch. Terminating continuous execution..."
)
s.handleError(e)
false
}
if (triggered) {
Watched.printIfDefined(watched triggeredMessage eventMonitor.state)
ClearOnFailure :: next :: FailureWall :: repeat :: s
} else if (shouldTerminate) {
while (System.in.available() > 0) System.in.read()
eventMonitor.close()
s.remove(ContinuousEventMonitor)
} else {
impl()
}
}
impl()
}
}
}
package io {
@deprecated("No longer used", "1.3.0")
private[sbt] trait EventMonitor extends AutoCloseable {
/** Block indefinitely until the monitor receives a file event or the user stops the watch. */
def awaitEvent(): Boolean
/** A snapshot of the WatchState that includes the number of build triggers and watch sources. */
def state(): WatchState
}
}

View File

@ -31,11 +31,11 @@ import sbt.internal._
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.inc.{ ZincLmUtil, ZincUtil }
import sbt.internal.io.{ Source, WatchState }
import sbt.internal.librarymanagement.{ CustomHttp => _, _ }
import sbt.internal.librarymanagement.mavenint.{
PomExtraDependencyAttributes,
SbtPomExtraProperties
}
import sbt.internal.librarymanagement.{ CustomHttp => _, _ }
import sbt.internal.server.{
Definition,
LanguageServerProtocol,
@ -43,7 +43,6 @@ import sbt.internal.server.{
ServerHandler
}
import sbt.internal.testing.TestLogger
import sbt.internal.TransitiveGlobs._
import sbt.internal.util.Attributed.data
import sbt.internal.util.Types._
import sbt.internal.util._
@ -65,6 +64,10 @@ import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion
import sbt.librarymanagement._
import sbt.librarymanagement.ivy._
import sbt.librarymanagement.syntax._
import sbt.nio.Watch
import sbt.nio.Keys._
import sbt.nio.file.FileTreeView
import sbt.nio.file.syntax._
import sbt.std.TaskExtra._
import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint }
import sbt.util.CacheImplicits._
@ -82,7 +85,6 @@ import scala.xml.NodeSeq
// incremental compiler
import sbt.SlashSyntax0._
import sbt.internal.GlobLister._
import sbt.internal.inc.{
Analysis,
AnalyzingCompiler,
@ -143,9 +145,19 @@ object Defaults extends BuildCommon {
private[sbt] lazy val globalCore: Seq[Setting[_]] = globalDefaults(
defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq(
excludeFilter :== HiddenFileFilter,
pathToFileStamp :== sbt.nio.FileStamp.hash,
classLoaderCache := ClassLoaderCache(4),
fileInputs :== Nil,
inputFileStamper :== sbt.nio.FileStamper.Hash,
outputFileStamper :== sbt.nio.FileStamper.LastModified,
watchForceTriggerOnAnyChange :== true,
watchTriggers :== Nil,
clean := { () },
sbt.nio.Keys.fileAttributeMap := {
state.value
.get(sbt.nio.Keys.persistentFileAttributeMap)
.getOrElse(new sbt.nio.Keys.FileAttributeMap)
},
) ++ TaskRepository
.proxy(GlobalScope / classLoaderCache, ClassLoaderCache(4)) ++ globalIvyCore ++ globalJvmCore
) ++ globalSbtCore
@ -184,14 +196,14 @@ object Defaults extends BuildCommon {
artifactClassifier in packageSrc :== Some(SourceClassifier),
artifactClassifier in packageDoc :== Some(DocClassifier),
includeFilter :== NothingFilter,
includeFilter in unmanagedSources :== ("*.java" | "*.scala") -- DirectoryFilter,
includeFilter in unmanagedSources :== ("*.java" | "*.scala"),
includeFilter in unmanagedJars :== "*.jar" | "*.so" | "*.dll" | "*.jnilib" | "*.zip",
includeFilter in unmanagedResources :== AllPassFilter,
bgList := { bgJobService.value.jobs },
ps := psTask.value,
bgStop := bgStopTask.evaluated,
bgWaitFor := bgWaitForTask.evaluated,
bgCopyClasspath :== true
bgCopyClasspath :== true,
)
private[sbt] lazy val globalIvyCore: Seq[Setting[_]] =
@ -242,10 +254,14 @@ object Defaults extends BuildCommon {
settingsData := buildStructure.value.data,
settingsData / fileInputs := {
val baseDir = file(".").getCanonicalFile
val sourceFilter = ("*.sbt" || "*.scala" || "*.java") -- HiddenFileFilter
val sourceFilter = ("*.sbt" || "*.scala" || "*.java")
val projectDir = baseDir / "project"
Seq(
Glob(baseDir, "*.sbt" -- HiddenFileFilter, 0),
Glob(baseDir / "project", sourceFilter, Int.MaxValue)
baseDir * "*.sbt",
projectDir * sourceFilter,
// We only want to recursively look in source because otherwise we have to search
// the project target directories which is expensive.
projectDir / "src" ** sourceFilter,
)
},
trapExit :== true,
@ -271,7 +287,6 @@ object Defaults extends BuildCommon {
extraLoggers :== { _ =>
Nil
},
pollingGlobs :== Nil,
watchSources :== Nil, // Although this is deprecated, it can't be removed or it breaks += for legacy builds.
skip :== false,
taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir },
@ -296,15 +311,8 @@ object Defaults extends BuildCommon {
Previous.references :== new Previous.References,
concurrentRestrictions := defaultRestrictions.value,
parallelExecution :== true,
fileTreeRepository := state.value
.get(globalFileTreeRepository)
.map(FileTree.repository)
.getOrElse(FileTree.Repository.polling),
fileTreeView :== FileTreeView.default,
Continuous.dynamicInputs := Continuous.dynamicInputsImpl.value,
externalHooks := {
val repository = fileTreeRepository.value
compileOptions => Some(ExternalHooks(compileOptions, repository))
},
logBuffered :== false,
commands :== Nil,
showSuccess :== true,
@ -345,9 +353,7 @@ object Defaults extends BuildCommon {
watchAntiEntropyRetentionPeriod :== Watch.defaultAntiEntropyRetentionPeriod,
watchLogLevel :== Level.Info,
watchOnEnter :== Watch.defaultOnEnter,
watchOnMetaBuildEvent :== Watch.ifChanged(Watch.Reload),
watchOnInputEvent :== Watch.trigger,
watchOnTriggerEvent :== Watch.trigger,
watchOnFileInputEvent :== Watch.trigger,
watchDeletionQuarantinePeriod :== Watch.defaultDeletionQuarantinePeriod,
watchService :== Watched.newWatchService,
watchStartMessage :== Watch.defaultStartWatch,
@ -406,16 +412,24 @@ object Defaults extends BuildCommon {
)
},
unmanagedSources / fileInputs := {
val filter =
(includeFilter in unmanagedSources).value -- (excludeFilter in unmanagedSources).value
val include = (includeFilter in unmanagedSources).value
val filter = (excludeFilter in unmanagedSources).value match {
// Hidden files are already filtered out by the FileStamps method
case NothingFilter | HiddenFileFilter => include
case exclude => include -- exclude
}
val baseSources = if (sourcesInBase.value) baseDirectory.value * filter :: Nil else Nil
unmanagedSourceDirectories.value.map(_ ** filter) ++ baseSources
},
unmanagedSources := (unmanagedSources / fileInputs).value.all.map(Stamped.file),
unmanagedSources := (unmanagedSources / inputFileStamps).value.map(_._1.toFile),
managedSourceDirectories := Seq(sourceManaged.value),
managedSources := generate(sourceGenerators).value,
managedSources := {
val stamper = sbt.nio.Keys.pathToFileStamp.value
val res = generate(sourceGenerators).value
res.foreach(f => stamper(f.toPath))
res
},
sourceGenerators :== Nil,
sourceGenerators / fileOutputs := Seq(managedDirectory.value ** AllPassFilter),
sourceDirectories := Classpaths
.concatSettings(unmanagedSourceDirectories, managedSourceDirectories)
.value,
@ -430,11 +444,15 @@ object Defaults extends BuildCommon {
.concatSettings(unmanagedResourceDirectories, managedResourceDirectories)
.value,
unmanagedResources / fileInputs := {
val filter =
(includeFilter in unmanagedResources).value -- (excludeFilter in unmanagedResources).value
val include = (includeFilter in unmanagedResources).value
val filter = (excludeFilter in unmanagedResources).value match {
// Hidden files are already filtered out by the FileStamps method
case NothingFilter | HiddenFileFilter => include
case exclude => include -- exclude
}
unmanagedResourceDirectories.value.map(_ ** filter)
},
unmanagedResources := (unmanagedResources / fileInputs).value.all.map(Stamped.file),
unmanagedResources := (unmanagedResources / allInputFiles).value.map(_.toFile),
resourceGenerators :== Nil,
resourceGenerators += Def.task {
PluginDiscovery.writeDescriptors(discoveredSbtPlugins.value, resourceManaged.value)
@ -574,14 +592,11 @@ object Defaults extends BuildCommon {
globalDefaults(enableBinaryCompileAnalysis := true)
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)(
compileInputsSettings :+ (clean := Clean.taskIn(ThisScope).value)
compileInputsSettings
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
fileOutputs := Seq(
compileAnalysisFileTask.value.toGlob,
classDirectory.value ** "*.class"
) ++ (sourceGenerators / fileOutputs).value,
clean := Clean.task(ThisScope, full = false).value,
fileOutputs := Seq(classDirectory.value ** "*.class"),
compile := compileTask.value,
clean := Clean.taskIn(ThisScope).value,
internalDependencyConfigurations := InternalDependencies.configurations.value,
manipulateBytecode := compileIncremental.value,
compileIncremental := (compileIncrementalTask tag (Tags.Compile, Tags.CPU)).value,
@ -595,7 +610,12 @@ object Defaults extends BuildCommon {
else ""
s"inc_compile$extra.zip"
},
compileIncSetup := compileIncSetupTask.value,
compileIncSetup := {
val base = compileIncSetupTask.value
val incOptions =
base.incrementalCompilerOptions.withExternalHooks(ExternalHooks.default.value)
base.withIncrementalCompilerOptions(incOptions)
},
console := consoleTask.value,
collectAnalyses := Definition.collectAnalysesTask.map(_ => ()).value,
consoleQuick := consoleQuickTask.value,
@ -628,14 +648,11 @@ object Defaults extends BuildCommon {
cleanFiles := cleanFilesTask.value,
cleanKeepFiles := Vector.empty,
cleanKeepGlobs := historyPath.value.map(_.toGlob).toSeq,
clean := Clean.taskIn(ThisScope).value,
clean := Def.taskDyn(Clean.task(resolvedScoped.value.scope, full = true)).value,
consoleProject := consoleProjectTask.value,
watchTransitiveSources := watchTransitiveSourcesTask.value,
watch := watchSetting.value,
fileOutputs += target.value ** AllPassFilter,
transitiveGlobs := InputGraph.task.value,
transitiveInputs := InputGraph.inputsTask.value,
transitiveTriggers := InputGraph.triggersTask.value,
transitiveDynamicInputs := SettingsGraph.task.value,
)
def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] =
@ -1228,7 +1245,10 @@ object Defaults extends BuildCommon {
exclude: ScopedTaskable[FileFilter]
): Initialize[Task[Seq[File]]] = Def.task {
val filter = include.toTask.value -- exclude.toTask.value
dirs.toTask.value.map(_ ** filter).all.map(Stamped.file)
val view = fileTreeView.value
view.list(dirs.toTask.value.map(_ ** filter)).collect {
case (p, a) if !a.isDirectory => p.toFile
}
}
def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] =
Def.setting {
@ -1594,7 +1614,14 @@ object Defaults extends BuildCommon {
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
}
analysisResult.analysis
val map = sbt.nio.Keys.fileAttributeMap.value
val analysis = analysisResult.analysis
import scala.collection.JavaConverters._
analysis.readStamps.getAllProductStamps.asScala.foreach {
case (f, s) =>
map.put(f.toPath, sbt.nio.FileStamp.LastModified(s.getLastModified.orElse(-1L)))
}
analysis
}
def compileIncrementalTask = Def.task {
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
@ -1603,13 +1630,14 @@ object Defaults extends BuildCommon {
private val incCompiler = ZincUtil.defaultIncrementalCompiler
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Inputs): CompileResult = {
lazy val x = s.text(ExportStream)
def onArgs(cs: Compilers) =
def onArgs(cs: Compilers) = {
cs.withScalac(
cs.scalac match {
case ac: AnalyzingCompiler => ac.onArgs(exported(x, "scalac"))
case x => x
}
)
}
// .withJavac(
// cs.javac.onArgs(exported(x, "javac"))
//)
@ -1677,13 +1705,7 @@ object Defaults extends BuildCommon {
Inputs.of(
compilers.value,
options,
externalHooks
.value(options)
.map { hooks =>
val newOptions = setup.incrementalCompilerOptions.withExternalHooks(hooks)
setup.withIncrementalCompilerOptions(newOptions)
}
.getOrElse(setup),
setup,
previousCompile.value
)
}
@ -2056,6 +2078,8 @@ object Classpaths {
transitiveClassifiers :== Seq(SourceClassifier, DocClassifier),
sourceArtifactTypes :== Artifact.DefaultSourceTypes.toVector,
docArtifactTypes :== Artifact.DefaultDocTypes.toVector,
cleanKeepFiles :== Nil,
cleanKeepGlobs :== Nil,
fileOutputs :== Nil,
sbtDependency := {
val app = appConfiguration.value
@ -2073,9 +2097,7 @@ object Classpaths {
shellPrompt := shellPromptFromState,
dynamicDependency := { (): Unit },
transitiveClasspathDependency := { (): Unit },
transitiveGlobs := { (Nil: Seq[Glob], Nil: Seq[Glob]) },
transitiveInputs := Nil,
transitiveTriggers := Nil,
transitiveDynamicInputs :== Nil,
)
)
@ -3037,31 +3059,21 @@ object Classpaths {
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val dirs = productDirectories.value
def containsClassFile(fs: List[File]): Boolean =
(fs exists { dir =>
(dir ** DirectoryFilter).get exists { d =>
(d * "*.class").get.nonEmpty
}
})
val view = fileTreeView.value
def containsClassFile(): Boolean = view.list(dirs.map(_ ** "*.class")).nonEmpty
TrackLevel.intersection(track, exportToInternal.value) match {
case TrackLevel.TrackAlways =>
Def.task {
products.value map { (_, compile.value) }
}
case TrackLevel.TrackIfMissing if !containsClassFile(dirs.toList) =>
case TrackLevel.TrackIfMissing if !containsClassFile() =>
Def.task {
products.value map { (_, compile.value) }
}
case _ =>
Def.task {
val analysisOpt = previousCompile.value.analysis.toOption
dirs map { x =>
(
x,
if (analysisOpt.isDefined) analysisOpt.get
else Analysis.empty
)
}
val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
dirs.map(_ -> analysis)
}
}
}
@ -3394,8 +3406,9 @@ object Classpaths {
base: File,
filter: FileFilter,
excl: FileFilter
): Classpath =
): Classpath = {
(base * (filter -- excl) +++ (base / config.name).descendantsExcept(filter, excl)).classpath
}
@deprecated(
"The method only works for Scala 2, use the overloaded version to support both Scala 2 and Scala 3",
"1.1.5"

View File

@ -16,9 +16,8 @@ import sbt.Project.richInitializeTask
import sbt.Scope.Global
import sbt.internal.Aggregation.KeyValue
import sbt.internal.TaskName._
import sbt.internal.TransitiveGlobs._
import sbt.internal.util._
import sbt.internal.{ BuildStructure, GCUtil, Load, TaskProgress, TaskTimings, TaskTraceEvent, _ }
import sbt.internal._
import sbt.librarymanagement.{ Resolver, UpdateReport }
import sbt.std.Transform.DummyTaskMap
import sbt.util.{ Logger, Show }
@ -572,25 +571,6 @@ object EvaluateTask {
stream
}).value
})
} else if (scoped.key == transitiveInputs.key) {
scoped.scope.task.toOption.toSeq.map { key =>
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
transitiveInputs in scoped.scope := InputGraph.inputsTask(updatedKey).value
}
} else if (scoped.key == transitiveTriggers.key) {
scoped.scope.task.toOption.toSeq.map { key =>
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
transitiveTriggers in scoped.scope := InputGraph.triggersTask(updatedKey).value
}
} else if (scoped.key == transitiveGlobs.key) {
scoped.scope.task.toOption.toSeq.map { key =>
val updatedKey = ScopedKey(scoped.scope.copy(task = Zero), key)
transitiveGlobs in scoped.scope := InputGraph.task(updatedKey).value
}
} else if (scoped.key == dynamicDependency.key) {
(dynamicDependency in scoped.scope := { () }) :: Nil
} else if (scoped.key == transitiveClasspathDependency.key) {
(transitiveClasspathDependency in scoped.scope := { () }) :: Nil
} else {
Nil
}

View File

@ -7,7 +7,7 @@
package sbt
import java.io.{ File, InputStream }
import java.io.File
import java.net.URL
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
@ -21,14 +21,13 @@ import sbt.internal.inc.ScalaInstance
import sbt.internal.io.WatchState
import sbt.internal.librarymanagement.{ CompatibilityWarningOptions, IvySbt }
import sbt.internal.server.ServerHandler
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ AttributeKey, SourcePosition }
import sbt.io.FileEventMonitor.Event
import sbt.io._
import sbt.librarymanagement.Configurations.CompilerPlugin
import sbt.librarymanagement.LibraryManagementCodec._
import sbt.librarymanagement._
import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, UpdateOptions }
import sbt.nio.file.Glob
import sbt.testing.Framework
import sbt.util.{ Level, Logger }
import xsbti.compile._
@ -94,37 +93,13 @@ object Keys {
val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting)
val fileTreeRepository = taskKey[FileTree.Repository]("A repository of the file system.").withRank(DSetting)
val pollInterval = settingKey[FiniteDuration]("Interval between checks for modified sources by the continuous execution command.").withRank(BMinusSetting)
val pollingGlobs = settingKey[Seq[Glob]]("Directories that cannot be cached and must always be rescanned. Typically these will be NFS mounted or something similar.").withRank(DSetting)
val watchAntiEntropy = settingKey[FiniteDuration]("Duration for which the watch EventMonitor will ignore events for a file after that file has triggered a build.").withRank(BMinusSetting)
val watchAntiEntropyRetentionPeriod = settingKey[FiniteDuration]("Wall clock Duration for which a FileEventMonitor will store anti-entropy events. This prevents spurious triggers when a task takes a long time to run. Higher values will consume more memory but make spurious triggers less likely.").withRank(BMinusSetting)
val watchDeletionQuarantinePeriod = settingKey[FiniteDuration]("Period for which deletion events will be quarantined. This is to prevent spurious builds when a file is updated with a rename which manifests as a file deletion followed by a file creation. The higher this value is set, the longer the delay will be between a file deletion and a build trigger but the less likely it is for a spurious trigger.").withRank(DSetting)
val watchLogLevel = settingKey[sbt.util.Level.Value]("Transform the default logger in continuous builds.").withRank(DSetting)
val watchInputHandler = settingKey[InputStream => Watch.Action]("Function that is periodically invoked to determine if the continuous build should be stopped or if a build should be triggered. It will usually read from stdin to respond to user commands. This is only invoked if watchInputStream is set.").withRank(DSetting)
val watchInputStream = taskKey[InputStream]("The input stream to read for user input events. This will usually be System.in").withRank(DSetting)
val watchInputParser = settingKey[Parser[Watch.Action]]("A parser of user input that can be used to trigger or exit a continuous build").withRank(DSetting)
val watchOnEnter = settingKey[() => Unit]("Function to run prior to beginning a continuous build. This will run before the continuous task(s) is(are) first evaluated.").withRank(DSetting)
val watchOnExit = settingKey[() => Unit]("Function to run upon exit of a continuous build. It can be used to cleanup resources used during the watch.").withRank(DSetting)
val watchOnInputEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the transitive inputs. This is only invoked if watchOnEvent is not explicitly set.").withRank(DSetting)
val watchOnEvent = settingKey[Continuous.Arguments => Event[FileAttributes] => Watch.Action]("Determines how to handle a file event. The Seq[Glob] contains all of the transitive inputs for the task(s) being run by the continuous build.").withRank(DSetting)
val watchOnMetaBuildEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the meta build triggers.").withRank(DSetting)
val watchOnTermination = settingKey[(Watch.Action, String, Int, State) => State]("Transforms the state upon completion of a watch. The String argument is the command that was run during the watch. The Int parameter specifies how many times the command was run during the watch.").withRank(DSetting)
val watchOnTrigger = settingKey[Continuous.Arguments => Event[FileAttributes] => Unit]("Callback to invoke when a continuous build triggers. The first parameter is the number of previous watch task invocations. The second parameter is the Event that triggered this build").withRank(DSetting)
val watchOnTriggerEvent = settingKey[(Int, Event[FileAttributes]) => Watch.Action]("Callback to invoke if an event is triggered in a continuous build by one of the transitive triggers. This is only invoked if watchOnEvent is not explicitly set.").withRank(DSetting)
val watchOnIteration = settingKey[Int => Watch.Action]("Function that is invoked before waiting for file system events or user input events. This is only invoked if watchOnStart is not explicitly set.").withRank(DSetting)
val watchOnStart = settingKey[Continuous.Arguments => () => Watch.Action]("Function is invoked before waiting for file system or input events. The returned Action is used to either trigger the build, terminate the watch or wait for events.").withRank(DSetting)
val watchService = settingKey[() => WatchService]("Service to use to monitor file system changes.").withRank(BMinusSetting).withRank(DSetting)
val watchStartMessage = settingKey[(Int, String, Seq[String]) => Option[String]]("The message to show when triggered execution waits for sources to change. The parameters are the current watch iteration count, the current project name and the tasks that are being run with each build.").withRank(DSetting)
// The watchTasks key should really be named watch, but that is already taken by the deprecated watch key. I'd be surprised if there are any plugins that use it so I think we should consider breaking binary compatibility to rename this task.
val watchTasks = InputKey[StateTransform]("watch", "Watch a task (or multiple tasks) and rebuild when its file inputs change or user input is received. The semantics are more or less the same as the `~` command except that it cannot transform the state on exit. This means that it cannot be used to reload the build.").withRank(DSetting)
val watchTrackMetaBuild = settingKey[Boolean]("Toggles whether or not changing the build files (e.g. **/*.sbt, project/**/(*.scala | *.java)) should automatically trigger a project reload").withRank(DSetting)
val watchTriggeredMessage = settingKey[(Int, Event[FileAttributes], Seq[String]) => Option[String]]("The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count.").withRank(DSetting)
// Deprecated watch apis
@deprecated("This is no longer used for continuous execution", "1.3.0")
val watch = SettingKey(BasicKeys.watch)
@deprecated("WatchSource has been replaced by Glob. To add file triggers to a task with key: Key, set `Key / watchTriggers := Seq[Glob](...)`.", "1.3.0")
val watchSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in this project for continuous execution to watch for changes.").withRank(BMinusSetting)
@deprecated("This is for legacy builds only and will be removed in a future version of sbt", "1.3.0")
val watchTransitiveSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in all projects for continuous execution to watch.").withRank(CSetting)
@ -150,7 +125,6 @@ object Keys {
val managedSources = taskKey[Seq[File]]("Sources generated by the build.").withRank(BTask)
val sources = taskKey[Seq[File]]("All sources, both managed and unmanaged.").withRank(BTask)
val sourcesInBase = settingKey[Boolean]("If true, sources from the project's base directory are included as main sources.")
val fileInputs = settingKey[Seq[Glob]]("The file globs that are used by a task. This setting will generally be scoped per task. It will also be used to determine the sources to watch during continuous execution.")
val watchTriggers = settingKey[Seq[Glob]]("Describes files that should trigger a new continuous build.")
// Filters
@ -169,14 +143,11 @@ object Keys {
// Output paths
val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
@deprecated("Clean is now implemented using globs.", "1.3.0")
val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
@deprecated("Clean is now implemented using globs. Prefer the cleanKeepGlobs task", "1.3.0")
val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
val crossPaths = settingKey[Boolean]("If true, enables cross paths, which distinguish input and output directories for cross-building.").withRank(ASetting)
val taskTemporaryDirectory = settingKey[File]("Directory used for temporary files for tasks that is deleted after each task execution.").withRank(DSetting)
val fileOutputs = taskKey[Seq[Glob]]("Describes the output files of a task")
// Generators
val sourceGenerators = settingKey[Seq[Task[Seq[File]]]]("List of tasks that generate sources.").withRank(CSetting)
@ -240,7 +211,6 @@ object Keys {
val copyResources = taskKey[Seq[(File, File)]]("Copies resources to the output directory.").withRank(AMinusTask)
val aggregate = settingKey[Boolean]("Configures task aggregation.").withRank(BMinusSetting)
val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask)
val externalHooks = taskKey[CompileOptions => Option[ExternalHooks]]("External hooks for modifying the internal behavior of the incremental compiler.").withRank(BMinusSetting)
// package keys
val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask)
@ -508,14 +478,6 @@ object Keys {
@deprecated("No longer used", "1.3.0")
private[sbt] val executeProgress = settingKey[State => TaskProgress]("Experimental task execution listener.").withRank(DTask)
private[sbt] val globalFileTreeRepository = AttributeKey[FileTreeRepository[FileAttributes]](
"global-file-tree-repository",
"Provides a view into the file system that may or may not cache the tree in memory",
1000
)
private[sbt] val dynamicDependency = settingKey[Unit]("Leaves a breadcrumb that the scoped task is evaluated inside of a dynamic task")
private[sbt] val transitiveClasspathDependency = settingKey[Unit]("Leaves a breadcrumb that the scoped task has transitive classpath dependencies")
val stateStreams = AttributeKey[Streams]("stateStreams", "Streams manager, which provides streams for different contexts. Setting this on State will override the default Streams implementation.")
val resolvedScoped = Def.resolvedScoped
val pluginData = taskKey[PluginData]("Information from the plugin build needed in the main build definition.").withRank(DTask)

View File

@ -18,7 +18,6 @@ import sbt.Project.LoadAction
import sbt.compiler.EvalImports
import sbt.internal.Aggregation.AnyKeys
import sbt.internal.CommandStrings.BootCommand
import sbt.internal.FileManagement.CopiedFileTreeRepository
import sbt.internal._
import sbt.internal.inc.ScalaInstance
import sbt.internal.util.Types.{ const, idFun }
@ -893,31 +892,16 @@ object BuiltinCommands {
}
s.put(Keys.stateCompilerCache, cache)
}
private[sbt] val rawGlobalFileTreeRepository = AttributeKey[FileTreeRepository[FileAttributes]](
"raw-global-file-tree-repository",
"Provides a view into the file system that may or may not cache the tree in memory",
1000
)
private[sbt] def registerGlobalCaches(s: State): State =
try {
val cleanedUp = new AtomicBoolean(false)
def cleanup(): Unit = {
s.get(rawGlobalFileTreeRepository).foreach(_.close())
s.get(Keys.taskRepository).foreach(_.close())
()
}
cleanup()
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
val fileCache = System.getProperty("sbt.io.filecache", "validate")
val newState = s
.addExitHook(if (cleanedUp.compareAndSet(false, true)) cleanup())
s.addExitHook(if (cleanedUp.compareAndSet(false, true)) cleanup())
.put(Keys.taskRepository, new TaskRepository.Repr)
.put(rawGlobalFileTreeRepository, fileTreeRepository)
if (fileCache == "false" || (fileCache != "true" && Util.isWindows)) newState
else {
val copied = new CopiedFileTreeRepository(fileTreeRepository)
newState.put(Keys.globalFileTreeRepository, copied)
}
} catch {
case NonFatal(_) => s
}

View File

@ -86,7 +86,7 @@ object BuildUtil {
}
def baseImports: Seq[String] =
"import _root_.scala.xml.{TopScope=>$scope}" :: "import _root_.sbt._" :: "import _root_.sbt.Keys._" :: Nil
"import _root_.scala.xml.{TopScope=>$scope}" :: "import _root_.sbt._" :: "import _root_.sbt.Keys._" :: "import _root_.sbt.nio.Keys._" :: Nil
def getImports(unit: BuildUnit): Seq[String] =
unit.plugins.detected.imports ++ unit.definitions.dslDefinitions.imports

View File

@ -9,86 +9,150 @@ package sbt
package internal
import java.io.IOException
import java.nio.file.{ DirectoryNotEmptyException, Files }
import java.nio.file.{ DirectoryNotEmptyException, Files, Path }
import sbt.Def._
import sbt.Keys._
import sbt.Project.richInitializeTask
import sbt.io.AllPassFilter
import sbt.io.syntax._
import sbt.io.{ AllPassFilter, FileTreeView, TypedPath }
import sbt.nio.Keys._
import sbt.nio.file._
import sbt.nio.file.syntax._
import sbt.util.Level
import sjsonnew.JsonFormat
object Clean {
private[sbt] object Clean {
def deleteContents(file: File, exclude: TypedPath => Boolean): Unit =
deleteContents(file, exclude, FileTreeView.DEFAULT, tryDelete((_: String) => {}))
def deleteContents(
file: File,
exclude: TypedPath => Boolean,
view: FileTreeView,
delete: File => Unit
private[sbt] def deleteContents(file: File, exclude: File => Boolean): Unit =
deleteContents(
file.toPath,
path => exclude(path.toFile),
FileTreeView.default,
tryDelete((_: String) => {})
)
private[this] def deleteContents(
path: Path,
exclude: Path => Boolean,
view: FileTreeView.Nio[FileAttributes],
delete: Path => Unit
): Unit = {
def deleteRecursive(file: File): Unit = {
view.list(file * AllPassFilter).filterNot(exclude).foreach {
case dir if dir.isDirectory =>
deleteRecursive(dir.toPath.toFile)
delete(dir.toPath.toFile)
case f => delete(f.toPath.toFile)
}
def deleteRecursive(path: Path): Unit = {
view
.list(Glob(path, AnyPath))
.filterNot { case (p, _) => exclude(p) }
.foreach {
case (dir, attrs) if attrs.isDirectory =>
deleteRecursive(dir)
delete(dir)
case (file, _) => delete(file)
}
}
deleteRecursive(file)
deleteRecursive(path)
}
/**
* Provides an implementation for the clean task. It delegates to [[taskIn]] using the
* resolvedScoped key to set the scope.
* @return the clean task definition.
*/
def task: Def.Initialize[Task[Unit]] =
Def.taskDyn(taskIn(Keys.resolvedScoped.value.scope)) tag Tags.Clean
private[this] def cleanFilter(scope: Scope): Def.Initialize[Task[Path => Boolean]] = Def.task {
val excludes = (cleanKeepFiles in scope).value.map {
// This mimics the legacy behavior of cleanFilesTask
case f if f.isDirectory => f * AllPassFilter
case f => f.toGlob
} ++ (cleanKeepGlobs in scope).value
p: Path => excludes.exists(_.matches(p))
}
private[this] def cleanDelete(scope: Scope): Def.Initialize[Task[Path => Unit]] = Def.task {
// Don't use a regular logger because the logger actually writes to the target directory.
val debug = (logLevel in scope).?.value.orElse(state.value.get(logLevel.key)) match {
case Some(Level.Debug) =>
(string: String) => println(s"[debug] $string")
case _ =>
(_: String) => {}
}
tryDelete(debug)
}
/**
* Implements the clean task in a given scope. It uses the outputs task value in the provided
* scope to determine which files to delete.
*
* @param scope the scope in which the clean task is implemented
* @return the clean task definition.
*/
def taskIn(scope: Scope): Def.Initialize[Task[Unit]] =
Def.task {
val excludes = cleanKeepFiles.value.map {
// This mimics the legacy behavior of cleanFilesTask
case f if f.isDirectory => f * AllPassFilter
case f => f.toGlob
} ++ cleanKeepGlobs.value
val excludeFilter: TypedPath => Boolean = excludes.toTypedPathFilter
// Don't use a regular logger because the logger actually writes to the target directory.
val debug = (logLevel in scope).?.value.orElse(state.value.get(logLevel.key)) match {
case Some(Level.Debug) =>
(string: String) => println(s"[debug] $string")
case _ =>
(_: String) => {}
}
val delete = tryDelete(debug)
cleanFiles.value.sorted.reverseIterator.foreach(delete)
(fileOutputs in scope).value.foreach { g =>
val filter: TypedPath => Boolean = {
val globFilter = g.toTypedPathFilter
tp => !globFilter(tp) || excludeFilter(tp)
private[sbt] def task(
scope: Scope,
full: Boolean
): Def.Initialize[Task[Unit]] =
Def.taskDyn {
val state = Keys.state.value
val extracted = Project.extract(state)
val view = fileTreeView.value
val manager = streamsManager.value
Def.task {
val excludeFilter = cleanFilter(scope).value
val delete = cleanDelete(scope).value
val targetDir = (target in scope).?.value.map(_.toPath)
val targetFiles = (if (full) targetDir else None).fold(Nil: Seq[Path]) { t =>
view.list(t.toGlob / **).collect { case (p, _) if !excludeFilter(p) => p }
}
deleteContents(g.base.toFile, filter, FileTreeView.DEFAULT, delete)
delete(g.base.toFile)
val allFiles = (cleanFiles in scope).?.value.toSeq
.flatMap(_.map(_.toPath)) ++ targetFiles
allFiles.sorted.reverseIterator.foreach(delete)
// This is the special portion of the task where we clear out the relevant streams
// and file outputs of a task.
val streamsKey = scope.task.toOption.map(k => ScopedKey(scope.copy(task = Zero), k))
val stampsKey =
extracted.structure.data.getDirect(scope, inputFileStamps.key) match {
case Some(_) => ScopedKey(scope, inputFileStamps.key) :: Nil
case _ => Nil
}
val streamsGlobs =
(streamsKey.toSeq ++ stampsKey).map(k => manager(k).cacheDirectory.toGlob / **)
((fileOutputs in scope).value.filter(g => targetDir.fold(true)(g.base.startsWith)) ++ streamsGlobs)
.foreach { g =>
val filter: Path => Boolean = { path =>
!g.matches(path) || excludeFilter(path)
}
deleteContents(g.base, filter, FileTreeView.default, delete)
delete(g.base)
}
}
} tag Tags.Clean
private def tryDelete(debug: String => Unit): File => Unit = file => {
private[sbt] trait ToSeqPath[T] {
def apply(t: T): Seq[Path]
}
private[sbt] object ToSeqPath {
implicit val identitySeqPath: ToSeqPath[Seq[Path]] = identity _
implicit val seqFile: ToSeqPath[Seq[File]] = _.map(_.toPath)
implicit val path: ToSeqPath[Path] = _ :: Nil
implicit val file: ToSeqPath[File] = _.toPath :: Nil
}
private[this] implicit class ToSeqPathOps[T](val t: T) extends AnyVal {
def toSeqPath(implicit toSeqPath: ToSeqPath[T]): Seq[Path] = toSeqPath(t)
}
private[sbt] def cleanFileOutputTask[T: JsonFormat: ToSeqPath](
taskKey: TaskKey[T]
): Def.Initialize[Task[Unit]] =
Def.taskDyn {
val scope = taskKey.scope in taskKey.key
Def.task {
val targetDir = (target in scope).value.toPath
val filter = cleanFilter(scope).value
// We do not want to inadvertently delete files that are not in the target directory.
val excludeFilter: Path => Boolean = path => !path.startsWith(targetDir) || filter(path)
val delete = cleanDelete(scope).value
taskKey.previous.foreach(_.toSeqPath.foreach(p => if (!excludeFilter(p)) delete(p)))
}
} tag Tags.Clean
private[this] def tryDelete(debug: String => Unit): Path => Unit = path => {
try {
debug(s"clean -- deleting file $file")
Files.deleteIfExists(file.toPath)
debug(s"clean -- deleting file $path")
Files.deleteIfExists(path)
()
} catch {
case _: DirectoryNotEmptyException =>
debug(s"clean -- unable to delete non-empty directory $file")
debug(s"clean -- unable to delete non-empty directory $path")
case e: IOException =>
debug(s"Caught unexpected exception $e deleting $file")
debug(s"Caught unexpected exception $e deleting $path")
}
}
}

View File

@ -14,7 +14,7 @@ import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.atomic._
import sbt.BasicKeys._
import sbt.Watch.NullLogger
import sbt.nio.Watch.NullLogger
import sbt.internal.langserver.{ LogMessageParams, MessageType }
import sbt.internal.server._
import sbt.internal.util.codec.JValueFormats

View File

@ -8,7 +8,7 @@
package sbt
package internal
import java.io.{ ByteArrayInputStream, InputStream }
import java.io.{ ByteArrayInputStream, InputStream, File => _ }
import java.util.concurrent.atomic.AtomicInteger
import sbt.BasicCommandStrings.{
@ -19,17 +19,22 @@ import sbt.BasicCommandStrings.{
}
import sbt.BasicCommands.otherCommandParser
import sbt.Def._
import sbt.Keys._
import sbt.Scope.Global
import sbt.internal.FileManagement.CopiedFileTreeRepository
import sbt.internal.LabeledFunctions._
import sbt.internal.io.WatchState
import sbt.internal.nio._
import sbt.internal.util.complete.Parser._
import sbt.internal.util.complete.{ Parser, Parsers }
import sbt.internal.util.{ AttributeKey, Util }
import sbt.io._
import sbt.internal.util.{ AttributeKey, JLine, Util }
import sbt.nio.Keys.{ fileInputs, _ }
import sbt.nio.Watch.{ Creation, Deletion, Update }
import sbt.nio.file.FileAttributes
import sbt.nio.{ FileStamp, FileStamper, Watch }
import sbt.util.{ Level, _ }
import scala.annotation.tailrec
import scala.collection.mutable
import scala.concurrent.duration.FiniteDuration.FiniteDurationIsOrdered
import scala.concurrent.duration._
import scala.util.Try
@ -60,43 +65,29 @@ import scala.util.Try
* the deprecated apis should no longer be supported.
*
*/
object Continuous extends DeprecatedContinuous {
private[sbt] object Continuous extends DeprecatedContinuous {
private type Event = FileEvent[FileAttributes]
/**
* Provides the dynamic inputs to the continuous build callbacks that cannot be stored as
* settings. This wouldn't need to exist if there was a notion of a lazy setting in sbt.
*
* @param logger the Logger
* @param inputs the transitive task inputs
* @param triggers the transitive task triggers
*/
final class Arguments private[Continuous] (
private[sbt] final class Arguments private[Continuous] (
val logger: Logger,
val inputs: Seq[Glob],
val triggers: Seq[Glob]
val inputs: Seq[DynamicInput]
)
/**
* Provides a copy of System.in that can be scanned independently from System.in itself. This task
* will only be valid during a continuous build started via `~` or the `watch` task. The
* motivation is that a plugin may want to completely override the parsing of System.in which
* is not straightforward since the default implementation is hard-wired to read from and
* parse System.in. If an invalid parser is provided by [[Keys.watchInputParser]] and
* [[Keys.watchInputStream]] is set to this task, then a custom parser can be provided via
* [[Keys.watchInputHandler]] and the default System.in processing will not occur.
*
* @return the duplicated System.in
*/
def dupedSystemIn: Def.Initialize[Task[InputStream]] = Def.task {
Keys.state.value.get(DupedSystemIn).map(_.duped).getOrElse(System.in)
}
/**
* Create a function from InputStream => [[Watch.Action]] from a [[Parser]]. This is intended
* to be used to set the watchInputHandler setting for a task.
*
* @param parser the parser
* @return the function
*/
def defaultInputHandler(parser: Parser[Watch.Action]): InputStream => Watch.Action = {
private def defaultInputHandler(parser: Parser[Watch.Action]): InputStream => Watch.Action = {
val builder = new StringBuilder
val any = matched(Parsers.any.*)
val fullParser = any ~> parser ~ any
@ -108,41 +99,36 @@ object Continuous extends DeprecatedContinuous {
* Implements continuous execution. It works by first parsing the command and generating a task to
* run with each build. It can run multiple commands that are separated by ";" in the command
* input. If any of these commands are invalid, the watch will immediately exit.
*
* @return a Command that can be used by sbt to implement continuous builds.
*/
private[sbt] def continuous: Command =
Command(ContinuousExecutePrefix, continuousBriefHelp, continuousDetail)(continuousParser) {
case (state, (initialCount, command)) =>
runToTermination(state, command, initialCount, isCommand = true)
case (s, (initialCount, command)) =>
runToTermination(s, command, initialCount, isCommand = true)
}
/**
* The task implementation is quite similar to the command implementation. The tricky part is that
* we have to modify the Task.info to apply the state transformation after the task completes.
*
* @return the [[InputTask]]
*/
private[sbt] def continuousTask: Def.Initialize[InputTask[StateTransform]] =
Def.inputTask {
val (initialCount, command) = continuousParser.parsed
new StateTransform(
runToTermination(Keys.state.value, command, initialCount, isCommand = false)
runToTermination(state.value, command, initialCount, isCommand = false)
)
}
private[this] val DupedSystemIn =
AttributeKey[DupedInputStream](
"duped-system-in",
"Receives a copy of all of the bytes from System.in.",
10000
)
val dynamicInputs = taskKey[FileTree.DynamicInputs](
private[sbt] val dynamicInputs = taskKey[Option[mutable.Set[DynamicInput]]](
"The input globs found during task evaluation that are used in watch."
)
def dynamicInputsImpl: Def.Initialize[Task[FileTree.DynamicInputs]] = Def.task {
Keys.state.value.get(DynamicInputs).getOrElse(FileTree.DynamicInputs.none)
}
private[sbt] def dynamicInputsImpl: Def.Initialize[Task[Option[mutable.Set[DynamicInput]]]] =
Def.task(Keys.state.value.get(DynamicInputs))
private[sbt] val DynamicInputs =
AttributeKey[FileTree.DynamicInputs](
AttributeKey[mutable.Set[DynamicInput]](
"dynamic-inputs",
"Stores the inputs (dynamic and regular) for a task",
10000
@ -179,24 +165,27 @@ object Continuous extends DeprecatedContinuous {
)(implicit extracted: Extracted, logger: Logger): Config = {
// Extract all of the globs that we will monitor during the continuous build.
val (inputs, triggers) = {
val configs = scopedKey.get(Keys.internalDependencyConfigurations).getOrElse(Nil)
val args = new InputGraph.Arguments(scopedKey, extracted, compiledMap, logger, configs, state)
InputGraph.transitiveGlobs(args)
} match {
case (i: Seq[Glob], t: Seq[Glob]) => (i.distinct.sorted, t.distinct.sorted)
val inputs = {
val configs = scopedKey.get(internalDependencyConfigurations).getOrElse(Nil)
val args =
new SettingsGraph.Arguments(scopedKey, extracted, compiledMap, logger, configs, state)
SettingsGraph.transitiveDynamicInputs(args)
}
val repository = getRepository(state)
val registeringSet = state.get(DynamicInputs).get
registeringSet.value.foreach(_ ++= inputs)
(inputs ++ triggers).foreach(repository.register(_: Glob))
val dynamicInputs = state
.get(DynamicInputs)
.getOrElse {
val msg = "Uninitialized dynamic inputs in continuous build (should be unreachable!)"
throw new IllegalStateException(msg)
}
dynamicInputs ++= inputs
logger.debug(s"[watch] [${scopedKey.show}] Found inputs: ${inputs.map(_.glob).mkString(",")}")
inputs.foreach(i => repository.register(i.glob))
val watchSettings = new WatchSettings(scopedKey)
new Config(
scopedKey,
repository,
() => registeringSet.value.fold(Nil: Seq[Glob])(_.toSeq).sorted,
triggers,
() => dynamicInputs.toSeq.sorted,
watchSettings
)
}
@ -204,7 +193,7 @@ object Continuous extends DeprecatedContinuous {
lazy val exception =
new IllegalStateException("Tried to access FileTreeRepository for uninitialized state")
state
.get(Keys.globalFileTreeRepository)
.get(globalFileTreeRepository)
.getOrElse(throw exception)
}
@ -232,7 +221,7 @@ object Continuous extends DeprecatedContinuous {
* if they are not visible in the input graph due to the use of Def.taskDyn.
*/
def makeTask(cmd: String): (String, State, () => Boolean) = {
val newState = s.put(DynamicInputs, FileTree.DynamicInputs.empty)
val newState = s.put(DynamicInputs, mutable.Set.empty[DynamicInput])
val task = Parser
.parse(cmd, Command.combine(newState.definedCommands)(newState))
.getOrElse(
@ -276,32 +265,34 @@ object Continuous extends DeprecatedContinuous {
f(commands, s, valid, invalid)
}
private[this] def withCharBufferedStdIn[R](f: InputStream => R): R =
if (!Util.isWindows) {
val terminal = JLine.terminal
terminal.init()
terminal.setEchoEnabled(true)
f(terminal.wrapInIfNeeded(System.in))
} else f(System.in)
private[sbt] def runToTermination(
state: State,
command: String,
count: Int,
isCommand: Boolean
): State = Watch.withCharBufferedStdIn { in =>
val duped = new DupedInputStream(in)
): State = withCharBufferedStdIn { in =>
implicit val extracted: Extracted = Project.extract(state)
val (stateWithRepo, repo) = state.get(Keys.globalFileTreeRepository) match {
case Some(r) => (state, r)
case _ =>
val repo = if ("polling" == System.getProperty("sbt.watch.mode")) {
val service =
new PollingWatchService(extracted.getOpt(Keys.pollInterval).getOrElse(500.millis))
FileTreeRepository.legacy(FileAttributes.default _, (_: Any) => {}, service)
} else {
state
.get(BuiltinCommands.rawGlobalFileTreeRepository)
.map(new CopiedFileTreeRepository(_))
.getOrElse(FileTreeRepository.default(FileAttributes.default))
}
(state.put(Keys.globalFileTreeRepository, repo), repo)
val repo = if ("polling" == System.getProperty("sbt.watch.mode")) {
val service = new PollingWatchService(extracted.getOpt(pollInterval).getOrElse(500.millis))
FileTreeRepository
.legacy((_: Any) => {}, service)
} else {
FileTreeRepository.default
}
try {
setup(stateWithRepo.put(DupedSystemIn, duped), command) { (commands, s, valid, invalid) =>
EvaluateTask.withStreams(extracted.structure, s)(_.use(Keys.streams in Global) { streams =>
val stateWithRepo = state
.put(globalFileTreeRepository, repo)
.put(persistentFileAttributeMap, new sbt.nio.Keys.FileAttributeMap)
setup(stateWithRepo, command) { (commands, s, valid, invalid) =>
EvaluateTask.withStreams(extracted.structure, s)(_.use(streams in Global) { streams =>
implicit val logger: Logger = streams.log
if (invalid.isEmpty) {
val currentCount = new AtomicInteger(count)
@ -322,7 +313,6 @@ object Continuous extends DeprecatedContinuous {
val terminationAction = Watch(task, callbacks.onStart, callbacks.nextEvent)
callbacks.onTermination(terminationAction, command, currentCount.get(), state)
} finally {
configs.foreach(_.repository.close())
callbacks.onExit()
}
} else {
@ -354,7 +344,7 @@ object Continuous extends DeprecatedContinuous {
inputs: Seq[(String, State)]
)(implicit extracted: Extracted, logger: Logger): Seq[Config] = {
val commandKeys = inputs.map { case (c, s) => s -> parseCommand(c, s) }
val compiledMap = InputGraph.compile(extracted.structure)
val compiledMap = SettingsGraph.compile(extracted.structure)
commandKeys.flatMap {
case (s, scopedKeys) => scopedKeys.map(getConfig(s, _, compiledMap))
}
@ -372,14 +362,14 @@ object Continuous extends DeprecatedContinuous {
* Aggregates a collection of [[Config]] instances into a single instance of [[Callbacks]].
* This allows us to monitor and respond to changes for all of
* the inputs and triggers for each of the tasks that we are monitoring in the continuous build.
* To monitor all of the inputs and triggers, it creates a [[FileEventMonitor]] for each task
* and then aggregates each of the individual [[FileEventMonitor]] instances into an aggregated
* To monitor all of the inputs and triggers, it creates a monitor for each task
* and then aggregates each of the individual monitor instances into an aggregated
* instance. It aggregates all of the event callbacks into a single callback that delegates
* to each of the individual callbacks. For the callbacks that return a [[Watch.Action]],
* the aggregated callback will select the minimum [[Watch.Action]] returned where the ordering
* is such that the highest priority [[Watch.Action]] have the lowest values. Finally, to
* handle user input, we read from the provided input stream and buffer the result. Each
* task's input parser is then applied to the buffered result and, again, we return the mimimum
* task's input parser is then applied to the buffered result and, again, we return the minimum
* [[Watch.Action]] returned by the parsers (when the parsers fail, they just return
* [[Watch.Ignore]], which is the lowest priority [[Watch.Action]].
*
@ -405,7 +395,7 @@ object Continuous extends DeprecatedContinuous {
val onEnter = () => configs.foreach(_.watchSettings.onEnter())
val onStart: () => Watch.Action = getOnStart(project, commands, configs, rawLogger, count)
val nextInputEvent: () => Watch.Action = parseInputEvents(configs, state, inputStream, logger)
val (nextFileEvent, cleanupFileMonitor): (() => Option[(Event, Watch.Action)], () => Unit) =
val (nextFileEvent, cleanupFileMonitor): (() => Option[(Watch.Event, Watch.Action)], () => Unit) =
getFileEvents(configs, rawLogger, state, count, commands)
val nextEvent: () => Watch.Action =
combineInputAndFileEvents(nextInputEvent, nextFileEvent, logger)
@ -440,9 +430,9 @@ object Continuous extends DeprecatedContinuous {
logger: Logger,
count: AtomicInteger
): () => Watch.Action = {
val f = configs.map { params =>
val ws = params.watchSettings
ws.onStart.map(_.apply(params.arguments(logger))).getOrElse { () =>
val f: () => Seq[Watch.Action] = () => {
configs.map { params =>
val ws = params.watchSettings
ws.onIteration.map(_(count.get)).getOrElse {
if (configs.size == 1) { // Only allow custom start messages for single tasks
ws.startMessage match {
@ -457,7 +447,7 @@ object Continuous extends DeprecatedContinuous {
}
}
() => {
val res = f.view.map(_()).min
val res = f().min
// Print the default watch message if there are multiple tasks
if (configs.size > 1)
Watch.defaultStartWatch(count.get(), project, commands).foreach(logger.info(_))
@ -470,88 +460,98 @@ object Continuous extends DeprecatedContinuous {
state: State,
count: AtomicInteger,
commands: Seq[String]
)(implicit extracted: Extracted): (() => Option[(Event, Watch.Action)], () => Unit) = {
)(implicit extracted: Extracted): (() => Option[(Watch.Event, Watch.Action)], () => Unit) = {
val attributeMap = state.get(persistentFileAttributeMap).get
val trackMetaBuild = configs.forall(_.watchSettings.trackMetaBuild)
val buildGlobs =
if (trackMetaBuild) extracted.getOpt(Keys.fileInputs in Keys.settingsData).getOrElse(Nil)
if (trackMetaBuild) extracted.getOpt(fileInputs in settingsData).getOrElse(Nil)
else Nil
val buildFilter = buildGlobs.toEntryFilter
val defaultTrigger = if (Util.isWindows) Watch.ifChanged(Watch.Trigger) else Watch.trigger
val onEvent: Event => (Event, Watch.Action) = {
val f = configs.map { params =>
val ws = params.watchSettings
val oe = ws.onEvent
.map(_.apply(params.arguments(logger)))
.getOrElse {
val onInputEvent = ws.onInputEvent.getOrElse(defaultTrigger)
val onTriggerEvent = ws.onTriggerEvent.getOrElse(defaultTrigger)
val onMetaBuildEvent = ws.onMetaBuildEvent.getOrElse(Watch.ifChanged(Watch.Reload))
val triggerFilter = params.triggers.toEntryFilter
val excludedBuildFilter = buildFilter
event: Event =>
val inputFilter = params.inputs().toEntryFilter
val c = count.get()
val entry = event.entry
Seq[Watch.Action](
if (inputFilter(entry)) onInputEvent(c, event) else Watch.Ignore,
if (triggerFilter(entry)) onTriggerEvent(c, event) else Watch.Ignore,
if (excludedBuildFilter(entry)) onMetaBuildEvent(c, event) else Watch.Ignore
).min
val retentionPeriod = configs.map(_.watchSettings.antiEntropyRetentionPeriod).max
val quarantinePeriod = configs.map(_.watchSettings.deletionQuarantinePeriod).max
val onEvent: Event => Seq[(Watch.Event, Watch.Action)] = event => {
val path = event.path
def watchEvent(stamper: FileStamper, forceTrigger: Boolean): Option[Watch.Event] = {
val stamp = FileStamp(path, stamper)
if (!event.exists) {
attributeMap.remove(event.path) match {
case null => None
case _ => Some(Deletion(event))
}
} else {
import sbt.internal.inc.Stamp.equivStamp
attributeMap.put(path, stamp) match {
case null => Some(Creation(event))
case s =>
if (forceTrigger || !equivStamp.equiv(s.stamp, stamp.stamp))
Some(Update(event))
else None
}
event: Event => event -> oe(event)
}
event: Event => f.view.map(_.apply(event)).minBy(_._2)
}
val monitor: FileEventMonitor[FileAttributes] = new FileEventMonitor[FileAttributes] {
/**
* Create a filtered monitor that only accepts globs that have been registered for the
* task at hand.
* @param monitor the file event monitor to filter
* @param globs the globs to accept. This must be a function because we want to be able
* to accept globs that are added dynamically as part of task evaluation.
* @return the filtered FileEventMonitor.
*/
private def filter(
monitor: FileEventMonitor[FileAttributes],
globs: () => Seq[Glob]
): FileEventMonitor[FileAttributes] = {
new FileEventMonitor[FileAttributes] {
override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] =
monitor.poll(duration).filter(e => globs().toEntryFilter(e.entry))
override def close(): Unit = monitor.close()
}
}
if (buildGlobs.exists(_.matches(path))) {
watchEvent(FileStamper.Hash, forceTrigger = false).map(e => e -> Watch.Reload).toSeq
} else {
configs
.flatMap { config =>
config
.inputs()
.collectFirst {
case d if d.glob.matches(path) => (d.forceTrigger, true, d.fileStamper)
}
.flatMap {
case (forceTrigger, accepted, stamper) =>
if (accepted) {
watchEvent(stamper, forceTrigger).flatMap { e =>
val action = config.watchSettings.onFileInputEvent(count.get(), e)
if (action != Watch.Ignore) Some(e -> action) else None
}
} else None
}
} match {
case events if events.isEmpty => Nil
case events => events.minBy(_._2) :: Nil
}
}
}
val monitor: FileEventMonitor[Event] = new FileEventMonitor[Event] {
private implicit class WatchLogger(val l: Logger) extends sbt.internal.nio.WatchLogger {
override def debug(msg: Any): Unit = l.debug(msg.toString)
}
// TODO make this a normal monitor
private[this] val monitors: Seq[FileEventMonitor[FileAttributes]] =
private[this] val monitors: Seq[FileEventMonitor[Event]] =
configs.map { config =>
// Create a logger with a scoped key prefix so that we can tell from which
// monitor events occurred.
val l = logger.withPrefix(config.key.show)
val monitor: FileEventMonitor[FileAttributes] =
FileManagement.monitor(config.repository, config.watchSettings.antiEntropy, l)
val allGlobs: () => Seq[Glob] = () => (config.inputs() ++ config.triggers).distinct.sorted
filter(monitor, allGlobs)
FileEventMonitor.antiEntropy(
getRepository(state),
config.watchSettings.antiEntropy,
logger.withPrefix(config.key.show),
config.watchSettings.deletionQuarantinePeriod,
config.watchSettings.antiEntropyRetentionPeriod
)
} ++ (if (trackMetaBuild) {
val l = logger.withPrefix("meta-build")
val antiEntropy = configs.map(_.watchSettings.antiEntropy).max
val repo = getRepository(state)
buildGlobs.foreach(repo.register)
val monitor = FileManagement.monitor(repo, antiEntropy, l)
filter(monitor, () => buildGlobs) :: Nil
FileEventMonitor.antiEntropy(
repo,
antiEntropy,
logger.withPrefix("meta-build"),
quarantinePeriod,
retentionPeriod
) :: Nil
} else Nil)
override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] = {
val res = monitors.flatMap(_.poll(0.millis)).toSet.toVector
override def poll(duration: Duration, filter: Event => Boolean): Seq[Event] = {
val res = monitors.flatMap(_.poll(0.millis, filter)).toSet.toVector
if (res.isEmpty) Thread.sleep(duration.toMillis)
res
}
override def close(): Unit = monitors.foreach(_.close())
}
val watchLogger: WatchLogger = msg => logger.debug(msg.toString)
val retentionPeriod = configs.map(_.watchSettings.antiEntropyRetentionPeriod).max
val antiEntropy = configs.map(_.watchSettings.antiEntropy).max
val quarantinePeriod = configs.map(_.watchSettings.deletionQuarantinePeriod).max
val antiEntropyMonitor = FileEventMonitor.antiEntropy(
monitor,
antiEntropy,
@ -564,29 +564,26 @@ object Continuous extends DeprecatedContinuous {
* motivation is to allow the user to specify this callback via setting so that, for example,
* they can clear the screen when the build triggers.
*/
val onTrigger: Event => Unit = { event: Event =>
configs.foreach { params =>
params.watchSettings.onTrigger.foreach(ot => ot(params.arguments(logger))(event))
}
val onTrigger: Watch.Event => Unit = { event: Watch.Event =>
if (configs.size == 1) {
val config = configs.head
config.watchSettings.triggerMessage match {
case Left(tm) => logger.info(tm(config.watchState(count.get())))
case Right(tm) => tm(count.get(), event, commands).foreach(logger.info(_))
case Right(tm) => tm(count.get(), event.path, commands).foreach(logger.info(_))
}
} else {
Watch.defaultOnTriggerMessage(count.get(), event, commands).foreach(logger.info(_))
Watch.defaultOnTriggerMessage(count.get(), event.path, commands).foreach(logger.info(_))
}
}
(() => {
val actions = antiEntropyMonitor.poll(2.milliseconds).map(onEvent)
val actions = antiEntropyMonitor.poll(2.milliseconds).flatMap(onEvent)
if (actions.exists(_._2 != Watch.Ignore)) {
val builder = new StringBuilder
val min = actions.minBy {
case (e, a) =>
if (builder.nonEmpty) builder.append(", ")
val path = e.entry.typedPath.toPath.toString
val path = e.path
builder.append(path)
builder.append(" -> ")
builder.append(a.toString)
@ -672,10 +669,10 @@ object Continuous extends DeprecatedContinuous {
private def combineInputAndFileEvents(
nextInputAction: () => Watch.Action,
nextFileEvent: () => Option[(Event, Watch.Action)],
nextFileEvent: () => Option[(Watch.Event, Watch.Action)],
logger: Logger
): () => Watch.Action = () => {
val (inputAction: Watch.Action, fileEvent: Option[(Event, Watch.Action)] @unchecked) =
val (inputAction: Watch.Action, fileEvent: Option[(Watch.Event, Watch.Action)] @unchecked) =
Seq(nextInputAction, nextFileEvent).map(_.apply()).toIndexedSeq match {
case Seq(ia: Watch.Action, fe @ Some(_)) => (ia, fe)
case Seq(ia: Watch.Action, None) => (ia, None)
@ -688,7 +685,7 @@ object Continuous extends DeprecatedContinuous {
fileEvent
.collect {
case (event, action) if action != Watch.Ignore =>
s"Received file event $action for ${event.entry.typedPath.toPath}." +
s"Received file event $action for $event." +
(if (action != min) s" Dropping in favor of input event: $min" else "")
}
.foreach(logger.debug(_))
@ -715,6 +712,7 @@ object Continuous extends DeprecatedContinuous {
/**
* Generates a custom logger for the watch process that is able to log at a different level
* from the provided logger.
*
* @param logger the delegate logger.
* @param logLevel the log level for watch events
* @return the wrapped logger.
@ -738,7 +736,7 @@ object Continuous extends DeprecatedContinuous {
}
}
private type WatchOnEvent = (Int, Event) => Watch.Action
private type WatchOnEvent = (Int, Watch.Event) => Watch.Action
/**
* Contains all of the user defined settings that will be used to build a [[Callbacks]]
@ -769,30 +767,26 @@ object Continuous extends DeprecatedContinuous {
implicit extracted: Extracted
) {
val antiEntropy: FiniteDuration =
key.get(Keys.watchAntiEntropy).getOrElse(Watch.defaultAntiEntropy)
key.get(watchAntiEntropy).getOrElse(Watch.defaultAntiEntropy)
val antiEntropyRetentionPeriod: FiniteDuration =
key
.get(Keys.watchAntiEntropyRetentionPeriod)
.get(watchAntiEntropyRetentionPeriod)
.getOrElse(Watch.defaultAntiEntropyRetentionPeriod)
val deletionQuarantinePeriod: FiniteDuration =
key.get(Keys.watchDeletionQuarantinePeriod).getOrElse(Watch.defaultDeletionQuarantinePeriod)
val inputHandler: Option[InputStream => Watch.Action] = key.get(Keys.watchInputHandler)
key.get(watchDeletionQuarantinePeriod).getOrElse(Watch.defaultDeletionQuarantinePeriod)
val inputHandler: Option[InputStream => Watch.Action] = key.get(watchInputHandler)
val inputParser: Parser[Watch.Action] =
key.get(Keys.watchInputParser).getOrElse(Watch.defaultInputParser)
val logLevel: Level.Value = key.get(Keys.watchLogLevel).getOrElse(Level.Info)
val onEnter: () => Unit = key.get(Keys.watchOnEnter).getOrElse(() => {})
val onEvent: Option[Arguments => Event => Watch.Action] = key.get(Keys.watchOnEvent)
val onExit: () => Unit = key.get(Keys.watchOnExit).getOrElse(() => {})
val onInputEvent: Option[WatchOnEvent] = key.get(Keys.watchOnInputEvent)
val onIteration: Option[Int => Watch.Action] = key.get(Keys.watchOnIteration)
val onMetaBuildEvent: Option[WatchOnEvent] = key.get(Keys.watchOnMetaBuildEvent)
val onStart: Option[Arguments => () => Watch.Action] = key.get(Keys.watchOnStart)
key.get(watchInputParser).getOrElse(Watch.defaultInputParser)
val logLevel: Level.Value = key.get(watchLogLevel).getOrElse(Level.Info)
val onEnter: () => Unit = key.get(watchOnEnter).getOrElse(() => {})
val onExit: () => Unit = key.get(watchOnExit).getOrElse(() => {})
val onFileInputEvent: WatchOnEvent =
key.get(watchOnFileInputEvent).getOrElse(Watch.trigger)
val onIteration: Option[Int => Watch.Action] = key.get(watchOnIteration)
val onTermination: Option[(Watch.Action, String, Int, State) => State] =
key.get(Keys.watchOnTermination)
val onTrigger: Option[Arguments => Event => Unit] = key.get(Keys.watchOnTrigger)
val onTriggerEvent: Option[WatchOnEvent] = key.get(Keys.watchOnTriggerEvent)
key.get(watchOnTermination)
val startMessage: StartMessage = getStartMessage(key)
val trackMetaBuild: Boolean = key.get(Keys.watchTrackMetaBuild).getOrElse(true)
val trackMetaBuild: Boolean = key.get(watchTrackMetaBuild).getOrElse(true)
val triggerMessage: TriggerMessage = getTriggerMessage(key)
// Unlike the rest of the settings, InputStream is a TaskKey which means that if it is set,
@ -800,36 +794,35 @@ object Continuous extends DeprecatedContinuous {
// logical that users may want to use a different InputStream on each task invocation. The
// alternative would be SettingKey[() => InputStream], but that doesn't feel right because
// one might want the InputStream to depend on other tasks.
val inputStream: Option[TaskKey[InputStream]] = key.get(Keys.watchInputStream)
val inputStream: Option[TaskKey[InputStream]] = key.get(watchInputStream)
}
/**
* Container class for all of the components we need to setup a watch for a particular task or
* input task.
* @param key the [[ScopedKey]] instance for the task we will watch
* @param repository the task [[FileTreeRepository]] instance
* @param inputs the transitive task inputs (see [[InputGraph]])
* @param triggers the transitive triggers (see [[InputGraph]])
*
* @param key the [[ScopedKey]] instance for the task we will watch
* @param inputs the transitive task inputs (see [[SettingsGraph]])
* @param watchSettings the [[WatchSettings]] instance for the task
*/
private final class Config private[internal] (
val key: ScopedKey[_],
val repository: FileTreeRepository[FileAttributes],
val inputs: () => Seq[Glob],
val triggers: Seq[Glob],
val inputs: () => Seq[DynamicInput],
val watchSettings: WatchSettings
) {
private[sbt] def watchState(count: Int): DeprecatedWatchState =
WatchState.empty(inputs() ++ triggers).withCount(count)
def arguments(logger: Logger): Arguments = new Arguments(logger, inputs(), triggers)
WatchState.empty(inputs().map(_.glob)).withCount(count)
def arguments(logger: Logger): Arguments = new Arguments(logger, inputs())
}
private def getStartMessage(key: ScopedKey[_])(implicit e: Extracted): StartMessage = Some {
lazy val default = key.get(Keys.watchStartMessage).getOrElse(Watch.defaultStartWatch)
lazy val default = key.get(watchStartMessage).getOrElse(Watch.defaultStartWatch)
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
}
private def getTriggerMessage(key: ScopedKey[_])(implicit e: Extracted): TriggerMessage = {
private def getTriggerMessage(
key: ScopedKey[_]
)(implicit e: Extracted): TriggerMessage = {
lazy val default =
key.get(Keys.watchTriggeredMessage).getOrElse(Watch.defaultOnTriggerMessage)
key.get(watchTriggeredMessage).getOrElse(Watch.defaultOnTriggerMessage)
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
}
@ -841,6 +834,7 @@ object Continuous extends DeprecatedContinuous {
* foo/Compile/compile will pretty print as "foo / Compile / compile", not
* "ProjectRef($URI, foo) / compile / compile", where the ProjectRef part is just noise that
* is rarely relevant for debugging.
*
* @return the pretty printed output.
*/
def show: String = {
@ -915,6 +909,7 @@ object Continuous extends DeprecatedContinuous {
* foo/Compile/compile will pretty print as "foo / Compile / compile", not
* "ProjectRef($URI, foo) / compile / compile", where the ProjectRef part is just noise that
* is rarely relevant for debugging.
*
* @return the pretty printed output.
*/
def show: String = s"${scopedKey.scope.show} / ${scopedKey.key}"
@ -925,6 +920,7 @@ object Continuous extends DeprecatedContinuous {
/**
* Creates a logger that adds a prefix to the messages that it logs. The motivation is so that
* we can tell from which FileEventMonitor an event originated.
*
* @param prefix the string to prefix the message with
* @return the wrapped Logger.
*/

View File

@ -7,13 +7,14 @@
package sbt.internal
import java.nio.file.Path
import sbt.internal.io.{ WatchState => WS }
private[internal] trait DeprecatedContinuous {
protected type Event = sbt.io.FileEventMonitor.Event[FileAttributes]
protected type StartMessage =
Option[Either[WS => String, (Int, String, Seq[String]) => Option[String]]]
protected type TriggerMessage = Either[WS => String, (Int, Event, Seq[String]) => Option[String]]
protected type TriggerMessage = Either[WS => String, (Int, Path, Seq[String]) => Option[String]]
protected type DeprecatedWatchState = WS
protected val deprecatedWatchingMessage = sbt.Keys.watchingMessage
protected val deprecatedTriggeredMessage = sbt.Keys.triggeredMessage

View File

@ -1,73 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal
import java.io.{ InputStream, PipedInputStream, PipedOutputStream }
import java.util.concurrent.LinkedBlockingQueue
import scala.annotation.tailrec
import scala.collection.JavaConverters._
/**
* Creates a copy of the provided [[InputStream]] that forwards its contents to an arbitrary
* number of connected [[InputStream]] instances via pipe.
* @param in the [[InputStream]] to wrap.
*/
private[internal] class DupedInputStream(val in: InputStream)
extends InputStream
with AutoCloseable {
/**
* Returns a copied [[InputStream]] that will receive the same bytes as System.in.
* @return
*/
def duped: InputStream = {
val pipedOutputStream = new PipedOutputStream()
pipes += pipedOutputStream
val res = new PollingInputStream(new PipedInputStream(pipedOutputStream))
buffer.forEach(pipedOutputStream.write(_))
res
}
private[this] val pipes = new java.util.Vector[PipedOutputStream].asScala
private[this] val buffer = new LinkedBlockingQueue[Int]
private class PollingInputStream(val pipedInputStream: PipedInputStream) extends InputStream {
override def available(): Int = {
fillBuffer()
pipedInputStream.available()
}
override def read(): Int = {
fillBuffer()
pipedInputStream.read
}
}
override def available(): Int = {
fillBuffer()
buffer.size
}
override def read(): Int = {
fillBuffer()
buffer.take()
}
private[this] def fillBuffer(): Unit = synchronized {
@tailrec
def impl(): Unit = in.available match {
case i if i > 0 =>
val res = in.read()
buffer.add(res)
pipes.foreach { p =>
p.write(res)
p.flush()
}
impl()
case _ =>
}
impl()
}
}

View File

@ -0,0 +1,45 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.nio.file.{ WatchService => _ }
import sbt.nio.FileStamper
import sbt.nio.file.Glob
private[sbt] final case class DynamicInput(
glob: Glob,
fileStamper: FileStamper,
forceTrigger: Boolean
)
private[sbt] object DynamicInput {
implicit object ordering extends Ordering[DynamicInput] {
private implicit val globOrdering: Ordering[Glob] = Glob.ordering
private implicit object fileStamperOrdering extends Ordering[FileStamper] {
override def compare(left: FileStamper, right: FileStamper): Int = left match {
case FileStamper.Hash =>
right match {
case FileStamper.Hash => 0
case _ => -1
}
case FileStamper.LastModified =>
right match {
case FileStamper.LastModified => 0
case _ => 1
}
}
}
override def compare(left: DynamicInput, right: DynamicInput): Int = {
globOrdering.compare(left.glob, right.glob) match {
case 0 => fileStamperOrdering.compare(left.fileStamper, right.fileStamper)
case i => i
}
}
}
}

View File

@ -10,34 +10,53 @@ package sbt.internal
import java.nio.file.Paths
import java.util.Optional
import sbt.Stamped
import sbt.internal.inc.ExternalLookup
import sbt.Def
import sbt.Keys._
import sbt.internal.inc.{ EmptyStamp, ExternalLookup, Stamper }
import sbt.io.syntax._
import sbt.io.{ AllPassFilter, TypedPath }
import sbt.nio.Keys._
import sbt.nio.file.RecursiveGlob
import sbt.nio.file.syntax._
import xsbti.compile._
import xsbti.compile.analysis.Stamp
import scala.collection.JavaConverters._
import scala.collection.mutable
private[sbt] object ExternalHooks {
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
def apply(options: CompileOptions, repo: FileTree.Repository): DefaultExternalHooks = {
import scala.collection.JavaConverters._
val sources = options.sources()
val cachedSources = new java.util.HashMap[File, Stamp]
val converter: File => Stamp = f => Stamped.sourceConverter(TypedPath(f.toPath))
sources.foreach {
case sf: Stamped => cachedSources.put(sf, sf.stamp)
case f: File => cachedSources.put(f, converter(f))
private[this] implicit class StampOps(val s: Stamp) extends AnyVal {
def hash: String = s.getHash.orElse("")
def lastModified: Long = s.getLastModified.orElse(-1L)
}
def default: Def.Initialize[sbt.Task[ExternalHooks]] = Def.task {
val attributeMap = fileAttributeMap.value
val cp = dependencyClasspath.value.map(_.data)
cp.foreach { file =>
val path = file.toPath
attributeMap.get(path) match {
case null => attributeMap.put(path, sbt.nio.FileStamp.lastModified(path))
case _ =>
}
}
val allBinaries = new java.util.HashMap[File, Stamp]
options.classpath.foreach {
case f if f.getName.endsWith(".jar") =>
repo.get(f.toGlob) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
case f =>
repo.get(f ** AllPassFilter) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
val classGlob = classDirectory.value.toGlob / RecursiveGlob / "*.class"
fileTreeView.value.list(classGlob).foreach {
case (path, _) => attributeMap.put(path, sbt.nio.FileStamp.lastModified(path))
}
apply(
(compileOptions in compile).value,
(file: File) => {
attributeMap.get(file.toPath) match {
case null => EmptyStamp
case s => s.stamp
}
}
)
}
private def apply(
options: CompileOptions,
attributeMap: File => Stamp
): DefaultExternalHooks = {
val lookup = new ExternalLookup {
override def changedSources(previousAnalysis: CompileAnalysis): Option[Changes[File]] = Some {
new Changes[File] {
@ -51,19 +70,19 @@ private[sbt] object ExternalHooks {
previousAnalysis.readStamps().getAllSourceStamps.asScala
prevSources.foreach {
case (file: File, s: Stamp) =>
cachedSources.get(file) match {
attributeMap(file) match {
case null =>
getRemoved.add(file)
case stamp =>
if ((stamp.getHash.orElse("") == s.getHash.orElse("")) && (stamp.getLastModified
.orElse(-1L) == s.getLastModified.orElse(-1L))) {
val hash = (if (stamp.getHash.isPresent) stamp else Stamper.forHash(file)).hash
if (hash == s.hash) {
getUnmodified.add(file)
} else {
getChanged.add(file)
}
}
}
sources.foreach(file => if (!prevSources.contains(file)) getAdded.add(file))
options.sources.foreach(file => if (!prevSources.contains(file)) getAdded.add(file))
}
}
@ -79,26 +98,23 @@ private[sbt] object ExternalHooks {
override def changedBinaries(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllBinaryStamps.asScala.flatMap {
case (file, stamp) =>
allBinaries.get(file) match {
case null =>
attributeMap(file) match {
case cachedStamp if stamp.getLastModified == cachedStamp.getLastModified => None
case _ =>
javaHome match {
case Some(h) if file.toPath.startsWith(h) => None
case _ => Some(file)
}
case cachedStamp if stamp == cachedStamp => None
case _ => Some(file)
}
}.toSet)
}
override def removedProducts(previousAnalysis: CompileAnalysis): Option[Set[File]] = {
Some(previousAnalysis.readStamps.getAllProductStamps.asScala.flatMap {
case (file, s) =>
allBinaries get file match {
case null => Some(file)
case stamp if stamp.getLastModified.orElse(0L) != s.getLastModified.orElse(0L) =>
Some(file)
case _ => None
case (file, stamp) =>
attributeMap(file) match {
case s if s.getLastModified == stamp.getLastModified => None
case _ => Some(file)
}
}.toSet)
}

View File

@ -9,61 +9,22 @@ package sbt
package internal
import java.io.IOException
import java.nio.file.Path
import sbt.internal.io.HybridPollingFileTreeRepository
import sbt.io.FileTreeDataView.{ Entry, Observable, Observer, Observers }
import sbt.io.{ FileTreeRepository, _ }
import sbt.util.Logger
import scala.concurrent.duration._
import sbt.internal.nio.{ FileEvent, FileTreeRepository, Observable, Observer }
import sbt.nio.file.Glob
private[sbt] object FileManagement {
private[sbt] def monitor(
repository: FileTreeRepository[FileAttributes],
antiEntropy: FiniteDuration,
logger: Logger
): FileEventMonitor[FileAttributes] = {
// Forwards callbacks to the repository. The close method removes all of these
// callbacks.
val copied: Observable[FileAttributes] = new Observable[FileAttributes] {
private[this] val observers = new Observers[FileAttributes]
val underlying = repository match {
case h: HybridPollingFileTreeRepository[FileAttributes] =>
h.toPollingRepository(antiEntropy, (msg: Any) => logger.debug(msg.toString))
case r => r
}
private[this] val handle = underlying.addObserver(observers)
override def addObserver(observer: Observer[FileAttributes]): Int =
observers.addObserver(observer)
override def removeObserver(handle: Int): Unit = observers.removeObserver(handle)
override def close(): Unit = {
underlying.removeObserver(handle)
underlying.close()
}
}
new FileEventMonitor[FileAttributes] {
val monitor =
FileEventMonitor.antiEntropy(
copied,
antiEntropy,
new WatchLogger { override def debug(msg: => Any): Unit = logger.debug(msg.toString) },
50.millis,
10.minutes
)
override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] =
monitor.poll(duration)
override def close(): Unit = monitor.close()
}
}
private[sbt] class CopiedFileTreeRepository[T](underlying: FileTreeRepository[T])
private[sbt] def copy[T](fileTreeRepository: FileTreeRepository[T]): FileTreeRepository[T] =
new CopiedFileTreeRepository[T](fileTreeRepository)
private[this] class CopiedFileTreeRepository[T](underlying: FileTreeRepository[T])
extends FileTreeRepository[T] {
def addObserver(observer: Observer[T]) = underlying.addObserver(observer)
def close(): Unit = {} // Don't close the underlying observable
def list(glob: Glob): Seq[TypedPath] = underlying.list(glob)
def listEntries(glob: Glob): Seq[Entry[T]] = underlying.listEntries(glob)
def removeObserver(handle: Int): Unit = underlying.removeObserver(handle)
def register(glob: Glob): Either[IOException, Boolean] = underlying.register(glob)
def unregister(glob: Glob): Unit = underlying.unregister(glob)
override def list(path: Path): Seq[(Path, T)] = underlying.list(path)
override def close(): Unit = {}
override def register(glob: Glob): Either[IOException, Observable[FileEvent[T]]] =
underlying.register(glob)
override def addObserver(observer: Observer[FileEvent[T]]): AutoCloseable =
underlying.addObserver(observer)
override def toString: String = s"CopiedFileTreeRepository($underlying)"
}
}

View File

@ -1,104 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.nio.file.{ Path, WatchService => _ }
import sbt.internal.util.appmacro.MacroDefaults
import sbt.io.FileTreeDataView.Entry
import sbt.io._
import scala.collection.mutable
import scala.language.experimental.macros
object FileTree {
private sealed trait CacheOptions
private case object NoCache extends CacheOptions
private case object UseCache extends CacheOptions
private case object Validate extends CacheOptions
private def toPair(
filter: Entry[FileAttributes] => Boolean
)(e: Entry[FileAttributes]): Option[(Path, FileAttributes)] =
e.value.toOption.flatMap(a => if (filter(e)) Some(e.typedPath.toPath -> a) else None)
trait Repository extends sbt.internal.Repository[Seq, Glob, (Path, FileAttributes)]
private[sbt] trait DynamicInputs {
def value: Option[mutable.Set[Glob]]
}
private[sbt] object DynamicInputs {
def empty: DynamicInputs = new impl(Some(mutable.Set.empty[Glob]))
final val none: DynamicInputs = new impl(None)
private final class impl(override val value: Option[mutable.Set[Glob]]) extends DynamicInputs
implicit def default: DynamicInputs = macro MacroDefaults.dynamicInputs
}
private[sbt] object Repository {
/**
* Provide a default [[Repository]] that works within a task definition, e.g. Def.task. It's
* implemented as a macro so that it can call `.value` on a TaskKey. Using a macro also allows
* us to use classes that aren't actually available in this project, e.g. sbt.Keys.
* @return a [[Repository]] instance
*/
implicit def default: FileTree.Repository = macro MacroDefaults.fileTreeRepository
private[sbt] object polling extends Repository {
val view = FileTreeView.DEFAULT.asDataView(FileAttributes.default)
override def get(key: Glob): Seq[(Path, FileAttributes)] =
view.listEntries(key).flatMap(toPair(key.toEntryFilter))
override def close(): Unit = {}
}
}
private class CachingRepository(underlying: FileTreeRepository[FileAttributes])
extends Repository {
lazy val cacheOptions = System.getProperty("sbt.io.filecache") match {
case "true" => UseCache
case "validate" => Validate
case _ => NoCache
}
override def get(key: Glob): Seq[(Path, FileAttributes)] = {
underlying.register(key)
cacheOptions match {
case Validate =>
val res = Repository.polling.get(key)
val filter = key.toEntryFilter
val cacheRes = underlying
.listEntries(key)
.flatMap(e => if (filter(e)) Some(e.typedPath.toPath) else None)
.toSet
val resSet = res.map(_._1).toSet
if (cacheRes != resSet) {
val msg = "Warning: got different files when using the internal file cache compared " +
s"to polling the file system for key: $key.\n"
val fileDiff = cacheRes diff resSet match {
case d if d.nonEmpty =>
new Exception("hmm").printStackTrace()
s"Cache had files not found in the file system:\n${d.mkString("\n")}.\n"
case _ => ""
}
val cacheDiff = resSet diff cacheRes match {
case d if d.nonEmpty =>
(if (fileDiff.isEmpty) "" else " ") +
s"File system had files not in the cache:\n${d.mkString("\n")}.\n"
case _ => ""
}
val diff = fileDiff + cacheDiff
val instructions = "Please open an issue at https://github.com/sbt/sbt. To disable " +
"this warning, run sbt with -Dsbt.io.filecache=false"
System.err.println(msg + diff + instructions)
}
res
case UseCache =>
underlying.listEntries(key).flatMap(toPair(key.toEntryFilter))
case NoCache =>
Repository.polling.get(key)
}
}
override def close(): Unit = underlying.close()
}
private[sbt] def repository(underlying: FileTreeRepository[FileAttributes]): Repository =
new CachingRepository(underlying)
}

View File

@ -1,123 +0,0 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.io.File
import java.nio.file.Path
import java.util.concurrent.ConcurrentSkipListMap
import sbt.io.{ FileFilter, Glob, SimpleFileFilter }
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
* Retrieve files from a repository. This should usually be an extension class for
* sbt.io.internal.Glob (or a Traversable collection of source instances) that allows us to
* actually retrieve the files corresponding to those sources.
*/
private[sbt] sealed trait GlobLister extends Any {
final def all(repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
all(repository, FileTree.DynamicInputs.empty)
/**
* Get the sources described this `GlobLister`. The results should not return any duplicate
* entries for each path in the result set.
*
* @param repository the file tree repository for retrieving the files for a given glob.
* @param dynamicInputs the task dynamic inputs to track for watch.
* @return the files described by this `GlobLister`.
*/
def all(
implicit repository: FileTree.Repository,
dynamicInputs: FileTree.DynamicInputs
): Seq[(Path, FileAttributes)]
}
/**
* Provides implicit definitions to provide a `GlobLister` given a Glob or
* Traversable[Glob].
*/
private[sbt] object GlobLister extends GlobListers
/**
* Provides implicit definitions to provide a `GlobLister` given a Glob or
* Traversable[Glob].
*/
private[sbt] trait GlobListers {
import GlobListers._
/**
* Generate a GlobLister given a particular [[Glob]]s.
*
* @param source the input Glob
*/
implicit def fromGlob(source: Glob): GlobLister = new impl(source :: Nil)
/**
* Generate a GlobLister given a collection of Globs.
*
* @param sources the collection of sources
* @tparam T the source collection type
*/
implicit def fromTraversableGlob[T <: Traversable[Glob]](sources: T): GlobLister =
new impl(sources)
}
private[internal] object GlobListers {
private def covers(left: Glob, right: Glob): Boolean = {
right.base.startsWith(left.base) && {
left.depth == Int.MaxValue || {
val depth = left.base.relativize(right.base).getNameCount - 1
depth <= left.depth - right.depth
}
}
}
private def aggregate(globs: Traversable[Glob]): Seq[(Glob, Traversable[Glob])] = {
val sorted = globs.toSeq.sorted
val map = new ConcurrentSkipListMap[Path, (Glob, mutable.Set[Glob])]
if (sorted.size > 1) {
sorted.foreach { glob =>
map.subMap(glob.base.getRoot, glob.base.resolve(Char.MaxValue.toString)).asScala.find {
case (_, (g, _)) => covers(g, glob)
} match {
case Some((_, (_, globs))) => globs += glob
case None =>
val globs = mutable.Set(glob)
val filter: FileFilter = new SimpleFileFilter((file: File) => {
globs.exists(_.toFileFilter.accept(file))
})
map.put(glob.base, (Glob(glob.base, filter, glob.depth), globs))
}
}
map.asScala.values.toIndexedSeq
} else sorted.map(g => g -> (g :: Nil))
}
/**
* Implements `GlobLister` given a collection of Globs. If the input collection type
* preserves uniqueness, e.g. `Set[Glob]`, then the output will be the unique source list.
* Otherwise duplicates are possible.
*
* @param globs the input globs
* @tparam T the collection type
*/
private class impl[T <: Traversable[Glob]](val globs: T) extends AnyVal with GlobLister {
override def all(
implicit repository: FileTree.Repository,
dynamicInputs: FileTree.DynamicInputs
): Seq[(Path, FileAttributes)] = {
aggregate(globs).flatMap {
case (glob, allGlobs) =>
dynamicInputs.value.foreach(_ ++= allGlobs)
repository.get(glob)
}.toIndexedSeq
}
}
}

View File

@ -8,47 +8,32 @@
package sbt
package internal
import BuildPaths._
import BuildStreams._
import collection.mutable
import compiler.Eval
import Def.{ isDummy, ScopedKey, ScopeLocal, Setting }
import java.io.File
import java.net.URI
import Keys.{
appConfiguration,
baseDirectory,
configuration,
exportedProducts,
fullClasspath,
fullResolvers,
isMetaBuild,
loadedBuild,
onLoadMessage,
pluginData,
resolvedScoped,
sbtPlugin,
scalacOptions,
streams,
thisProject,
thisProjectRef,
update
}
import Project.inScope
import sbt.BuildPaths._
import sbt.Def.{ ScopeLocal, ScopedKey, Setting, isDummy }
import sbt.Keys._
import sbt.Project.inScope
import sbt.Scope.GlobalScope
import sbt.compiler.Eval
import sbt.internal.BuildStreams._
import sbt.internal.inc.classpath.ClasspathUtilities
import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResolution, IvyPaths }
import sbt.internal.inc.{ ZincLmUtil, ZincUtil, ScalaInstance }
import sbt.internal.inc.{ ScalaInstance, ZincLmUtil, ZincUtil }
import sbt.internal.util.Attributed.data
import sbt.internal.util.Types.const
import sbt.internal.util.{ Attributed, Settings, ~> }
import sbt.io.{ GlobFilter, IO, Path }
import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResolution, IvyPaths }
import sbt.librarymanagement.{ Configuration, Configurations, Resolver }
import sbt.util.{ Show, Logger }
import scala.annotation.tailrec
import scala.tools.nsc.reporters.ConsoleReporter
import Scope.GlobalScope
import sbt.nio.Settings
import sbt.util.{ Logger, Show }
import xsbti.compile.{ ClasspathOptionsUtil, Compilers }
import scala.annotation.tailrec
import scala.collection.mutable
import scala.tools.nsc.reporters.ConsoleReporter
private[sbt] object Load {
// note that there is State passed in but not pulled out
def defaultLoad(
@ -122,7 +107,7 @@ private[sbt] object Load {
compilers,
evalPluginDef,
delegates,
EvaluateTask.injectStreams,
s => EvaluateTask.injectStreams(s) ++ Settings.inject(s),
pluginMgmt,
inject,
None,
@ -260,7 +245,9 @@ private[sbt] object Load {
val settings = timed("Load.apply: finalTransforms", log) {
finalTransforms(buildConfigurations(loaded, getRootProject(projects), config.injectSettings))
}
val delegates = timed("Load.apply: config.delegates", log) { config.delegates(loaded) }
val delegates = timed("Load.apply: config.delegates", log) {
config.delegates(loaded)
}
val data = timed("Load.apply: Def.make(settings)...", log) {
// When settings.size is 100000, Def.make takes around 10s.
if (settings.size > 10000) {
@ -415,8 +402,10 @@ private[sbt] object Load {
uri: URI,
rootProject: URI => String,
settings: Seq[Setting[_]]
): Seq[Setting[_]] =
Project.transform(Scope.resolveScope(thisScope, uri, rootProject), settings)
): Seq[Setting[_]] = {
val transformed = Project.transform(Scope.resolveScope(thisScope, uri, rootProject), settings)
Settings.inject(transformed)
}
def projectScope(project: Reference): Scope = Scope(Select(project), Zero, Zero, Zero)
@ -836,7 +825,6 @@ private[sbt] object Load {
* @param makeOrDiscoverRoot True if we should autogenerate a root project.
* @param buildUri The URI of the build this is loading
* @param context The plugin management context for autogenerated IDs.
*
* @return The completely resolved/updated sequence of projects defined, with all settings expanded.
*
* TODO - We want to attach the known (at this time) vals/lazy vals defined in each project's
@ -1030,7 +1018,6 @@ private[sbt] object Load {
*
* Ordering all Setting[_]s for the project
*
*
* @param p The project with manipulation.
* @param projectPlugins The deduced list of plugins for the given project.
* @param loadedPlugins The project definition (and classloader) of the build.
@ -1152,7 +1139,7 @@ private[sbt] object Load {
merge(fs.sortBy(_.getName).map(memoLoadSettingsFile))
// Finds all the build files associated with this project
import AddSettings.{ SbtFiles, DefaultSbtFiles, Sequence }
import AddSettings.{ DefaultSbtFiles, SbtFiles, Sequence }
def associatedFiles(auto: AddSettings): Seq[File] = auto match {
case sf: SbtFiles => sf.files.map(f => IO.resolve(projectBase, f)).filterNot(_.isHidden)
case sf: DefaultSbtFiles => defaultSbtFiles.filter(sf.include).filterNot(_.isHidden)

View File

@ -33,8 +33,9 @@ import scala.collection.JavaConverters._
* @tparam K the key type
* @tparam V the value type
*/
trait Repository[M[_], K, V] extends AutoCloseable {
private[sbt] trait Repository[M[_], K, V] extends AutoCloseable {
def get(key: K): M[V]
override def close(): Unit = {}
}
private[sbt] final class MutableRepository[K, V] extends Repository[Option, K, V] {

View File

@ -14,38 +14,26 @@ import sbt._
import sbt.internal.io.Source
import sbt.internal.util.AttributeMap
import sbt.internal.util.complete.Parser
import sbt.io.Glob
import sbt.io.syntax._
import sbt.nio.FileStamper
import sbt.nio.Keys._
import sbt.nio.file.Glob
import scala.annotation.tailrec
object TransitiveGlobs {
val transitiveTriggers = Def.taskKey[Seq[Glob]]("The transitive triggers for a key")
val transitiveInputs = Def.taskKey[Seq[Glob]]("The transitive inputs for a key")
val transitiveGlobs =
Def.taskKey[(Seq[Glob], Seq[Glob])]("The transitive inputs and triggers for a key")
}
private[sbt] object InputGraph {
@deprecated("Source is also deprecated.", "1.3.0")
private[sbt] object SettingsGraph {
private implicit class SourceOps(val source: Source) {
def toGlob: Glob =
Glob(
source.base,
source.includeFilter -- source.excludeFilter,
if (source.recursive) Int.MaxValue else 0
)
def toGlob: Glob = {
val filter = source.includeFilter -- source.excludeFilter
if (source.recursive) source.base ** filter else source.base * filter
}
}
private[sbt] def inputsTask: Def.Initialize[Task[Seq[Glob]]] =
Def.task(transitiveGlobs(arguments.value)._1.sorted)
private[sbt] def inputsTask(key: ScopedKey[_]): Def.Initialize[Task[Seq[Glob]]] =
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)._1.sorted))
private[sbt] def triggersTask: Def.Initialize[Task[Seq[Glob]]] =
Def.task(transitiveGlobs(arguments.value)._2.sorted)
private[sbt] def triggersTask(key: ScopedKey[_]): Def.Initialize[Task[Seq[Glob]]] =
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)._2.sorted))
private[sbt] def task: Def.Initialize[Task[(Seq[Glob], Seq[Glob])]] =
Def.task(transitiveGlobs(arguments.value))
private[sbt] def task(key: ScopedKey[_]): Def.Initialize[Task[(Seq[Glob], Seq[Glob])]] =
withParams((e, cm) => Def.task(transitiveGlobs(argumentsImpl(key, e, cm).value)))
private[sbt] def task: Def.Initialize[Task[Seq[DynamicInput]]] =
Def.task(transitiveDynamicInputs(arguments.value))
private[sbt] def task(
key: ScopedKey[_]
): Def.Initialize[Task[Seq[DynamicInput]]] =
withParams((e, cm) => Def.task(transitiveDynamicInputs(argumentsImpl(key, e, cm).value)))
private def withParams[R](
f: (Extracted, CompiledMap) => Def.Initialize[Task[R]]
): Def.Initialize[Task[R]] = Def.taskDyn {
@ -101,7 +89,7 @@ private[sbt] object InputGraph {
}
}.value
}
private[sbt] def transitiveGlobs(args: Arguments): (Seq[Glob], Seq[Glob]) = {
private[sbt] def transitiveDynamicInputs(args: Arguments): Seq[DynamicInput] = {
import args._
val taskScope = Project.fillTaskAxis(scopedKey).scope
def delegates(sk: ScopedKey[_]): Seq[ScopedKey[_]] =
@ -112,15 +100,35 @@ private[sbt] object InputGraph {
val allKeys: Seq[ScopedKey[_]] =
(delegates(scopedKey).toSet ++ delegates(ScopedKey(taskScope, watchTriggers.key))).toSeq
val keys = collectKeys(args, allKeys, Set.empty, Set.empty)
def getGlobs(scopedKey: ScopedKey[Seq[Glob]]): Seq[Glob] =
data.get(scopedKey.scope).flatMap(_.get(scopedKey.key)).getOrElse(Nil)
val (inputGlobs, triggerGlobs) = keys.partition(_.key == fileInputs.key) match {
case (i, t) => (i.flatMap(getGlobs), t.flatMap(getGlobs))
def getDynamicInputs(scopedKey: ScopedKey[Seq[Glob]], trigger: Boolean): Seq[DynamicInput] = {
data
.get(scopedKey.scope)
.map { am =>
am.get(scopedKey.key) match {
case Some(globs: Seq[Glob]) =>
if (trigger) {
val stamper = am.get(inputFileStamper.key).getOrElse(FileStamper.Hash)
val forceTrigger = am.get(watchForceTriggerOnAnyChange.key).getOrElse(false)
globs.map(g => DynamicInput(g, stamper, forceTrigger))
} else {
globs.map(g => DynamicInput(g, FileStamper.LastModified, forceTrigger = true))
}
case None => Nil: Seq[DynamicInput]
}
}
.getOrElse(Nil)
}
(inputGlobs.distinct, (triggerGlobs ++ legacy(keys :+ scopedKey, args)).distinct)
val (inputGlobs, triggerGlobs) = keys.partition(_.key == fileInputs.key) match {
case (inputs, triggers) =>
(
inputs.flatMap(getDynamicInputs(_, trigger = false)),
triggers.flatMap(getDynamicInputs(_, trigger = true))
)
}
(inputGlobs ++ triggerGlobs ++ legacy(keys :+ scopedKey, args)).distinct.sorted
}
private def legacy(keys: Seq[ScopedKey[_]], args: Arguments): Seq[Glob] = {
private def legacy(keys: Seq[ScopedKey[_]], args: Arguments): Seq[DynamicInput] = {
import args._
val projectScopes =
keys.view
@ -144,10 +152,12 @@ private[sbt] object InputGraph {
None
}
}.toSeq
def toDynamicInput(glob: Glob): DynamicInput =
DynamicInput(glob, FileStamper.LastModified, forceTrigger = true)
scopes.flatMap {
case Left(scope) =>
extracted.runTask(Keys.watchSources in scope, state)._2.map(_.toGlob)
case Right(globs) => globs
extracted.runTask(Keys.watchSources in scope, state)._2.map(s => toDynamicInput(s.toGlob))
case Right(globs) => globs.map(toDynamicInput)
}
}
@tailrec

View File

@ -0,0 +1,212 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.nio
import java.io.{ File, IOException }
import java.nio.file.{ Path, Paths }
import sbt.internal.inc.{ EmptyStamp, Stamper, LastModified => IncLastModified }
import sbt.io.IO
import sbt.nio.file.FileAttributes
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
import xsbti.compile.analysis.{ Stamp => XStamp }
import scala.util.Try
/** Selects the strategy used to fingerprint a file for change detection. */
sealed trait FileStamper
object FileStamper {
  /** Stamp files by content hash: robust to touch-without-change, but reads the file. */
  case object Hash extends FileStamper
  /** Stamp files by last-modified time: cheap, but coarser than hashing. */
  case object LastModified extends FileStamper
}
/** A recorded fingerprint (content hash, last-modified time, or I/O error) for a single file. */
private[sbt] sealed trait FileStamp
private[sbt] object FileStamp {
  private[sbt] type Id[T] = T
  private[sbt] implicit class Ops(val fileStamp: FileStamp) {
    /**
     * Converts this stamp to the zinc `XStamp` representation.
     * `Error` stamps (and any future variants) degrade to `EmptyStamp`.
     */
    private[sbt] def stamp: XStamp = fileStamp match {
      case f: FileHashImpl => f.xstamp
      case LastModified(time) => new IncLastModified(time)
      case _ => EmptyStamp
    }
  }
  // Adapter used where a Try-returning stamping function is expected.
  private[sbt] val converter: (Path, FileAttributes) => Try[FileStamp] = (p, a) => Try(apply(p, a))
  /** Stamps `path` using the explicitly requested stamping strategy. */
  def apply(path: Path, fileStamper: FileStamper): FileStamp = fileStamper match {
    case FileStamper.Hash => hash(path)
    case FileStamper.LastModified => lastModified(path)
  }
  /**
   * Stamps `path` with a heuristic: directories, jars and class files use the cheap
   * last-modified stamp; everything else is content-hashed. I/O failures are captured
   * as an `Error` stamp rather than thrown.
   */
  def apply(path: Path, fileAttributes: FileAttributes): FileStamp =
    try {
      if (fileAttributes.isDirectory) lastModified(path)
      else
        path.toString match {
          case s if s.endsWith(".jar") => lastModified(path)
          case s if s.endsWith(".class") => lastModified(path)
          case _ => hash(path)
        }
    } catch {
      case e: IOException => Error(e)
    }
  /** Rehydrates a `Hash` stamp from its hex string form (used when reading persisted stamps). */
  def hash(string: String): Hash = new FileHashImpl(sbt.internal.inc.Hash.unsafeFromString(string))
  /** Computes a content-hash stamp for `path`. */
  def hash(path: Path): Hash = new FileHashImpl(Stamper.forHash(path.toFile))
  /** Computes a last-modified stamp for `path`; missing files stamp as 0. */
  def lastModified(path: Path): LastModified = LastModified(IO.getModifiedTimeOrZero(path.toFile))
  // Keeps the underlying XStamp alongside the hex so `Ops.stamp` can round-trip losslessly.
  private[this] class FileHashImpl(val xstamp: XStamp) extends Hash(xstamp.getHash.orElse(""))
  // Abstract + private constructor: instances can only be created via the factory methods above.
  sealed abstract case class Hash private[sbt] (hex: String) extends FileStamp
  /** Serializes a sequence of paths as a JSON array of strings. */
  implicit val pathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] {
    override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = {
      builder.beginArray()
      obj.foreach { path =>
        builder.writeString(path.toString)
      }
      builder.endArray()
    }
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[Path] =
      jsOpt match {
        case Some(js) =>
          val size = unbuilder.beginArray(js)
          val res = (1 to size) map { _ =>
            Paths.get(unbuilder.readString(unbuilder.nextElement))
          }
          unbuilder.endArray()
          res
        case None =>
          deserializationError("Expected JsArray but found None")
      }
  }
  /** Serializes a sequence of files as a JSON array of strings (mirror of pathJsonFormatter). */
  implicit val fileJsonFormatter: JsonFormat[Seq[File]] = new JsonFormat[Seq[File]] {
    override def write[J](obj: Seq[File], builder: Builder[J]): Unit = {
      builder.beginArray()
      obj.foreach { file =>
        builder.writeString(file.toString)
      }
      builder.endArray()
    }
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[File] =
      jsOpt match {
        case Some(js) =>
          val size = unbuilder.beginArray(js)
          val res = (1 to size) map { _ =>
            new File(unbuilder.readString(unbuilder.nextElement))
          }
          unbuilder.endArray()
          res
        case None =>
          deserializationError("Expected JsArray but found None")
      }
  }
  // Single-value formats delegate to the Seq formats; read assumes a non-empty array
  // (`.head` will throw on an empty one — acceptable because write always emits one element).
  implicit val fileJson: JsonFormat[File] = new JsonFormat[File] {
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): File =
      fileJsonFormatter.read(jsOpt, unbuilder).head
    override def write[J](obj: File, builder: Builder[J]): Unit =
      fileJsonFormatter.write(obj :: Nil, builder)
  }
  implicit val pathJson: JsonFormat[Path] = new JsonFormat[Path] {
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Path =
      pathJsonFormatter.read(jsOpt, unbuilder).head
    override def write[J](obj: Path, builder: Builder[J]): Unit =
      pathJsonFormatter.write(obj :: Nil, builder)
  }
  /**
   * Serializes (path, stamp) pairs as an object with separate "hashes" and
   * "lastModifiedTimes" fields so each group can use its specialized format.
   * NOTE(review): `Error` stamps match neither partition and are silently dropped
   * on write — presumably intentional, but worth confirming.
   */
  implicit val fileStampJsonFormatter: JsonFormat[Seq[(Path, FileStamp)]] =
    new JsonFormat[Seq[(Path, FileStamp)]] {
      override def write[J](obj: Seq[(Path, FileStamp)], builder: Builder[J]): Unit = {
        val (hashes, lastModifiedTimes) = obj.partition(_._2.isInstanceOf[Hash])
        builder.beginObject()
        builder.addField("hashes", hashes.asInstanceOf[Seq[(Path, Hash)]])(fileHashJsonFormatter)
        builder.addField(
          "lastModifiedTimes",
          lastModifiedTimes.asInstanceOf[Seq[(Path, LastModified)]]
        )(
          fileLastModifiedJsonFormatter
        )
        builder.endObject()
      }
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, FileStamp)] =
        jsOpt match {
          case Some(js) =>
            unbuilder.beginObject(js)
            val hashes = unbuilder.readField("hashes")(fileHashJsonFormatter)
            val lastModifieds =
              unbuilder.readField("lastModifiedTimes")(fileLastModifiedJsonFormatter)
            unbuilder.endObject()
            hashes ++ lastModifieds
          case None =>
            deserializationError("Expected JsObject but found None")
        }
    }
  // Each (path, hash) pair is encoded as a nested two-element array: [path, hex].
  val fileHashJsonFormatter: JsonFormat[Seq[(Path, Hash)]] =
    new JsonFormat[Seq[(Path, Hash)]] {
      override def write[J](obj: Seq[(Path, Hash)], builder: Builder[J]): Unit = {
        builder.beginArray()
        obj.foreach {
          case (p, h) =>
            builder.beginArray()
            builder.writeString(p.toString)
            builder.writeString(h.hex)
            builder.endArray()
        }
        builder.endArray()
      }
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, Hash)] =
        jsOpt match {
          case Some(js) =>
            val size = unbuilder.beginArray(js)
            val res = (1 to size) map { _ =>
              unbuilder.beginArray(unbuilder.nextElement)
              val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
              val hash = FileStamp.hash(unbuilder.readString(unbuilder.nextElement))
              unbuilder.endArray()
              path -> hash
            }
            unbuilder.endArray()
            res
          case None =>
            deserializationError("Expected JsArray but found None")
        }
    }
  // Each (path, lastModified) pair is encoded as a nested two-element array: [path, millis].
  val fileLastModifiedJsonFormatter: JsonFormat[Seq[(Path, LastModified)]] =
    new JsonFormat[Seq[(Path, LastModified)]] {
      override def write[J](obj: Seq[(Path, LastModified)], builder: Builder[J]): Unit = {
        builder.beginArray()
        obj.foreach {
          case (p, lm) =>
            builder.beginArray()
            builder.writeString(p.toString)
            builder.writeLong(lm.time)
            builder.endArray()
        }
        builder.endArray()
      }
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, LastModified)] =
        jsOpt match {
          case Some(js) =>
            val size = unbuilder.beginArray(js)
            val res = (1 to size) map { _ =>
              unbuilder.beginArray(unbuilder.nextElement)
              val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
              val hash = FileStamp.LastModified(unbuilder.readLong(unbuilder.nextElement))
              unbuilder.endArray()
              path -> hash
            }
            unbuilder.endArray()
            res
          case None =>
            deserializationError("Expected JsArray but found None")
        }
    }
  /** A stamp recording the file's last-modified time in epoch milliseconds. */
  final case class LastModified private[sbt] (time: Long) extends FileStamp
  /** A stamp recording that stamping failed with an IOException. */
  final case class Error(exception: IOException) extends FileStamp
}

View File

@ -0,0 +1,141 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.nio
import java.io.InputStream
import java.nio.file.Path
import sbt.BuildSyntax.{ settingKey, taskKey }
import sbt.KeyRanks.{ BMinusSetting, DSetting, Invisible }
import sbt.internal.DynamicInput
import sbt.internal.nio.FileTreeRepository
import sbt.internal.util.AttributeKey
import sbt.internal.util.complete.Parser
import sbt.nio.file.{ ChangedFiles, FileAttributes, FileTreeView, Glob }
import sbt.{ Def, InputKey, State, StateTransform }
import scala.concurrent.duration.FiniteDuration
/**
 * Defines the settings and task keys for sbt's managed file input/output support
 * (`fileInputs`/`fileOutputs`, change detection, and continuous-build watch configuration).
 */
object Keys {
  // file input/output keys
  val allInputFiles =
    taskKey[Seq[Path]]("All of the file inputs for a task excluding directories and hidden files.")
  val changedInputFiles = taskKey[Option[ChangedFiles]]("The changed files for a task")
  val fileInputs = settingKey[Seq[Glob]](
    "The file globs that are used by a task. This setting will generally be scoped per task. It will also be used to determine the sources to watch during continuous execution."
  )
  val inputFileStamper = settingKey[FileStamper](
    "Toggles the file stamping implementation used to determine whether or not a file has been modified."
  )
  val fileOutputs = settingKey[Seq[Glob]]("Describes the output files of a task.")
  val allOutputFiles =
    taskKey[Seq[Path]]("All of the file output for a task excluding directories and hidden files.")
  val changedOutputFiles =
    taskKey[Option[ChangedFiles]]("The files that have changed since the last task run.")
  val outputFileStamper = settingKey[FileStamper](
    "Toggles the file stamping implementation used to determine whether or not a file has been modified."
  )
  val fileTreeView =
    taskKey[FileTreeView.Nio[FileAttributes]]("A view of the local file system tree")
  // watch related settings
  val watchAntiEntropyRetentionPeriod = settingKey[FiniteDuration](
    "Wall clock Duration for which a FileEventMonitor will store anti-entropy events. This prevents spurious triggers when a task takes a long time to run. Higher values will consume more memory but make spurious triggers less likely."
  ).withRank(BMinusSetting)
  val watchDeletionQuarantinePeriod = settingKey[FiniteDuration](
    "Period for which deletion events will be quarantined. This is to prevent spurious builds when a file is updated with a rename which manifests as a file deletion followed by a file creation. The higher this value is set, the longer the delay will be between a file deletion and a build trigger but the less likely it is for a spurious trigger."
  ).withRank(DSetting)
  private[this] val forceTriggerOnAnyChangeMessage =
    "Force the watch process to rerun the current task(s) if any relevant source change is " +
      "detected regardless of whether or not the underlying file has actually changed."
  // watch related keys
  val watchForceTriggerOnAnyChange =
    Def.settingKey[Boolean](forceTriggerOnAnyChangeMessage).withRank(DSetting)
  val watchLogLevel =
    settingKey[sbt.util.Level.Value]("Transform the default logger in continuous builds.")
      .withRank(DSetting)
  val watchInputHandler = settingKey[InputStream => Watch.Action](
    "Function that is periodically invoked to determine if the continuous build should be stopped or if a build should be triggered. It will usually read from stdin to respond to user commands. This is only invoked if watchInputStream is set."
  ).withRank(DSetting)
  val watchInputStream = taskKey[InputStream](
    "The input stream to read for user input events. This will usually be System.in"
  ).withRank(DSetting)
  val watchInputParser = settingKey[Parser[Watch.Action]](
    "A parser of user input that can be used to trigger or exit a continuous build"
  ).withRank(DSetting)
  val watchOnEnter = settingKey[() => Unit](
    "Function to run prior to beginning a continuous build. This will run before the continuous task(s) is(are) first evaluated."
  ).withRank(DSetting)
  val watchOnExit = settingKey[() => Unit](
    "Function to run upon exit of a continuous build. It can be used to cleanup resources used during the watch."
  ).withRank(DSetting)
  val watchOnFileInputEvent = settingKey[(Int, Watch.Event) => Watch.Action](
    "Callback to invoke if an event is triggered in a continuous build by one of the files matching an fileInput glob for the task and its transitive dependencies"
  ).withRank(DSetting)
  val watchOnIteration = settingKey[Int => Watch.Action](
    "Function that is invoked before waiting for file system events or user input events."
  ).withRank(DSetting)
  val watchOnTermination = settingKey[(Watch.Action, String, Int, State) => State](
    "Transforms the state upon completion of a watch. The String argument is the command that was run during the watch. The Int parameter specifies how many times the command was run during the watch."
  ).withRank(DSetting)
  val watchStartMessage = settingKey[(Int, String, Seq[String]) => Option[String]](
    "The message to show when triggered execution waits for sources to change. The parameters are the current watch iteration count, the current project name and the tasks that are being run with each build."
  ).withRank(DSetting)
  // The watchTasks key should really be named watch, but that is already taken by the deprecated watch key. I'd be surprised if there are any plugins that use it so I think we should consider breaking binary compatibility to rename this task.
  val watchTasks = InputKey[StateTransform](
    "watch",
    "Watch a task (or multiple tasks) and rebuild when its file inputs change or user input is received. The semantics are more or less the same as the `~` command except that it cannot transform the state on exit. This means that it cannot be used to reload the build."
  ).withRank(DSetting)
  val watchTrackMetaBuild = settingKey[Boolean](
    s"Toggles whether or not changing the build files (e.g. **/*.sbt, project/**/*.{scala,java}) should automatically trigger a project reload"
  ).withRank(DSetting)
  val watchTriggeredMessage = settingKey[(Int, Path, Seq[String]) => Option[String]](
    "The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count."
  ).withRank(DSetting)
  // internal keys
  private[sbt] val globalFileTreeRepository = AttributeKey[FileTreeRepository[FileAttributes]](
    "global-file-tree-repository",
    "Provides a view into the file system that may or may not cache the tree in memory",
    Int.MaxValue
  )
  private[sbt] val dynamicDependency = settingKey[Unit](
    "Leaves a breadcrumb that the scoped task is evaluated inside of a dynamic task"
  ).withRank(Invisible)
  private[sbt] val transitiveClasspathDependency = settingKey[Unit](
    "Leaves a breadcrumb that the scoped task has transitive classpath dependencies"
  ).withRank(Invisible)
  private[sbt] val transitiveDynamicInputs =
    taskKey[Seq[DynamicInput]]("The transitive inputs and triggers for a key").withRank(Invisible)
  private[sbt] val dynamicFileOutputs =
    taskKey[Seq[Path]]("The outputs of a task").withRank(Invisible)
  private[sbt] val autoClean =
    taskKey[Unit]("Automatically clean up a task returning file or path").withRank(Invisible)
  private[sbt] val inputFileStamps =
    taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task input files")
      .withRank(Invisible)
  private[sbt] val outputFileStamps =
    taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task output files")
      .withRank(Invisible)
  // Mutable map type used to cache file stamps between (and within) task evaluation runs.
  private[sbt] type FileAttributeMap =
    java.util.HashMap[Path, FileStamp]
  private[sbt] val persistentFileAttributeMap =
    AttributeKey[FileAttributeMap]("persistent-file-attribute-map", Int.MaxValue)
  private[sbt] val allInputPathsAndAttributes =
    taskKey[Seq[(Path, FileAttributes)]]("Get all of the file inputs for a task")
      .withRank(Invisible)
  private[sbt] val fileAttributeMap = taskKey[FileAttributeMap](
    "Map of file stamps that may be cleared between task evaluation runs."
  ).withRank(Invisible)
  private[sbt] val pathToFileStamp = taskKey[Path => FileStamp](
    "A function that computes a file stamp for a path. It may have the side effect of updating a cache."
  ).withRank(Invisible)
}

View File

@ -0,0 +1,374 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package nio
import java.io.File
import java.nio.file.{ Files, Path }
import sbt.Project._
import sbt.internal.Clean.ToSeqPath
import sbt.internal.util.{ AttributeKey, SourcePosition }
import sbt.internal.{ Clean, Continuous, DynamicInput, SettingsGraph }
import sbt.nio.FileStamp.{ fileStampJsonFormatter, pathJsonFormatter, _ }
import sbt.nio.FileStamper.{ Hash, LastModified }
import sbt.nio.Keys._
import sbt.nio.file.ChangedFiles
import sbt.std.TaskExtra._
import sjsonnew.JsonFormat
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.immutable.VectorBuilder
private[sbt] object Settings {
/**
 * Rewrites the fully-transformed settings, expanding every `fileInputs` setting and every
 * task returning files/paths with the derived change-detection, stamping and clean tasks.
 *
 * @param transformed the settings after project/scope transformation
 * @return the original settings plus all injected file-tracking settings
 */
private[sbt] def inject(transformed: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
  // Scopes (with a task axis) for which the user explicitly declared fileOutputs.
  val declaredOutputScopes: Set[Scope] = transformed.iterator.collect {
    case s if s.key.key == sbt.nio.Keys.fileOutputs.key && s.key.scope.task.toOption.isDefined =>
      s.key.scope
  }.toSet
  // Accumulates every scope that receives an auto-generated clean implementation.
  val pendingCleanScopes = new java.util.HashSet[Scope].asScala
  val expanded = transformed.flatMap { s =>
    if (s.key.key == sbt.nio.Keys.fileInputs.key) inputPathSettings(s)
    else maybeAddOutputsAndFileStamps(s, declaredOutputScopes, pendingCleanScopes)
  }
  expanded ++ addCleanImpls(pendingCleanScopes.toSeq)
}
/**
 * This method checks if the setting is for a task with a return type in:
 * `File`, `Seq[File]`, `Path`, `Seq[Path]`. If it does, then we inject a number of
 * task definition settings that allow the user to check if the output paths of
 * the task have changed. It also adds a custom clean task that will delete the
 * paths returned by the task, provided that they are in the task's target directory. We also inject these tasks if the fileOutputs setting is defined
 * for the task.
 *
 * @param setting the setting to possibly inject with additional settings
 * @param fileOutputScopes the set of scopes for which the fileOutputs setting is defined
 * @param cleanScopes the set of cleanScopes that we may add this setting's scope
 * @return the injected settings
 */
private[this] def maybeAddOutputsAndFileStamps(
    setting: Def.Setting[_],
    fileOutputScopes: Set[Scope],
    cleanScopes: mutable.Set[Scope]
): Seq[Def.Setting[_]] = {
  // Erasure only gives us the runtime classes, so the return type is recovered from the
  // key's Manifest; the asInstanceOf casts below are justified by the manifest checks.
  setting.key.key match {
    case ak: AttributeKey[_] if taskClass.isAssignableFrom(ak.manifest.runtimeClass) =>
      // Fallback for tasks that do not return files/paths: only inject output tracking
      // when the user declared fileOutputs for this task's scope.
      def default: Seq[Def.Setting[_]] = {
        val scope = setting.key.scope.copy(task = Select(ak))
        if (fileOutputScopes.contains(scope)) {
          val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key
          val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
          cleanScopes.add(scope)
          Vector(
            setting,
            addTaskDefinition {
              // No dynamic outputs: the task's return value is not a file type.
              val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil))
              Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
            }
          ) ++ Vector(
            allOutputPathsImpl(scope),
            outputFileStampsImpl(scope),
            cleanImpl(scope)
          )
        } else setting :: Nil
      }
      ak.manifest.typeArguments match {
        case t :: Nil if seqClass.isAssignableFrom(t.runtimeClass) =>
          t.typeArguments match {
            // Task[Seq[File]]
            case f :: Nil if fileClass.isAssignableFrom(f.runtimeClass) =>
              val sk = setting.asInstanceOf[Def.Setting[Task[Seq[File]]]].key
              val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
              Vector(
                setting,
                addTaskDefinition {
                  val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_.map(_.toPath)))
                  Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
                }
              ) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
            // Task[Seq[Path]]
            case p :: Nil if pathClass.isAssignableFrom(p.runtimeClass) =>
              val sk = setting.asInstanceOf[Def.Setting[Task[Seq[Path]]]].key
              val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
              Vector(
                setting,
                addTaskDefinition {
                  val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(identity))
                  Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
                }
              ) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
            case _ => default
          }
        // Task[File]
        case t :: Nil if fileClass.isAssignableFrom(t.runtimeClass) =>
          val sk = setting.asInstanceOf[Def.Setting[Task[File]]].key
          val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
          Vector(
            setting,
            addTaskDefinition {
              val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_.toPath :: Nil))
              Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
            }
          ) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
        // Task[Path]
        case t :: Nil if pathClass.isAssignableFrom(t.runtimeClass) =>
          val sk = setting.asInstanceOf[Def.Setting[Task[Path]]].key
          val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
          Vector(
            setting,
            addTaskDefinition {
              val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ :: Nil))
              Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
            }
          ) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
        case _ => default
      }
    // Not a task-typed setting: pass it through untouched.
    case _ => setting :: Nil
  }
}
// Derives per-key settings on demand (used during setting compilation): given a scoped key
// for one of the internal file-tracking keys, produces the setting(s) that define it.
private[sbt] val inject: Def.ScopedKey[_] => Seq[Def.Setting[_]] = scopedKey =>
  scopedKey.key match {
    case transitiveDynamicInputs.key =>
      // Re-scope with a Zero task axis so SettingsGraph resolves from the enclosing task key.
      scopedKey.scope.task.toOption.toSeq.map { key =>
        val updatedKey = Def.ScopedKey(scopedKey.scope.copy(task = Zero), key)
        transitiveDynamicInputs in scopedKey.scope := SettingsGraph.task(updatedKey).value
      }
    // Breadcrumb keys: their value is unit; their presence is what matters.
    case dynamicDependency.key => (dynamicDependency in scopedKey.scope := { () }) :: Nil
    case transitiveClasspathDependency.key =>
      (transitiveClasspathDependency in scopedKey.scope := { () }) :: Nil
    case allInputFiles.key => allFilesImpl(scopedKey) :: Nil
    case changedInputFiles.key => changedInputFilesImpl(scopedKey)
    case changedOutputFiles.key =>
      changedFilesImpl(scopedKey, changedOutputFiles, outputFileStamps)
    case pathToFileStamp.key => stamper(scopedKey) :: Nil
    case _ => Nil
  }
/**
 * This method collects all of the automatically generated clean tasks and adds each of them
 * to the clean method scoped by project/config or just project
 *
 * @param scopes the clean scopes that have been automatically generated
 * @return the custom clean tasks
 */
private[this] def addCleanImpls(scopes: Seq[Scope]): Seq[Def.Setting[_]] = {
  // Group task-level scopes under their config-level and project-level clean keys so that
  // e.g. `Compile / clean` and `clean` both depend on every generated task-level clean.
  val configScopes = scopes.groupBy(scope => scope.copy(task = Zero))
  val projectScopes = scopes.groupBy(scope => scope.copy(task = Zero, config = Zero))
  (configScopes ++ projectScopes).map {
    case (scope, cleanScopes) =>
      val dependentKeys = cleanScopes.map(sbt.Keys.clean.in)
      Def.setting(
        sbt.Keys.clean in scope,
        (sbt.Keys.clean in scope).dependsOn(dependentKeys: _*).tag(Tags.Clean),
        SourcePosition.fromEnclosing()
      )
  }.toVector
}
/**
 * This adds the [[sbt.Keys.taskDefinitionKey]] to the work for each [[Task]]. Without
 * this, the previous macro doesn't work correctly because [[Previous]] is unable to
 * reference the task.
 *
 * @param setting the [[Def.Setting]] for which we add the task definition
 * @tparam T the generic type of the task (needed for type checking because [[Task]] is invariant)
 * @return the setting with the task definition
 */
private[this] def addTaskDefinition[T](setting: Def.Setting[Task[T]]): Def.Setting[Task[T]] =
  setting.mapInit((sk, task) => Task(task.info.set(sbt.Keys.taskDefinitionKey, sk), task.work))
/**
 * Returns all of the paths described by a glob along with their basic file attributes.
 * No additional filtering is performed.
 *
 * @param setting the setting whose fileInputs we are seeking
 * @return a task definition that retrieves the file input files and their attributes scoped
 *         to a particular task.
 */
private[sbt] def inputPathSettings(setting: Def.Setting[_]): Seq[Def.Setting[_]] = {
  val scopedKey = setting.key
  setting :: (Keys.allInputPathsAndAttributes in scopedKey.scope := {
    val view = (fileTreeView in scopedKey.scope).value
    val inputs = (fileInputs in scopedKey.scope).value
    val stamper = (inputFileStamper in scopedKey.scope).value
    val forceTrigger = (watchForceTriggerOnAnyChange in scopedKey.scope).value
    val dynamicInputs = Continuous.dynamicInputs.value
    // This makes watch work by ensuring that the input glob is registered with the
    // repository used by the watch process.
    sbt.Keys.state.value.get(globalFileTreeRepository).foreach { repo =>
      inputs.foreach(repo.register)
    }
    // Record the globs so a running continuous build can watch them with the right stamper.
    dynamicInputs.foreach(_ ++= inputs.map(g => DynamicInput(g, stamper, forceTrigger)))
    view.list(inputs)
  }) :: fileStamps(scopedKey) :: allFilesImpl(scopedKey) :: Nil
}
// Runtime classes used for erasure-based return-type checks in maybeAddOutputsAndFileStamps.
private[this] val taskClass = classOf[Task[_]]
private[this] val seqClass = classOf[Seq[_]]
private[this] val fileClass = classOf[java.io.File]
private[this] val pathClass = classOf[java.nio.file.Path]
/**
 * Returns all of the paths for the regular files described by a glob. Directories and hidden
 * files are excluded.
 *
 * @param scopedKey the key whose file inputs we are seeking
 * @return a task definition that retrieves all of the input paths scoped to the input key.
 */
private[this] def allFilesImpl(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
  addTaskDefinition(Keys.allInputFiles in scopedKey.scope := {
    (Keys.allInputPathsAndAttributes in scopedKey.scope).value.collect {
      case (p, a) if a.isRegularFile && !Files.isHidden(p) => p
    }
  })
/**
 * Returns all of the regular files whose stamp has changed since the last time the
 * task was evaluated. The result includes new and modified files but not deleted
 * files or files whose stamp has not changed since the previous run. Directories and hidden
 * files are excluded
 *
 * @param scopedKey the key whose fileInputs we are seeking
 * @return a task definition that retrieves the changed input files scoped to the key.
 */
private[this] def changedInputFilesImpl(scopedKey: Def.ScopedKey[_]): Seq[Def.Setting[_]] =
  changedFilesImpl(scopedKey, changedInputFiles, inputFileStamps) ::
    (watchForceTriggerOnAnyChange in scopedKey.scope := {
      // Preserve a user-provided value; default to false when the key is unset.
      // (getOrElse replaces the equivalent Some/None pattern match.)
      (watchForceTriggerOnAnyChange in scopedKey.scope).?.value.getOrElse(false)
    }) :: Nil
/**
 * Builds a task that diffs the current file stamps produced by `stampKey` against the
 * stamps recorded by the previous evaluation (via `.previous`) and summarizes the
 * created, deleted and updated paths.
 *
 * @param scopedKey the key whose scope the injected task is defined in
 * @param changeKey the key under which the change summary is published
 * @param stampKey the key providing the current (path, stamp) pairs
 * @return a setting defining the change-detection task; the task yields `None` when there
 *         are no previous stamps or when nothing changed.
 */
private[this] def changedFilesImpl(
    scopedKey: Def.ScopedKey[_],
    changeKey: TaskKey[Option[ChangedFiles]],
    stampKey: TaskKey[Seq[(Path, FileStamp)]]
): Def.Setting[_] =
  addTaskDefinition(changeKey in scopedKey.scope := {
    val current = (stampKey in scopedKey.scope).value
    (stampKey in scopedKey.scope).previous match {
      case Some(previous) =>
        val createdBuilder = new VectorBuilder[Path]
        val deletedBuilder = new VectorBuilder[Path]
        val updatedBuilder = new VectorBuilder[Path]
        val currentMap = current.toMap
        val prevMap = previous.toMap
        current.foreach {
          case (path, currentStamp) =>
            prevMap.get(path) match {
              case Some(oldStamp) => if (oldStamp != currentStamp) updatedBuilder += path
              case None => createdBuilder += path
            }
        }
        previous.foreach {
          case (path, _) =>
            // contains avoids allocating an Option per path (was get(path).isEmpty)
            if (!currentMap.contains(path)) deletedBuilder += path
        }
        val created = createdBuilder.result()
        val deleted = deletedBuilder.result()
        val updated = updatedBuilder.result()
        if (created.isEmpty && deleted.isEmpty && updated.isEmpty) {
          None
        } else {
          val cf = ChangedFiles(created = created, deleted = deleted, updated = updated)
          Some(cf)
        }
      case None => None
    }
  })
/**
 * Provides an automatically generated clean method for a task that provides fileOutputs.
 *
 * @param scope the scope to add the custom clean
 * @return a task specific clean implementation
 */
private[sbt] def cleanImpl(scope: Scope): Def.Setting[_] = addTaskDefinition {
  sbt.Keys.clean in scope := Clean.task(scope, full = false).value
}
/**
 * Provides an automatically generated clean method for a task that provides fileOutputs.
 *
 * @param taskKey the task for which we add a custom clean implementation
 * @return a task specific clean implementation
 */
private[sbt] def cleanImpl[T: JsonFormat: ToSeqPath](taskKey: TaskKey[T]): Seq[Def.Setting[_]] = {
  val taskScope = taskKey.scope in taskKey.key
  addTaskDefinition(sbt.Keys.clean in taskScope := Def.taskDyn {
    // the clean file task needs to run first because the previous cache gets blown away
    // by the second task
    Clean.cleanFileOutputTask(taskKey).value
    Clean.task(taskScope, full = false)
  }.value)
}
/**
 * Returns all of the regular files and the corresponding file stamps for the file inputs
 * scoped to the input key. Directories and hidden files are excluded.
 *
 * @param scopedKey the key whose fileInputs we are seeking
 * @return a task definition that retrieves the input files and their file stamps scoped to the
 *         input key.
 */
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
  addTaskDefinition(Keys.inputFileStamps in scopedKey.scope := {
    // pathToFileStamp caches stamps for the duration of the task evaluation
    val stamper = (Keys.pathToFileStamp in scopedKey.scope).value
    (Keys.allInputPathsAndAttributes in scopedKey.scope).value.collect {
      // keep only regular, non-hidden files: directories and dotfiles are not inputs
      case (p, a) if a.isRegularFile && !Files.isHidden(p) => p -> stamper(p)
    }
  })
/**
 * Generates the file-output related settings for a task: the task that lists all of its
 * output paths, the task that stamps those outputs, and a task-scoped clean implementation.
 *
 * Side effect: the task's scope is recorded in `cleanScopes` so it can be visited later.
 *
 * @param taskKey     the task whose outputs are being wired up
 * @param cleanScopes the accumulator of scopes that need a clean task
 * @return the settings for output listing, output stamping and clean
 */
private[this] def outputsAndStamps[T: JsonFormat: ToSeqPath](
    taskKey: TaskKey[T],
    cleanScopes: mutable.Set[Scope]
): Seq[Def.Setting[_]] = {
  val taskScope = taskKey.scope in taskKey.key
  cleanScopes += taskScope
  val outputSettings = Vector(allOutputPathsImpl(taskScope), outputFileStampsImpl(taskScope))
  outputSettings ++ cleanImpl(taskKey)
}
/**
 * Defines `allOutputFiles` in `scope`: the files matched by the scope's declared
 * `fileOutputs` globs plus any `dynamicFileOutputs` not already covered by those globs.
 */
private[this] def allOutputPathsImpl(scope: Scope): Def.Setting[_] =
  addTaskDefinition(allOutputFiles in scope := {
    val fileOutputGlobs = (fileOutputs in scope).value
    val allFileOutputs = fileTreeView.value.list(fileOutputGlobs).map(_._1)
    val dynamicOutputs = (dynamicFileOutputs in scope).value
    // filterNot prevents listing a dynamic output twice when a declared glob already matches it
    allFileOutputs ++ dynamicOutputs.filterNot(p => fileOutputGlobs.exists(_.matches(p)))
  })
/**
 * Defines `outputFileStamps` in `scope`: pairs each output file with a stamp computed by
 * the scope's configured stamping strategy (last-modified time or content hash).
 */
private[this] def outputFileStampsImpl(scope: Scope): Def.Setting[_] =
  addTaskDefinition(outputFileStamps in scope := {
    // choose the stamping function based on the configured FileStamper for this scope
    val stamper: Path => FileStamp = (outputFileStamper in scope).value match {
      case LastModified => FileStamp.lastModified
      case Hash => FileStamp.hash
    }
    (allOutputFiles in scope).value.map(p => p -> stamper(p))
  })
/**
 * Returns a function from `Path` to [[FileStamp]] that can be used by tasks to retrieve
 * the stamp for a file. It has the side effect of stamping the file if it has not already
 * been stamped during the task evaluation.
 *
 * @return a task definition for a function from `Path` to [[FileStamp]].
 */
private[this] def stamper(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
  addTaskDefinition((Keys.pathToFileStamp in scopedKey.scope) := {
    // NOTE(review): the `case null` below implies fileAttributeMap is a Java-style map
    // whose get returns null on a miss — confirm against the Keys.fileAttributeMap type.
    val attributeMap = Keys.fileAttributeMap.value
    val stamper = (Keys.inputFileStamper in scopedKey.scope).value
    path: Path =>
      attributeMap.get(path) match {
        case null =>
          // cache miss: compute the stamp with the configured strategy and memoize it
          val stamp = stamper match {
            case Hash => FileStamp.hash(path)
            case LastModified => FileStamp.lastModified(path)
          }
          attributeMap.put(path, stamp)
          stamp
        case s => s
      }
  })
}

View File

@ -5,16 +5,22 @@
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
import java.io.InputStream
package sbt.nio
import java.nio.file.Path
import java.time.format.{ DateTimeFormatter, TextStyle }
import java.time.{ Instant, ZoneId, ZonedDateTime }
import java.util.Locale
import java.util.concurrent.TimeUnit
import sbt.BasicCommandStrings.ContinuousExecutePrefix
import sbt.internal.FileAttributes
import sbt._
import sbt.internal.LabeledFunctions._
import sbt.internal.util.{ JLine, Util }
import sbt.internal.nio.FileEvent
import sbt.internal.util.Util
import sbt.internal.util.complete.Parser
import sbt.internal.util.complete.Parser._
import sbt.io.FileEventMonitor.{ Creation, Deletion, Event, Update }
import sbt.nio.file.FileAttributes
import sbt.util.{ Level, Logger }
import scala.annotation.tailrec
@ -23,6 +29,95 @@ import scala.util.control.NonFatal
object Watch {
/**
* Represents a file event that has been detected during a continuous build.
*/
sealed trait Event {
/**
* The path that triggered the event.
*
* @return the path that triggered the event.
*/
def path: Path
/**
* The time specified in milliseconds from the epoch at which this event occurred.
*
* @return the time at which the event occurred.
*/
def occurredAt: FiniteDuration
}
private[this] val formatter = DateTimeFormatter.ofPattern("yyyy-MMM-dd HH:mm:ss.SSS")
private[this] val timeZone = ZoneId.systemDefault
private[this] val timeZoneName = timeZone.getDisplayName(TextStyle.SHORT, Locale.getDefault)
private[this] implicit class DurationOps(val d: Duration) extends AnyVal {
def finite: FiniteDuration = d match {
case f: FiniteDuration => f
case _ => new FiniteDuration(Long.MaxValue, TimeUnit.MILLISECONDS)
}
def toEpochString: String = {
val zdt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(d.toMillis), timeZone)
s"${formatter.format(zdt)} $timeZoneName"
}
}
private[sbt] implicit class EventOps(val event: Event) extends AnyVal {
def toEpochString: String = event.occurredAt.toEpochString
}
private[sbt] object Event {
trait Impl { self: Event =>
private val name = self.getClass.getSimpleName
override def equals(o: Any): Boolean = o match {
case that: Event => this.path == that.path
case _ => false
}
override def hashCode: Int = path.hashCode
override def toString: String = s"$name($path)"
}
def fromIO(fileEvent: FileEvent[FileAttributes]): Watch.Event = fileEvent match {
case c @ FileEvent.Creation(p, _) => new Watch.Creation(p, c.occurredAt.value.finite)
case d @ FileEvent.Deletion(p, _) => new Watch.Deletion(p, d.occurredAt.value.finite)
case u @ FileEvent.Update(p, _, _) =>
new Watch.Update(p, u.occurredAt.value.finite)
}
}
final class Creation private[sbt] (
override val path: Path,
override val occurredAt: FiniteDuration
) extends Event
with Event.Impl {
override def toString: String = s"Creation($path, ${occurredAt.toEpochString})"
}
object Creation {
def apply(event: FileEvent[FileAttributes]): Creation =
new Creation(event.path, event.occurredAt.value.finite)
def unapply(creation: Creation): Option[Path] = Some(creation.path)
}
final class Deletion private[sbt] (
override val path: Path,
override val occurredAt: FiniteDuration
) extends Event
with Event.Impl {
override def toString: String = s"Deletion($path, ${occurredAt.toEpochString})"
}
object Deletion {
def apply(event: FileEvent[FileAttributes]): Deletion =
new Deletion(event.path, event.occurredAt.value.finite)
def unapply(deletion: Deletion): Option[Path] = Some(deletion.path)
}
/**
 * A [[Watch]] event indicating that an existing file changed.
 *
 * @param path       the path that triggered the event
 * @param occurredAt the time (duration since the epoch) at which the event occurred
 */
final class Update private[sbt] (
    override val path: Path,
    override val occurredAt: FiniteDuration
) extends Event
    with Event.Impl {
  // Fix: the interpolator was missing the '$' prefix, so the literal string "path"
  // was printed instead of the actual path (Creation/Deletion interpolate correctly).
  override def toString: String = s"Update($path, ${occurredAt.toEpochString})"
}
object Update {
  /** Converts a low-level file event into a watch [[Update]]. */
  def apply(event: FileEvent[FileAttributes]): Update =
    new Update(event.path, event.occurredAt.value.finite)
  /** Extracts the path, mirroring the equality contract of [[Event.Impl]]. */
  def unapply(update: Update): Option[Path] = Some(update.path)
}
/**
* This trait is used to control the state of [[Watch.apply]]. The [[Watch.Trigger]] action
* indicates that [[Watch.apply]] should re-run the input task. The [[Watch.CancelWatch]]
@ -227,42 +322,21 @@ object Watch {
*/
@inline
private[sbt] def aggregate(
events: Seq[(Action, Event[FileAttributes])]
): Option[(Action, Event[FileAttributes])] =
events: Seq[(Action, Event)]
): Option[(Action, Event)] =
if (events.isEmpty) None else Some(events.minBy(_._1))
private implicit class StringToExec(val s: String) extends AnyVal {
def toExec: Exec = Exec(s, None)
}
private[sbt] def withCharBufferedStdIn[R](f: InputStream => R): R =
if (!Util.isWindows) JLine.usingTerminal { terminal =>
terminal.init()
val in = terminal.wrapInIfNeeded(System.in)
try {
f(in)
} finally {
terminal.reset()
}
} else
f(System.in)
/**
* A constant function that returns [[Trigger]].
*/
final val trigger: (Int, Event[FileAttributes]) => Watch.Action = {
(_: Int, _: Event[FileAttributes]) =>
Trigger
final val trigger: (Int, Event) => Watch.Action = { (_: Int, _: Event) =>
Trigger
}.label("Watched.trigger")
def ifChanged(action: Action): (Int, Event[FileAttributes]) => Watch.Action =
(_: Int, event: Event[FileAttributes]) =>
event match {
case Update(prev, cur, _) if prev.value != cur.value => action
case _: Creation[_] | _: Deletion[_] => action
case _ => Ignore
}
/**
* The minimum delay between build triggers for the same file. If the file is detected
* to have changed within this period from the last build trigger, the event will be discarded.
@ -369,14 +443,14 @@ object Watch {
* `Keys.watchTriggeredMessage := Watched.defaultOnTriggerMessage`, then nothing is logged when
* a build is triggered.
*/
final val defaultOnTriggerMessage: (Int, Event[FileAttributes], Seq[String]) => Option[String] =
((_: Int, e: Event[FileAttributes], commands: Seq[String]) => {
val msg = s"Build triggered by ${e.entry.typedPath.toPath}. " +
final val defaultOnTriggerMessage: (Int, Path, Seq[String]) => Option[String] =
((_: Int, path: Path, commands: Seq[String]) => {
val msg = s"Build triggered by $path. " +
s"Running ${commands.mkString("'", "; ", "'")}."
Some(msg)
}).label("Watched.defaultOnTriggerMessage")
final val noTriggerMessage: (Int, Event[FileAttributes], Seq[String]) => Option[String] =
final val noTriggerMessage: (Int, Path, Seq[String]) => Option[String] =
(_, _, _) => None
/**

View File

@ -12,12 +12,13 @@ import java.nio.file.{ Files, Path }
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }
import org.scalatest.{ FlatSpec, Matchers }
import sbt.Watch.{ NullLogger, _ }
import sbt.WatchSpec._
import sbt.internal.FileAttributes
import sbt.io.FileEventMonitor.Event
import sbt.internal.nio.{ FileEvent, FileEventMonitor, FileTreeRepository }
import sbt.io._
import sbt.io.syntax._
import sbt.nio.Watch
import sbt.nio.Watch.{ NullLogger, _ }
import sbt.nio.file.{ FileAttributes, Glob }
import sbt.util.Logger
import scala.collection.mutable
@ -30,31 +31,26 @@ class WatchSpec extends FlatSpec with Matchers {
object TestDefaults {
def callbacks(
inputs: Seq[Glob],
fileEventMonitor: Option[FileEventMonitor[FileAttributes]] = None,
fileEventMonitor: Option[FileEventMonitor[FileEvent[FileAttributes]]] = None,
logger: Logger = NullLogger,
parseEvent: () => Watch.Action = () => Ignore,
onStartWatch: () => Watch.Action = () => CancelWatch: Watch.Action,
onWatchEvent: Event[FileAttributes] => Watch.Action = _ => Ignore,
triggeredMessage: Event[FileAttributes] => Option[String] = _ => None,
onWatchEvent: FileEvent[FileAttributes] => Watch.Action = _ => Ignore,
triggeredMessage: FileEvent[FileAttributes] => Option[String] = _ => None,
watchingMessage: () => Option[String] = () => None
): (NextAction, NextAction) = {
val monitor = fileEventMonitor.getOrElse {
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
val monitor: FileEventMonitor[FileEvent[FileAttributes]] = fileEventMonitor.getOrElse {
val fileTreeRepository = FileTreeRepository.default
inputs.foreach(fileTreeRepository.register)
val m =
FileEventMonitor.antiEntropy(
fileTreeRepository,
50.millis,
m => logger.debug(m.toString),
50.millis,
10.minutes
)
new FileEventMonitor[FileAttributes] {
override def poll(duration: Duration): Seq[Event[FileAttributes]] = m.poll(duration)
override def close(): Unit = m.close()
}
FileEventMonitor.antiEntropy(
fileTreeRepository,
50.millis,
m => logger.debug(m.toString),
50.millis,
10.minutes
)
}
val onTrigger: Event[FileAttributes] => Unit = event => {
val onTrigger: FileEvent[FileAttributes] => Unit = event => {
triggeredMessage(event).foreach(logger.info(_))
}
val onStart: () => Watch.Action = () => {
@ -63,7 +59,7 @@ class WatchSpec extends FlatSpec with Matchers {
}
val nextAction: NextAction = () => {
val inputAction = parseEvent()
val fileActions = monitor.poll(10.millis).map { e: Event[FileAttributes] =>
val fileActions = monitor.poll(10.millis).map { e: FileEvent[FileAttributes] =>
onWatchEvent(e) match {
case Trigger => onTrigger(e); Trigger
case action => action
@ -113,8 +109,8 @@ class WatchSpec extends FlatSpec with Matchers {
val callbacks = TestDefaults.callbacks(
inputs = Seq(realDir ** AllPassFilter),
onStartWatch = () => if (task.getCount == 2) CancelWatch else Ignore,
onWatchEvent = e => if (e.entry.typedPath.toPath == foo) Trigger else Ignore,
triggeredMessage = e => { queue += e.entry.typedPath.toPath; None },
onWatchEvent = e => if (e.path == foo) Trigger else Ignore,
triggeredMessage = e => { queue += e.path; None },
watchingMessage = () => {
IO.touch(bar.toFile); Thread.sleep(5); IO.touch(foo.toFile)
None
@ -132,8 +128,8 @@ class WatchSpec extends FlatSpec with Matchers {
val callbacks = TestDefaults.callbacks(
inputs = Seq(realDir ** AllPassFilter),
onStartWatch = () => if (task.getCount == 3) CancelWatch else Ignore,
onWatchEvent = _ => Trigger,
triggeredMessage = e => { queue += e.entry.typedPath.toPath; None },
onWatchEvent = e => if (e.path != realDir.toPath) Trigger else Ignore,
triggeredMessage = e => { queue += e.path; None },
watchingMessage = () => {
task.getCount match {
case 1 => Files.createFile(bar)

View File

@ -0,0 +1,50 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal
import java.nio.file.{ Path, Paths }
import org.scalatest.FlatSpec
import sbt.nio.FileStamp
import sbt.nio.FileStamp._
import sjsonnew.support.scalajson.unsafe.Converter
/** Round-trip serialization tests for the [[FileStamp]] JSON formatters. */
class FileStampJsonSpec extends FlatSpec {
  "file hashes" should "be serializable" in {
    val hashes = Seq("foo" -> "bar", "bar" -> "buzz").map {
      case (name, content) => Paths.get(name) -> FileStamp.hash(content)
    }
    val roundTripped =
      Converter.fromJsonUnsafe(Converter.toJsonUnsafe(hashes)(fileHashJsonFormatter))(
        fileHashJsonFormatter
      )
    assert(hashes == roundTripped)
  }
  "file last modified times" should "be serializable" in {
    val lastModifiedTimes = Seq(
      Paths.get("foo") -> FileStamp.LastModified(1234),
      Paths.get("bar") -> FileStamp.LastModified(5678)
    )
    val roundTripped =
      Converter.fromJsonUnsafe(
        Converter.toJsonUnsafe(lastModifiedTimes)(fileLastModifiedJsonFormatter)
      )(fileLastModifiedJsonFormatter)
    assert(lastModifiedTimes == roundTripped)
  }
  "both" should "be serializable" in {
    val hashes = Seq(
      Paths.get("foo") -> FileStamp.hash("bar"),
      Paths.get("bar") -> FileStamp.hash("buzz")
    )
    val lastModifiedTimes = Seq(
      Paths.get("foo") -> FileStamp.LastModified(1234),
      Paths.get("bar") -> FileStamp.LastModified(5678)
    )
    // mixing both stamp variants exercises the combined fileStampJsonFormatter
    val both: Seq[(Path, FileStamp)] = hashes ++ lastModifiedTimes
    val roundTripped =
      Converter.fromJsonUnsafe(Converter.toJsonUnsafe(both)(fileStampJsonFormatter))(
        fileStampJsonFormatter
      )
    assert(both.sameElements(roundTripped))
  }
}

View File

@ -9,7 +9,7 @@ object Dependencies {
val baseScalaVersion = scala212
// sbt modules
private val ioVersion = "1.3.0-M7"
private val ioVersion = "1.3.0-M9"
private val utilVersion = "1.3.0-M6"
private val lmVersion =
sys.props.get("sbt.build.lm.version") match {

View File

@ -5,6 +5,10 @@
* Licensed under Apache License 2.0 (see LICENSE)
*/
import sbt.nio.FileStamp
import sjsonnew.JsonFormat
import java.nio.file.{ Path => NioPath }
import scala.language.experimental.macros
package object sbt
@ -21,8 +25,7 @@ package object sbt
with sbt.BuildSyntax
with sbt.OptionSyntax
with sbt.SlashSyntax
with sbt.Import
with sbt.internal.GlobListers {
with sbt.Import {
// IO
def uri(s: String): URI = new URI(s)
def file(s: String): File = new File(s)
@ -30,7 +33,12 @@ package object sbt
implicit def fileToRichFile(file: File): sbt.io.RichFile = new sbt.io.RichFile(file)
implicit def filesToFinder(cc: Traversable[File]): sbt.io.PathFinder =
sbt.io.PathFinder.strict(cc)
implicit val fileStampJsonFormatter: JsonFormat[Seq[(NioPath, FileStamp)]] =
FileStamp.fileStampJsonFormatter
implicit val pathJsonFormatter: JsonFormat[Seq[NioPath]] = FileStamp.pathJsonFormatter
implicit val fileJsonFormatter: JsonFormat[Seq[File]] = FileStamp.fileJsonFormatter
implicit val singlePathJsonFormatter: JsonFormat[NioPath] = FileStamp.pathJson
implicit val singleFileJsonFormatter: JsonFormat[File] = FileStamp.fileJson
// others
object CompileOrder {

View File

@ -12,7 +12,7 @@ private[sbt] trait IOSyntax0 extends IOSyntax1 {
override def |(g: A => Option[B]): A => Option[B] = (a: A) => f(a) orElse g(a)
}
}
private[sbt] trait IOSyntax1 extends sbt.io.IOSyntax
private[sbt] trait Alternative[A, B] {
private[sbt] sealed trait IOSyntax1 extends sbt.io.IOSyntax with sbt.nio.file.syntax0
private[sbt] sealed trait Alternative[A, B] {
def |(g: A => Option[B]): A => Option[B]
}

View File

@ -42,7 +42,6 @@ trait Import {
val ExistsFileFilter = sbt.io.ExistsFileFilter
val FileFilter = sbt.io.FileFilter
type FileFilter = sbt.io.FileFilter
type Glob = sbt.io.Glob
val GlobFilter = sbt.io.GlobFilter
val Hash = sbt.io.Hash
val HiddenFileFilter = sbt.io.HiddenFileFilter
@ -61,6 +60,18 @@ trait Import {
type WatchSource = sbt.internal.io.Source
val WatchSource = sbt.internal.io.Source
// sbt.nio
val ** = sbt.nio.file.**
val * = sbt.nio.file.*
val AnyPath = sbt.nio.file.AnyPath
type ChangedFiles = sbt.nio.file.ChangedFiles
val ChangedFiles = sbt.nio.file.ChangedFiles
type Glob = sbt.nio.file.Glob
val Glob = sbt.nio.file.Glob
type RelativeGlob = sbt.nio.file.RelativeGlob
val RelativeGlob = sbt.nio.file.RelativeGlob
val RecursiveGlob = sbt.nio.file.RecursiveGlob
// sbt.util
type AbstractLogger = sbt.util.AbstractLogger
type BasicCache[I, O] = sbt.util.BasicCache[I, O]

View File

@ -1,6 +1,10 @@
import sbt.nio.file.Glob
cleanKeepFiles ++= Seq(
target.value / "keep",
target.value / "keepfile"
)
cleanKeepGlobs += target.value / "keepdir" ** AllPassFilter
// This is necessary because recursive globs do not include the base directory.
cleanKeepGlobs += Glob(target.value / "keepdir")

View File

@ -1,7 +1,9 @@
import sbt.nio.file.Glob
Compile / sourceGenerators += Def.task {
val files = Seq(sourceManaged.value / "foo.txt", sourceManaged.value / "bar.txt")
files.foreach(IO.touch(_))
files
}
cleanKeepGlobs += (sourceManaged.value / "bar.txt").toGlob
cleanKeepGlobs += Glob(sourceManaged.value, "bar.txt")

View File

@ -1,2 +1,4 @@
import sbt.nio.file.Glob
cleanKeepGlobs in Compile +=
((classDirectory in Compile in compile).value / "X.class").toGlob
Glob((classDirectory in Compile in compile).value, "X.class")

View File

@ -16,8 +16,8 @@ val snapshot = (project in file(".")).settings(
libraryDependencies += "sbt" %% "foo-lib" % "0.1.0-SNAPSHOT",
rewriteIvy := {
val dir = Def.spaceDelimited().parsed.head
sbt.IO.delete(file("ivy"))
sbt.IO.copyDirectory(file(s"libraries/library-$dir/ivy"), file("ivy"))
sbt.IO.delete(baseDirectory.value / "ivy")
sbt.IO.copyDirectory(baseDirectory.value / s"libraries/library-$dir/ivy", baseDirectory.value / "ivy")
Files.walk(file("ivy").getCanonicalFile.toPath).iterator.asScala.foreach { f =>
Files.setLastModifiedTime(f, FileTime.fromMillis(System.currentTimeMillis + 3000))
}

View File

@ -1 +0,0 @@
val root = Build.root

View File

@ -1,33 +0,0 @@
import java.nio.file.{ Path, Paths }
import sbt._
import sbt.io.Glob
import sbt.Keys._
object Build {
val simpleTest = taskKey[Unit]("Check that glob file selectors work")
val relativeSubdir = Paths.get("subdir")
val relativeFiles =
Seq(Paths.get("foo.txt"), Paths.get("bar.json"), relativeSubdir.resolve("baz.yml"))
val files = taskKey[Path]("The files subdirectory")
val subdir = taskKey[Path]("The subdir path in the files subdirectory")
val allFiles = taskKey[Seq[Path]]("Returns all of the regular files in the files subdirectory")
private def check(actual: Any, expected: Any): Unit =
if (actual != expected) throw new IllegalStateException(s"$actual did not equal $expected")
val root = (project in file("."))
.settings(
files := (baseDirectory.value / "files").toPath,
subdir := files.value.resolve("subdir"),
allFiles := {
val f = files.value
relativeFiles.map(f.resolve(_))
},
simpleTest := {
val allPaths: Glob = files.value.allPaths
val af = allFiles.value.toSet
val sub = subdir.value
check(allPaths.all.map(_._1).toSet, af + sub)
check(allPaths.all.filter(_._2.isRegularFile).map(_._1).toSet, af)
check(allPaths.all.filter(_._2.isDirectory).map(_._1).toSet, Set(sub))
}
)
}

View File

@ -1 +0,0 @@
> simpleTest

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,43 @@
import java.nio.file.Path
import sjsonnew.BasicJsonProtocol._
val copyFile = taskKey[Int]("dummy task")
copyFile / fileInputs += baseDirectory.value.toGlob / "base" / "*.txt"
copyFile / fileOutputs += baseDirectory.value.toGlob / "out" / "*.txt"
copyFile / target := baseDirectory.value / "out"
copyFile := Def.task {
val prev = copyFile.previous
val changes: Option[Seq[Path]] = (copyFile / changedInputFiles).value.map {
case ChangedFiles(c, _, u) => c ++ u
}
prev match {
case Some(v: Int) if changes.isEmpty => v
case _ =>
changes.getOrElse((copyFile / allInputFiles).value).foreach { p =>
val outDir = baseDirectory.value / "out"
IO.createDirectory(outDir)
IO.copyFile(p.toFile, outDir / p.getFileName.toString)
}
prev.map(_ + 1).getOrElse(1)
}
}.value
val checkOutDirectoryIsEmpty = taskKey[Unit]("validates that the output directory is empty")
checkOutDirectoryIsEmpty := {
assert(fileTreeView.value.list(baseDirectory.value.toGlob / "out" / **).isEmpty)
}
// Fix: the description was copy-pasted from checkOutDirectoryIsEmpty; this task actually
// asserts that the output directory contains exactly Foo.txt.
val checkOutDirectoryHasFile = taskKey[Unit]("validates that the output directory contains Foo.txt")
checkOutDirectoryHasFile := {
  val result = fileTreeView.value.list(baseDirectory.value.toGlob / "out" / **).map(_._1.toFile)
  assert(result == Seq(baseDirectory.value / "out" / "Foo.txt"))
}
val checkCount = inputKey[Unit]("Check that the expected number of evaluations have run.")
checkCount := Def.inputTask {
val expected = Def.spaceDelimited("").parsed.head.toInt
val previous = copyFile.previous.getOrElse(0)
assert(previous == expected)
}.evaluated

View File

@ -0,0 +1 @@
bar

View File

@ -0,0 +1,3 @@
// NOTE(review): appears to be a scripted-test fixture providing a mutable counter;
// the `var` is intentional here — confirm against the scripted test that uses it.
object Count {
  var value = 0
}

View File

@ -0,0 +1,59 @@
> checkOutDirectoryIsEmpty
> copyFile
> checkOutDirectoryHasFile
> checkCount 1
> copyFile
> checkOutDirectoryHasFile
> checkCount 1
> copyFile / clean
> checkOutDirectoryIsEmpty
> copyFile
> checkOutDirectoryHasFile
> checkCount 1
$ copy-file changes/Foo.txt base/Foo.txt
> copyFile
> checkOutDirectoryHasFile
> checkCount 2
> clean
> checkOutDirectoryIsEmpty
> copyFile
> checkOutDirectoryHasFile
> checkCount 1
> copyFile
> checkOutDirectoryHasFile
> checkCount 1
> copyFile / clean
> checkOutDirectoryIsEmpty
> checkCount 0
> copyFile / allOutputFiles
> checkCount 1
> checkOutDirectoryHasFile

View File

@ -0,0 +1 @@
### Bar

View File

@ -0,0 +1,11 @@
import sbt.nio.Keys._
val fileInputTask = taskKey[Unit]("task with file inputs")
fileInputTask / fileInputs += Glob(baseDirectory.value / "base", "*.md")
fileInputTask := Def.taskDyn {
if ((fileInputTask / changedInputFiles).value.fold(false)(_.updated.nonEmpty))
Def.task(assert(true))
else Def.task(assert(false))
}.value

View File

@ -0,0 +1 @@
### new bar

View File

@ -0,0 +1,5 @@
-> fileInputTask
$ copy-file changes/Bar.md base/Bar.md
> fileInputTask

View File

@ -0,0 +1,24 @@
import java.nio.file.{ Path, Paths }
val foo = taskKey[Seq[Path]]("Copy files")
foo / fileInputs += baseDirectory.value.toGlob / "base" / "*.txt"
foo / target := baseDirectory.value / "out"
foo := {
val out = baseDirectory.value / "out"
((foo / allInputFiles).value: Seq[Path]).map { p =>
val f = p.toFile
val target = out / f.getName
IO.copyFile (f, target)
target.toPath
}
}
val checkOutputFiles = inputKey[Unit]("check output files")
checkOutputFiles := {
val actual: Seq[Path] =
fileTreeView.value.list(baseDirectory.value.toGlob / "out" / **).map(_._1.getFileName).toList
Def.spaceDelimited("").parsed.head match {
case "empty" => assert(actual.isEmpty)
case fileName => assert(actual == Paths.get(fileName) :: Nil)
}
}

View File

@ -0,0 +1,11 @@
> foo / clean
> checkOutputFiles empty
> foo
> checkOutputFiles foo.txt
> foo / clean
> checkOutputFiles empty

View File

@ -0,0 +1 @@
### Bar

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,41 @@
import sbt.nio.Keys._
val foo = taskKey[Unit]("foo")
foo / fileInputs := Seq(
(baseDirectory.value / "base").toGlob / "*.md",
(baseDirectory.value / "base").toGlob / "*.txt",
)
val checkModified = taskKey[Unit]("check that modified files are returned")
checkModified := Def.taskDyn {
val modified = (foo / changedInputFiles).value.map(_.updated).getOrElse(Nil)
val allFiles = (foo / allInputFiles).value
if (modified.isEmpty) Def.task(assert(true))
else Def.task {
assert(modified != allFiles)
assert(modified == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
}
}.value
val checkRemoved = taskKey[Unit]("check that removed files are returned")
checkRemoved := Def.taskDyn {
val files = (foo / allInputFiles).value
val removed = (foo / changedInputFiles).value.map(_.deleted).getOrElse(Nil)
if (removed.isEmpty) Def.task(assert(true))
else Def.task {
assert(files == Seq((baseDirectory.value / "base" / "Foo.txt").toPath))
assert(removed == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
}
}.value
// Fix: the description was copy-pasted from checkModified; this task checks created files.
val checkAdded = taskKey[Unit]("check that added files are returned")
checkAdded := Def.taskDyn {
  val files = (foo / allInputFiles).value
  val added = (foo / changedInputFiles).value.map(_.created).getOrElse(Nil)
  // the first evaluation reports every input as created, so treat that case as a pass too
  if (added.isEmpty || files.sameElements(added)) Def.task(assert(true))
  else Def.task {
    val base = baseDirectory.value / "base"
    assert(files.sameElements(Seq("Bar.md", "Foo.txt").map(p => (base / p).toPath)))
    assert(added == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
  }
}.value

View File

@ -0,0 +1 @@
### Bar updated

View File

@ -0,0 +1 @@
### Bar

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,17 @@
> checkModified
$ copy-file changes/Bar.md base/Bar.md
> checkModified
> checkRemoved
$ delete base/Bar.md
> checkRemoved
> checkAdded
$ copy-file original/Bar.md base/Bar.md
> checkAdded

View File

@ -5,7 +5,7 @@ val foo = taskKey[Seq[File]]("Retrieve Foo.txt")
foo / fileInputs += baseDirectory.value ** "*.txt"
foo := (foo / fileInputs).value.all.map(_._1.toFile)
foo := (foo / allInputFiles).value.map(_.toFile)
val checkFoo = taskKey[Unit]("Check that the Foo.txt file is retrieved")
@ -16,7 +16,7 @@ val bar = taskKey[Seq[File]]("Retrieve Bar.md")
bar / fileInputs += baseDirectory.value / "base/subdir/nested-subdir" * "*.md"
bar := (bar / fileInputs).value.all.map(_._1.toFile)
bar := (bar / allInputFiles).value.map(_.toFile)
val checkBar = taskKey[Unit]("Check that the Bar.md file is retrieved")
@ -32,7 +32,8 @@ val checkAll = taskKey[Unit]("Check that the Bar.md file is retrieved")
checkAll := {
import sbt.dsl.LinterLevel.Ignore
val expected = Set("Foo.txt", "Bar.md").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
assert((all / fileInputs).value.all.map(_._1.toFile).toSet == expected)
val actual = (all / allInputFiles).value.map(_.toFile).toSet
assert(actual == expected)
}
val set = taskKey[Seq[File]]("Specify redundant sources in a set")
@ -42,30 +43,18 @@ set / fileInputs ++= Seq(
baseDirectory.value / "base" / "subdir" / "nested-subdir" * -DirectoryFilter
)
val checkSet = taskKey[Unit]("Verify that redundant sources are handled")
checkSet := {
val redundant = (set / fileInputs).value.all.map(_._1.toFile)
assert(redundant.size == 2)
val deduped = (set / fileInputs).value.toSet[Glob].all.map(_._1.toFile)
val expected = Seq("Bar.md", "Foo.txt").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
assert(deduped.sorted == expected)
}
val depth = taskKey[Seq[File]]("Specify redundant sources with limited depth")
val checkDepth = taskKey[Unit]("Check that the Bar.md file is retrieved")
depth / fileInputs ++= Seq(
sbt.io.Glob(baseDirectory.value / "base", -DirectoryFilter, 2),
sbt.io.Glob(baseDirectory.value / "base" / "subdir", -DirectoryFilter, 1)
)
depth / fileInputs ++= {
Seq(
Glob(baseDirectory.value / "base", AnyPath / AnyPath / "*.md"),
Glob(baseDirectory.value / "base" / "subdir", AnyPath / "*.md"),
)
}
checkDepth := {
val redundant = (depth / fileInputs).value.all.map(_._1.toFile)
assert(redundant.size == 2)
val deduped = (depth / fileInputs).value.toSet[Glob].all.map(_._1.toFile)
val expected = Seq("Bar.md", "Foo.txt").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
assert(deduped.sorted == expected)
val expected = Seq("Bar.md").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
val actual = (depth / allInputFiles).value.map(_.toFile)
assert(actual == expected)
}

View File

@ -4,6 +4,4 @@
> checkAll
> checkSet
> checkDepth
> checkDepth

View File

@ -0,0 +1 @@
val root = sbt.interproject.inputs.Build.root

View File

@ -0,0 +1,70 @@
package sbt
package interproject.inputs
import sbt.Keys._
import sbt.nio.Keys._
/**
 * This test is for internal logic so it must be in the sbt package because it uses package
 * private apis.
 *
 * Each check* task compares the globs discovered via `transitiveDynamicInputs` against the
 * expected set of declared fileInputs for the corresponding configuration.
 */
object Build {
  val cached = settingKey[Unit]("")
  val newInputs = settingKey[Unit]("")
  val checkCompile = taskKey[Unit]("check compile inputs")
  val checkRun = taskKey[Unit]("check runtime inputs")
  val checkTest = taskKey[Unit]("check test inputs")

  // Deduplicates the diff-and-throw logic that was repeated (with slightly inconsistent
  // messages) in checkCompile, checkRun and checkTest. Generic so it works for any element
  // type; reports the symmetric difference on mismatch.
  private def checkSame[A](log: sbt.util.Logger, actual: Set[A], expected: Set[A]): Unit = {
    log.debug(s"actual: $actual\nexpected:$expected")
    if (actual != expected) {
      val actualExtra = actual diff expected
      val expectedExtra = expected diff actual
      throw new IllegalStateException(
        s"$actual did not equal $expected\n" +
          s"${if (actualExtra.nonEmpty) s"Actual result had extra fields $actualExtra" else ""}" +
          s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}"
      )
    }
  }

  val root = (project in file(".")).settings(
    Compile / cached / fileInputs := (Compile / unmanagedSources / fileInputs).value ++
      (Compile / unmanagedResources / fileInputs).value,
    Test / cached / fileInputs := (Test / unmanagedSources / fileInputs).value ++
      (Test / unmanagedResources / fileInputs).value,
    Compile / newInputs / fileInputs += baseDirectory.value * "*.sc",
    Compile / unmanagedSources / fileInputs ++= (Compile / newInputs / fileInputs).value,
    checkCompile := {
      val actual = (Compile / compile / transitiveDynamicInputs).value.map(_.glob).toSet
      val expected = ((Compile / cached / fileInputs).value ++
        (Compile / newInputs / fileInputs).value).toSet
      checkSame(streams.value.log, actual, expected)
    },
    checkRun := {
      val actual = (Runtime / run / transitiveDynamicInputs).value.map(_.glob).toSet
      // Runtime doesn't add any new inputs, but it should correctly find the Compile inputs via
      // delegation.
      val expected = ((Compile / cached / fileInputs).value ++
        (Compile / newInputs / fileInputs).value).toSet
      checkSame(streams.value.log, actual, expected)
    },
    checkTest := {
      val actual = (Test / compile / transitiveDynamicInputs).value.map(_.glob).toSet
      val expected = ((Test / cached / fileInputs).value ++
        (Compile / newInputs / fileInputs).value ++ (Compile / cached / fileInputs).value).toSet
      checkSame(streams.value.log, actual, expected)
    }
  )
}

View File

@ -0,0 +1 @@
### Bar

View File

@ -0,0 +1,20 @@
import sbt.nio.Keys._
val fileInputTask = taskKey[Unit]("task with file inputs")
fileInputTask / fileInputs += (baseDirectory.value / "base").toGlob / "*.md"
fileInputTask / inputFileStamper := sbt.nio.FileStamper.LastModified
fileInputTask := Def.taskDyn {
(fileInputTask / changedInputFiles).value match {
case Some(ChangedFiles(_, _, u)) if u.nonEmpty => Def.task(assert(true))
case None => Def.task(assert(false))
}
}.value
val setLastModified = taskKey[Unit]("Reset the last modified time")
setLastModified := {
val file = baseDirectory.value / "base" / "Bar.md"
IO.setModifiedTimeOrFalse(file, 1234567890L)
}

View File

@ -0,0 +1 @@
### new bar

View File

@ -0,0 +1 @@
### new bar 2

View File

@ -0,0 +1,26 @@
-> fileInputTask
$ touch base/Bar.md
# this should succeed even though the contents didn't change
> fileInputTask
$ copy-file changes/Bar.md base/Bar.md
# the last modified should change due to the copy
> fileInputTask
> setLastModified
> fileInputTask
$ copy-file changes/Bar2.md base/Bar.md
> setLastModified
# this should fail even though we changed the file with a copy
-> fileInputTask
$ touch base/Bar.md
> fileInputTask

View File

@ -0,0 +1,132 @@
import java.nio.file.{ Files, Path }
import scala.sys.process._
// Task that compiles every C source under src/lib into object files.
val compileLib = taskKey[Seq[Path]]("Compile the library")
compileLib / sourceDirectory := sourceDirectory.value / "lib"
// Inputs: all *.c files anywhere under the lib source directory plus the
// headers in include/ (a header change forces a full recompile below).
compileLib / fileInputs := {
  val base: Glob = (compileLib / sourceDirectory).value.toGlob
  base / ** / "*.c" :: base / "include" / "*.h" :: Nil
}
compileLib / target := baseDirectory.value / "out" / "objects"
compileLib := {
  val allFiles: Seq[Path] = (compileLib / allInputFiles).value
  // Created and updated inputs since the previous run; None means no prior
  // change information is available.
  val changedFiles: Option[Seq[Path]] = (compileLib / changedInputFiles).value match {
    case Some(ChangedFiles(c, _, u)) => Some(c ++ u)
    case None => None
  }
  val include = (compileLib / sourceDirectory).value / "include"
  val objectDir: Path = (compileLib / target).value.toPath / "objects"
  val logger = streams.value.log
  // foo.c -> foo.o
  def objectFileName(path: Path): String = {
    val name = path.getFileName.toString
    name.substring(0, name.lastIndexOf('.')) + ".o"
  }
  compileLib.previous match {
    // Nothing changed and a previous result exists: reuse the cached objects.
    // (The Seq[Path] type test is erased at runtime — unchecked but safe here
    // because compileLib is the only writer of this previous value.)
    case Some(outputs: Seq[Path]) if changedFiles.isEmpty =>
      logger.info("Not compiling libfoo: no inputs have changed.")
      outputs
    case _ =>
      Files.createDirectories(objectDir)
      def extensionFilter(ext: String): Path => Boolean = _.getFileName.toString.endsWith(s".$ext")
      // If any header changed, recompile every source; otherwise compile only
      // the changed sources (or everything when there is no change info).
      val cFiles: Seq[Path] =
        if (changedFiles.fold(false)(_.exists(extensionFilter("h")))) allFiles.filter(extensionFilter("c"))
        else changedFiles.getOrElse(allFiles).filter(extensionFilter("c"))
      cFiles.map { file =>
        val outFile = objectDir.resolve(objectFileName(file))
        logger.info(s"Compiling $file to $outFile")
        // .!! raises on a non-zero gcc exit, failing the task.
        Seq("gcc", "-c", file.toString, s"-I$include", "-o", outFile.toString).!!
        outFile
      }
  }
}
// Link all object files produced by compileLib into a shared library (libfoo).
val linkLib = taskKey[Path]("")
linkLib / target := baseDirectory.value / "out" / "lib"
linkLib := {
  val changedObjects = (compileLib / changedOutputFiles).value
  val outPath = (linkLib / target).value.toPath
  val allObjects = (compileLib / allOutputFiles).value.map(_.toString)
  val logger = streams.value.log
  linkLib.previous match {
    // No object file changed since the last run: reuse the linked library.
    case Some(p: Path) if changedObjects.isEmpty =>
      logger.info("Not running linker: no outputs have changed.")
      p
    case _ =>
      // Platform-specific shared-library flags and file name.
      val (linkOptions, libraryPath) = if (scala.util.Properties.isMac) {
        val path = outPath.resolve("libfoo.dylib")
        (Seq("-dynamiclib", "-o", path.toString), path)
      } else {
        val path = outPath.resolve("libfoo.so")
        (Seq("-shared", "-fPIC", "-o", path.toString), path)
      }
      logger.info(s"Linking $libraryPath")
      Files.createDirectories(outPath)
      // .!! raises on a non-zero gcc exit, failing the task.
      ("gcc" +: (linkOptions ++ allObjects)).!!
      libraryPath
  }
}
// Compile src/main/main.c into out/main/main.out, linking against libfoo.
val compileMain = taskKey[Path]("compile main")
compileMain / sourceDirectory := sourceDirectory.value / "main"
compileMain / fileInputs := (compileMain / sourceDirectory).value.toGlob / "main.c" :: Nil
compileMain / target := baseDirectory.value / "out" / "main"
compileMain := {
  val library = linkLib.value
  // True when main.c itself or the linked library changed since the last run.
  val changed: Boolean = (compileMain / changedInputFiles).value.nonEmpty ||
    (linkLib / changedOutputFiles).value.nonEmpty
  val include = (compileLib / sourceDirectory).value / "include"
  val logger = streams.value.log
  val outDir = (compileMain / target).value.toPath
  val outPath = outDir.resolve("main.out")
  compileMain.previous match {
    // Reuse the previous binary only when nothing changed. (BUGFIX: the guard
    // was previously `if changed`, which skipped the rebuild exactly when a
    // rebuild was needed — inverted relative to the log message below and to
    // the identical caching pattern in compileLib and linkLib above.)
    case Some(p: Path) if !changed =>
      logger.info(s"Not building $outPath: no dependencies have changed")
      p
    case _ =>
      (compileMain / allInputFiles).value match {
        case Seq(main) =>
          Files.createDirectories(outDir)
          logger.info(s"Building executable $outPath")
          // .!! raises on a non-zero gcc exit, failing the task.
          Seq(
            "gcc",
            main.toString,
            s"-I$include",
            "-o",
            outPath.toString,
            s"-L${library.getParent}",
            "-lfoo"
          ).!!
          outPath
        case main =>
          throw new IllegalStateException(s"multiple main files detected: ${main.mkString(",")}")
      }
  }
}
// Input task: run the compiled binary with any space-delimited arguments and
// log each line it prints.
val executeMain = inputKey[Unit]("run the main method")
executeMain := {
  val args = Def.spaceDelimited("<arguments>").parsed
  val binary: Seq[Path] = (compileMain / allOutputFiles).value
  val logger = streams.value.log
  binary match {
    case Seq(b) =>
      val argString =
        if (args.nonEmpty) s" with arguments: ${args.mkString("'", "', '", "'")}" else ""
      logger.info(s"Running $b$argString")
      // linkLib.value supplies the shared-library directory needed at load time.
      logger.info(RunBinary(b, args, linkLib.value).mkString("\n"))
    case b =>
      // Anything other than exactly one output file is an unexpected build state
      // (note: an empty Seq also lands here, despite the message wording).
      throw new IllegalArgumentException(
        s"compileMain generated multiple binaries: ${b.mkString(", ")}"
      )
  }
}
// Input task: `checkOutput <arg> <expected>` runs the binary with <arg> and
// asserts its output contains the line "f(<arg>) = <expected>".
val checkOutput = inputKey[Unit]("check the output value")
checkOutput := {
  // NOTE(review): this pattern throws a MatchError unless exactly two
  // arguments are supplied.
  val args @ Seq(arg, res) = Def.spaceDelimited("").parsed
  val binary: Path = (compileMain / allOutputFiles).value.head
  val output = RunBinary(binary, args, linkLib.value)
  assert(output.contains(s"f($arg) = $res"))
  ()
}

View File

@ -0,0 +1,16 @@
#include "lib.h"
/* Stringization helpers: STR macro-expands its argument, then stringizes it.
 * (Renamed from __STR: identifiers beginning with a double underscore are
 * reserved for the implementation in C.) */
#define STR_HELPER(x) #x
#define STR(x) STR_HELPER(x)
/* BODY(x, op) expands to the expression: x op x */
#define BODY(x, op) x op x
#define OP *
#define ARG x
/* Computes BODY(ARG, OP) = x * x. */
const int func(const int x) {
  return BODY(ARG, OP);
}
/* Returns the expression as text: "x" " " "*" " " "x"  ->  "x * x". */
const char* func_str() {
  return BODY(STR(ARG), " "STR(OP)" ");
}

View File

@ -0,0 +1,15 @@
import java.nio.file.Path
import java.util.concurrent.TimeUnit
object RunBinary {

  /**
   * Runs `binary` with `args` and returns its standard output followed by its
   * standard error, one element per line.
   *
   * On Linux, `LD_LIBRARY_PATH` is set to the directory containing
   * `libraryPath` so a freshly linked shared library resolves at load time.
   *
   * The process is given five seconds to finish; if it is still running after
   * that it is destroyed before the streams are read. (The original code
   * ignored the boolean returned by `waitFor`, so reading the streams of a
   * process that never exits could block forever.)
   */
  def apply(binary: Path, args: Seq[String], libraryPath: Path): Seq[String] = {
    val builder = new java.lang.ProcessBuilder(binary.toString +: args: _*)
    if (scala.util.Properties.isLinux) {
      builder.environment.put("LD_LIBRARY_PATH", libraryPath.getParent.toString)
    }
    val process = builder.start()
    val finished = process.waitFor(5, TimeUnit.SECONDS)
    // Destroying closes the pipes, guaranteeing the reads below terminate.
    if (!finished) process.destroy()
    scala.io.Source.fromInputStream(process.getInputStream).getLines.toVector ++
      scala.io.Source.fromInputStream(process.getErrorStream).getLines
  }
}

View File

@ -0,0 +1,3 @@
#ifndef LIB_H
#define LIB_H

/* Public interface of libfoo (see lib.c): func applies the macro-generated
 * arithmetic expression to its argument; func_str returns that expression as
 * text. Include guard added so the header is safe to include more than once. */
const int func(const int);
const char* func_str();

#endif /* LIB_H */

View File

@ -0,0 +1,16 @@
#include "lib.h"
/* Stringization helpers: STR macro-expands its argument, then stringizes it.
 * (Renamed from __STR: identifiers beginning with a double underscore are
 * reserved for the implementation in C.) */
#define STR_HELPER(x) #x
#define STR(x) STR_HELPER(x)
/* BODY(x, op) expands to the expression: x op x op x */
#define BODY(x, op) x op x op x
#define OP *
#define ARG x
/* Computes BODY(ARG, OP) = x * x * x. */
const int func(const int x) {
  return BODY(ARG, OP);
}
/* Returns the expression as text: "x * x * x". */
const char* func_str() {
  return BODY(STR(ARG), " "STR(OP)" ");
}

View File

@ -0,0 +1,17 @@
#include "lib.h"
/* System headers belong in angle brackets; the quote form first searches the
 * local directory, which was unintended here. */
#include <stdio.h>
#include <stdlib.h>

/* Driver for the libfoo scripted test: prints the symbolic form of f, then
 * evaluates it for each integer command-line argument (default argument: 1). */
int main(int argc, char *argv[]) {
  if (argc == 1) printf("No arguments provided, evaluating f with default value: 1\n");
  printf("f := %s\n", func_str());
  if (argc == 1) {
    printf("f(1) = %d\n", func(1));
  } else {
    /* atoi returns 0 on non-numeric input; arguments are not validated. */
    for (int i = 1; i < argc; ++i) {
      int arg = atoi(argv[i]);
      printf("f(%d) = %d\n", arg, func(arg));
    }
  }
  return 0;
}

View File

@ -0,0 +1,25 @@
> executeMain 1
> checkDirectoryContents out/main main.out
> compileMain / clean
> checkDirectoryContents out/main empty
> checkDirectoryContents out/lib libfoo*
> linkLib / clean
> checkDirectoryContents out/lib empty
> executeMain 1
> checkDirectoryContents out/main main.out
> checkDirectoryContents out/lib libfoo*
> checkOutput 2 8
$ copy-file changes/lib.c src/lib/lib.c
> checkOutput 2 4

View File

@ -0,0 +1,19 @@
import java.nio.file.Path
// Input task: `checkDirectoryContents <dir> <glob>...` verifies that every
// file under <dir> matches one of the given glob patterns.
val checkDirectoryContents = inputKey[Unit]("Validates that a directory has the expected files")
checkDirectoryContents := {
  val arguments = Def.spaceDelimited("").parsed
  val directory = (baseDirectory.value / arguments.head).toPath
  val view = fileTreeView.value
  val expected = arguments.tail
  expected match {
    // NOTE(review): this case duplicates the Seq("empty") case below — with no
    // patterns at all the directory is also required to be empty; confirm intent.
    case s if s.isEmpty => assert(view.list(directory.toGlob / **).isEmpty)
    // The literal argument "empty" asserts the directory contains no files.
    case Seq("empty") => assert(view.list(directory.toGlob / **).isEmpty)
    case globStrings =>
      val globs = globStrings.map(Glob.apply)
      // Relativize so the globs match directory-relative names.
      val actual: Seq[Path] = view.list(directory.toGlob / **).map {
        case (p, _) => directory.relativize(p)
      }
      // NOTE(review): forall is vacuously true for an empty directory, so this
      // only proves nothing unexpected is present, not that the expected files
      // exist — confirm that is the intended check.
      assert(actual.forall(f => globs.exists(_.matches(f))))
  }
}

View File

@ -1,42 +0,0 @@
import java.nio.file.Path
import sbt.internal.{FileAttributes, FileTree}
val allInputs = taskKey[Seq[File]]("")
val allInputsExplicit = taskKey[Seq[File]]("")
val checkInputs = inputKey[Unit]("")
val checkInputsExplicit = inputKey[Unit]("")
allInputs := (Compile / unmanagedSources / fileInputs).value.all.map(_._1.toFile)
checkInputs := {
val res = allInputs.value
val scala = (Compile / scalaSource).value
val expected = Def.spaceDelimited("<args>").parsed.map(scala / _).toSet
assert(res.toSet == expected)
}
// In this test we override the FileTree.Repository used by the all method.
allInputsExplicit := {
val files = scala.collection.mutable.Set.empty[File]
val underlying = implicitly[FileTree.Repository]
val repo = new FileTree.Repository {
override def get(glob: Glob): Seq[(Path, FileAttributes)] = {
val res = underlying.get(glob)
files ++= res.map(_._1.toFile)
res
}
override def close(): Unit = {}
}
val include = (Compile / unmanagedSources / includeFilter).value
val _ = (Compile / unmanagedSources / fileInputs).value.all(repo).map(_._1.toFile).toSet
files.filter(include.accept).toSeq
}
checkInputsExplicit := {
val res = allInputsExplicit.value
val scala = (Compile / scalaSource).value
val expected = Def.spaceDelimited("<args>").parsed.map(scala / _).toSet
assert(res.toSet == expected)
}

View File

@ -1,3 +0,0 @@
> checkInputs foo/Foo.scala bar/Bar.scala
> checkInputsExplicit foo/Foo.scala bar/Bar.scala

View File

@ -1,58 +0,0 @@
import sbt.internal.TransitiveGlobs._
val cached = settingKey[Unit]("")
val newInputs = settingKey[Unit]("")
Compile / cached / fileInputs := (Compile / unmanagedSources / fileInputs).value ++
(Compile / unmanagedResources / fileInputs).value
Test / cached / fileInputs := (Test / unmanagedSources / fileInputs).value ++
(Test / unmanagedResources / fileInputs).value
Compile / newInputs / fileInputs += baseDirectory.value * "*.sc"
Compile / unmanagedSources / fileInputs ++= (Compile / newInputs / fileInputs).value
val checkCompile = taskKey[Unit]("check compile inputs")
checkCompile := {
val actual = (Compile / compile / transitiveInputs).value.toSet
val expected = ((Compile / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value).toSet
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
if (actual != expected) {
val actualExtra = actual diff expected
val expectedExtra = expected diff actual
throw new IllegalStateException(
s"$actual did not equal $expected\n" +
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields $actualExtra" else ""}" +
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
}
}
val checkRun = taskKey[Unit]("check runtime inputs")
checkRun := {
val actual = (Runtime / run / transitiveInputs).value.toSet
// Runtime doesn't add any new inputs, but it should correctly find the Compile inputs via
// delegation.
val expected = ((Compile / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value).toSet
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
if (actual != expected) {
val actualExtra = actual diff expected
val expectedExtra = expected diff actual
throw new IllegalStateException(
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields: $actualExtra" else ""}" +
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
}
}
val checkTest = taskKey[Unit]("check test inputs")
checkTest := {
val actual = (Test / compile / transitiveInputs).value.toSet
val expected = ((Test / cached / fileInputs).value ++ (Compile / newInputs / fileInputs).value ++
(Compile / cached / fileInputs).value).toSet
streams.value.log.debug(s"actual: $actual\nexpected:$expected")
if (actual != expected) {
val actualExtra = actual diff expected
val expectedExtra = expected diff actual
throw new IllegalStateException(
s"$actual did not equal $expected\n" +
s"${if (actualExtra.nonEmpty) s"Actual result had extra fields $actualExtra" else ""}" +
s"${if (expectedExtra.nonEmpty) s"Actual result was missing: $expectedExtra" else ""}")
}
}

View File

@ -1,3 +0,0 @@
package bar
object Bar

View File

@ -1,3 +0,0 @@
package foo
object Foo

View File

@ -10,4 +10,4 @@ checkStringValue := checkStringValueImpl.evaluated
setStringValue / watchTriggers := baseDirectory.value * "string.txt" :: Nil
watchOnEvent := { _ => _ => Watch.CancelWatch }
watchOnFileInputEvent := { (_, _) => sbt.nio.Watch.CancelWatch }

View File

@ -2,6 +2,8 @@ package sbt.input.aggregation
import sbt._
import Keys._
import sbt.nio.Keys._
import sbt.nio.Watch
object Build {
val setStringValue = inputKey[Unit]("set a global string to a value")
@ -31,10 +33,10 @@ object Build {
setStringValueImpl.evaluated
},
checkStringValue := checkStringValueImpl.evaluated,
watchOnEvent := { _ => _ => Watch.CancelWatch }
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch }
)
lazy val bar = project.settings(fileInputs in setStringValue += baseDirectory.value * "foo.txt")
lazy val root = (project in file(".")).aggregate(foo, bar).settings(
watchOnEvent := { _ => _ => Watch.CancelWatch }
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch }
)
}

View File

@ -1,14 +1,16 @@
package sbt.watch.task
import java.nio.file.Path
import sbt._
import Keys._
import sbt.internal.FileTree
import sbt.nio.Keys._
import sbt.nio.Watch
object Build {
val reloadFile = settingKey[File]("file to toggle whether or not to reload")
val setStringValue = taskKey[Unit]("set a global string to a value")
val checkStringValue = inputKey[Unit]("check the value of a global")
val foo = taskKey[Unit]("foo")
val foo = taskKey[Seq[Path]]("foo")
def setStringValueImpl: Def.Initialize[Task[Unit]] = Def.task {
val i = (setStringValue / fileInputs).value
val (stringFile, string) = ("foo.txt", "bar")
@ -22,21 +24,22 @@ object Build {
lazy val root = (project in file(".")).settings(
reloadFile := baseDirectory.value / "reload",
foo / fileInputs += baseDirectory.value * "foo.txt",
foo := (foo / allInputFiles).value,
setStringValue := Def.taskDyn {
// This hides foo / fileInputs from the input graph
Def.taskDyn {
val _ = (foo / fileInputs).value.all
val inputs = foo.value
// By putting setStringValueImpl.value inside a Def.task, we ensure that
// (foo / fileInputs).value is registered with the file repository before modifying the file.
Def.task(setStringValueImpl.value)
if (inputs.isEmpty) Def.task(setStringValueImpl.value)
else Def.task(assert(false))
}
}.value,
checkStringValue := checkStringValueImpl.evaluated,
watchOnInputEvent := { (_, _) => Watch.CancelWatch },
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch },
watchOnFileInputEvent := { (_, _) => Watch.CancelWatch },
watchTasks := Def.inputTask {
val prev = watchTasks.evaluated
new StateTransform(prev.state.fail)
}.evaluated
)
}
}

View File

@ -0,0 +1,108 @@
package sbt
package input.aggregation
import java.nio.file.Paths
import sbt.Keys._
import sbt.internal.DynamicInput
import sbt.nio.{ file => _, _ }
import sbt.nio.Keys._
/**
 * This test is for internal logic so it must be in the sbt package because it uses package
 * private apis.
 */
object Build {
  val setStringValue = inputKey[Unit]("set a global string to a value")
  val checkStringValue = inputKey[Unit]("check the value of a global")
  val checkTriggers = taskKey[Unit]("Check that the triggers are correctly aggregated.")
  // NOTE(review): checkGlobs is declared but no `checkGlobs := ...` binding is
  // visible in this file — confirm it is defined elsewhere before running it.
  val checkGlobs = taskKey[Unit](
    "Check that the globs are correctly aggregated and that the globs are the union of the inputs and the triggers"
  )
  // Writes the second parsed argument to the file named by the first.
  def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
    val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
    IO.write(file(stringFile), string)
  }
  // Asserts that the file named by the first argument contains the second.
  def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
    val Seq(stringFile, string) = Def.spaceDelimited().parsed
    assert(IO.read(file(stringFile)) == string)
  }
  def triggers(t: Seq[DynamicInput]): Seq[Glob] = t.collect {
    // This is a hack to exclude the default compile and resource file inputs
    case i if !i.glob.toString.contains("*") => i.glob
  }
  lazy val foo = project
    .settings(
      setStringValue := {
        val _ = (fileInputs in (bar, setStringValue)).value
        setStringValueImpl.evaluated
      },
      checkStringValue := checkStringValueImpl.evaluated,
      watchOnFileInputEvent := { (_, _) =>
        Watch.CancelWatch
      },
      Compile / compile / watchOnIteration := { _ =>
        Watch.CancelWatch
      },
      checkTriggers := {
        val actual = triggers((Compile / compile / transitiveDynamicInputs).value).toSet
        val base = baseDirectory.value.getParentFile
        // This checks that since foo depends on bar there is a transitive trigger generated
        // for the "bar.txt" trigger added to bar / Compile / unmanagedResources (which is a
        // transitive dependency of foo's compile task).
        val expected: Set[Glob] = Set(base * "baz.txt", (base / "bar") * "bar.txt")
        assert(actual == expected)
      },
      Test / test / watchTriggers += (baseDirectory.value / "test.txt").toGlob,
      Test / checkTriggers := {
        val testTriggers = triggers((Test / test / transitiveDynamicInputs).value).toSet
        // This validates that since the "test.txt" trigger is only added to the Test / test task,
        // that the Test / compile does not pick it up. Both of them pick up the triggers that
        // are found in the test above for the compile configuration because of the transitive
        // classpath dependency that is added in Defaults.internalDependencies.
        val compileTriggers = triggers((Test / compile / transitiveDynamicInputs).value).toSet
        val base = baseDirectory.value.getParentFile
        val expected: Set[Glob] =
          Set(base * "baz.txt", (base / "bar") * "bar.txt", (base / "foo") * "test.txt")
        assert(testTriggers == expected)
        assert((testTriggers - ((base / "foo") * "test.txt")) == compileTriggers)
      },
    )
    .dependsOn(bar)
  lazy val bar = project.settings(
    fileInputs in setStringValue += baseDirectory.value * "foo.txt",
    setStringValue / watchTriggers += baseDirectory.value * "bar.txt",
    // This trigger should transitively propagate to foo / compile and foo / Test / compile
    Compile / unmanagedResources / watchTriggers += baseDirectory.value * "bar.txt",
    checkTriggers := {
      val base = baseDirectory.value.getParentFile
      val actual = triggers((Compile / compile / transitiveDynamicInputs).value).toSet
      val expected: Set[Glob] = Set((base / "bar") * "bar.txt", base * "baz.txt")
      assert(actual == expected)
    },
    // This trigger should not transitively propagate to any foo task
    Test / unmanagedResources / watchTriggers += baseDirectory.value * "bar-test.txt",
    Test / checkTriggers := {
      val testTriggers = triggers((Test / test / transitiveDynamicInputs).value).toSet
      val compileTriggers = triggers((Test / compile / transitiveDynamicInputs).value).toSet
      val base = baseDirectory.value.getParentFile
      val expected: Set[Glob] =
        Set(base * "baz.txt", (base / "bar") * "bar.txt", (base / "bar") * "bar-test.txt")
      assert(testTriggers == expected)
      assert(testTriggers == compileTriggers)
    },
  )
  lazy val root = (project in file("."))
    .aggregate(foo, bar)
    .settings(
      watchOnFileInputEvent := { (_, _) =>
        Watch.CancelWatch
      },
      checkTriggers := {
        val actual = triggers((Compile / compile / transitiveDynamicInputs).value)
        val expected: Seq[Glob] = baseDirectory.value * "baz.txt" :: Nil
        assert(actual == expected)
      },
    )
}

View File

@ -2,8 +2,6 @@
> Test / checkTriggers
> checkGlobs
# do not set the project here to ensure the bar/bar.txt trigger is captured by aggregation
# also add random spaces and multiple commands to ensure the parser is sane.
> ~ setStringValue bar/bar.txt bar; root / setStringValue bar/bar.txt baz

View File

@ -1,94 +0,0 @@
package sbt.input.aggregation
import sbt._
import Keys._
import sbt.internal.TransitiveGlobs._
object Build {
val setStringValue = inputKey[Unit]("set a global string to a value")
val checkStringValue = inputKey[Unit]("check the value of a global")
val checkTriggers = taskKey[Unit]("Check that the triggers are correctly aggregated.")
val checkGlobs = taskKey[Unit]("Check that the globs are correctly aggregated and that the globs are the union of the inputs and the triggers")
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
IO.write(file(stringFile), string)
}
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed
assert(IO.read(file(stringFile)) == string)
}
def checkGlobsImpl: Def.Initialize[Task[Unit]] = Def.task {
val (globInputs, globTriggers) = (Compile / compile / transitiveGlobs).value
val inputs = (Compile / compile / transitiveInputs).value.toSet
val triggers = (Compile / compile / transitiveTriggers).value.toSet
assert(globInputs.toSet == inputs)
assert(globTriggers.toSet == triggers)
}
lazy val foo = project.settings(
setStringValue := {
val _ = (fileInputs in (bar, setStringValue)).value
setStringValueImpl.evaluated
},
checkStringValue := checkStringValueImpl.evaluated,
watchOnTriggerEvent := { (_, _) => Watch.CancelWatch },
watchOnInputEvent := { (_, _) => Watch.CancelWatch },
Compile / compile / watchOnStart := { _ => () => Watch.CancelWatch },
checkTriggers := {
val actual = (Compile / compile / transitiveTriggers).value.toSet
val base = baseDirectory.value.getParentFile
// This checks that since foo depends on bar there is a transitive trigger generated
// for the "bar.txt" trigger added to bar / Compile / unmanagedResources (which is a
// transitive dependency of
val expected: Set[Glob] = Set(base * "baz.txt", (base / "bar") * "bar.txt")
assert(actual == expected)
},
Test / test / watchTriggers += baseDirectory.value * "test.txt",
Test / checkTriggers := {
val testTriggers = (Test / test / transitiveTriggers).value.toSet
// This validates that since the "test.txt" trigger is only added to the Test / test task,
// that the Test / compile does not pick it up. Both of them pick up the the triggers that
// are found in the test above for the compile configuration because of the transitive
// classpath dependency that is added in Defaults.internalDependencies.
val compileTriggers = (Test / compile / transitiveTriggers).value.toSet
val base = baseDirectory.value.getParentFile
val expected: Set[Glob] = Set(
base * "baz.txt", (base / "bar") * "bar.txt", (base / "foo") * "test.txt")
assert(testTriggers == expected)
assert((testTriggers - ((base / "foo") * "test.txt")) == compileTriggers)
},
checkGlobs := checkGlobsImpl.value
).dependsOn(bar)
lazy val bar = project.settings(
fileInputs in setStringValue += baseDirectory.value * "foo.txt",
setStringValue / watchTriggers += baseDirectory.value * "bar.txt",
// This trigger should transitively propagate to foo / compile and foo / Test / compile
Compile / unmanagedResources / watchTriggers += baseDirectory.value * "bar.txt",
checkTriggers := {
val base = baseDirectory.value.getParentFile
val actual = (Compile / compile / transitiveTriggers).value
val expected: Set[Glob] = Set((base / "bar") * "bar.txt", base * "baz.txt")
assert(actual.toSet == expected)
},
// This trigger should not transitively propagate to any foo task
Test / unmanagedResources / watchTriggers += baseDirectory.value * "bar-test.txt",
Test / checkTriggers := {
val testTriggers = (Test / test / transitiveTriggers).value.toSet
val compileTriggers = (Test / compile / transitiveTriggers).value.toSet
val base = baseDirectory.value.getParentFile
val expected: Set[Glob] = Set(
base * "baz.txt", (base / "bar") * "bar.txt", (base / "bar") * "bar-test.txt")
assert(testTriggers == expected)
assert(testTriggers == compileTriggers)
},
checkGlobs := checkGlobsImpl.value
)
lazy val root = (project in file(".")).aggregate(foo, bar).settings(
watchOnEvent := { _ => _ => Watch.CancelWatch },
checkTriggers := {
val actual = (Compile / compile / transitiveTriggers).value
val expected: Seq[Glob] = baseDirectory.value * "baz.txt" :: Nil
assert(actual == expected)
},
checkGlobs := checkGlobsImpl.value
)
}

View File

@ -6,6 +6,8 @@ import complete.Parser._
import java.io.{ PipedInputStream, PipedOutputStream }
import Keys._
import sbt.nio.Watch
import sbt.nio.Keys._
object Build {
val root = (project in file(".")).settings(

Some files were not shown because too many files have changed in this diff Show More