Merge pull request #4927 from eatkins/file-report

File report
This commit is contained in:
eugene yokota 2019-08-09 22:06:19 -04:00 committed by GitHub
commit c124bc1dcd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
65 changed files with 1191 additions and 455 deletions

View File

@ -10,7 +10,8 @@ package sbt
import sbt.Def.{ Initialize, ScopedKey }
import sbt.Previous._
import sbt.Scope.Global
import sbt.internal.util.{ IMap, RMap, ~> }
import sbt.internal.util._
import sbt.std.TaskExtra._
import sbt.util.StampedFormat
import sjsonnew.JsonFormat
@ -20,34 +21,42 @@ import scala.util.control.NonFatal
* Reads the previous value of tasks on-demand. The read values are cached so that they are only read once per task execution.
* `referenced` provides the `Format` to use for each key.
*/
private[sbt] final class Previous(streams: Streams, referenced: IMap[ScopedTaskKey, Referenced]) {
private[this] val map = referenced.mapValues(toValue)
private[this] def toValue = λ[Referenced ~> ReferencedValue](new ReferencedValue(_))
private[sbt] final class Previous(streams: Streams, referenced: IMap[Previous.Key, Referenced]) {
private[this] var map = IMap.empty[Previous.Key, ReferencedValue]
// We can't use mapValues to transform the map because mapValues is lazy and evaluates the
// transformation function every time a value is fetched from the map, defeating the entire
// purpose of ReferencedValue.
for (referenced.TPair(k, v) <- referenced.toTypedSeq) map = map.put(k, new ReferencedValue(v))
private[this] final class ReferencedValue[T](referenced: Referenced[T]) {
import referenced.{ stamped, task }
lazy val previousValue: Option[T] = {
try Option(streams(task).cacheStoreFactory.make(StreamName).read[T]()(stamped))
catch { case NonFatal(_) => None }
}
lazy val previousValue: Option[T] = referenced.read(streams)
}
/** Used by the .previous runtime implementation to get the previous value for task `key`. */
private def get[T](key: ScopedKey[Task[T]]): Option[T] =
private def get[T](key: Key[T]): Option[T] =
map.get(key).flatMap(_.previousValue)
}
object Previous {
import sjsonnew.BasicJsonProtocol.StringJsonFormat
private[sbt] type ScopedTaskKey[T] = ScopedKey[Task[T]]
private type AnyTaskKey = ScopedTaskKey[Any]
private type Streams = sbt.std.Streams[ScopedKey[_]]
/** The stream where the task value is persisted. */
private final val StreamName = "previous"
private[sbt] final val DependencyDirectory = "previous-dependencies"
/** Represents a reference to a task's previous value via `task.previous`. */
private[sbt] final class Referenced[T](val task: ScopedKey[Task[T]], val format: JsonFormat[T]) {
lazy val stamped = StampedFormat.withStamp(task.key.manifest.toString)(format)
private[sbt] final class Referenced[T](val key: Key[T], val format: JsonFormat[T]) {
def this(task: ScopedTaskKey[T], format: JsonFormat[T]) = this(Key(task, task), format)
@deprecated("unused", "1.3.0")
private[sbt] def task: ScopedKey[Task[T]] = key.task
lazy val stamped: JsonFormat[T] =
StampedFormat.withStamp(key.task.key.manifest.toString)(format)
def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format)
private[sbt] def read(streams: Streams): Option[T] =
try Option(streams(key.cacheKey).cacheStoreFactory.make(StreamName).read[T]()(stamped))
catch { case NonFatal(_) => None }
}
private[sbt] val references = SettingKey[References](
@ -61,16 +70,41 @@ object Previous {
KeyRanks.Invisible
)
/**
 * Identifies a `.previous` reference. `task` is the key whose previous value is read;
 * `enclosing` is the task within whose definition `.previous` was invoked. When the two differ,
 * the cached value is stored under a key derived from the enclosing task (see `cacheKey`).
 */
private[sbt] class Key[T](val task: ScopedKey[Task[T]], val enclosing: AnyTaskKey) {
  // Value-based equality over both components so that Key works correctly as an IMap key.
  override def equals(o: Any): Boolean = o match {
    case that: Key[_] => this.task == that.task && this.enclosing == that.enclosing
    case _            => false
  }
  override def hashCode(): Int = (task.## * 31) ^ enclosing.##

  /**
   * The scoped key under which the previous value is persisted in the task streams.
   * When `task != enclosing`, the referenced task's key is embedded into the enclosing
   * scope's extra attribute map (under `scopedKeyAttribute`) so that each (task, enclosing)
   * pair gets a distinct cache location — presumably consumed when computing the stream
   * directory (NOTE(review): see BuildStreams.previousComponent; confirm).
   */
  def cacheKey: AnyTaskKey = {
    if (task == enclosing) task
    else {
      // Preserve any existing extra attributes; otherwise start from an empty map.
      val am = enclosing.scope.extra match {
        case Select(a) => a.put(scopedKeyAttribute, task.asInstanceOf[AnyTaskKey])
        case _         => AttributeMap.empty.put(scopedKeyAttribute, task.asInstanceOf[AnyTaskKey])
      }
      Def.ScopedKey(enclosing.scope.copy(extra = Select(am)), enclosing.key)
    }
  }.asInstanceOf[AnyTaskKey]
}
/** Factory pairing a referenced task key with the enclosing task under which it is cached. */
private[sbt] object Key {
  def apply[T, U](key: ScopedKey[Task[T]], enclosing: ScopedKey[Task[U]]): Key[T] =
    // The enclosing key's value type parameter is irrelevant for caching, so it is erased to Any.
    new Key(key, enclosing.asInstanceOf[AnyTaskKey])
}
/** Records references to previous task value. This should be completely populated after settings finish loading. */
private[sbt] final class References {
private[this] var map = IMap.empty[ScopedTaskKey, Referenced]
private[this] var map = IMap.empty[Key, Referenced]
@deprecated("unused", "1.3.0")
def recordReference[T](key: ScopedKey[Task[T]], format: JsonFormat[T]): Unit =
recordReference(Key(key, key), format)
// TODO: this arbitrarily chooses a JsonFormat.
// The need to choose is a fundamental problem with this approach, but this should at least make a stable choice.
def recordReference[T](key: ScopedKey[Task[T]], format: JsonFormat[T]): Unit = synchronized {
def recordReference[T](key: Key[T], format: JsonFormat[T]): Unit = synchronized {
map = map.put(key, new Referenced(key, format))
}
def getReferences: IMap[ScopedTaskKey, Referenced] = synchronized { map }
def getReferences: IMap[Key, Referenced] = synchronized { map }
}
/** Persists values of tasks t where there is some task referencing it via t.previous. */
@ -80,27 +114,60 @@ object Previous {
streams: Streams
): Unit = {
val map = referenced.getReferences
def impl[T](key: ScopedKey[_], result: T): Unit =
for (i <- map.get(key.asInstanceOf[ScopedTaskKey[T]])) {
val out = streams.apply(i.task).cacheStoreFactory.make(StreamName)
try out.write(result)(i.stamped)
catch { case NonFatal(_) => }
}
val reverse = map.keys.groupBy(_.task)
// We first collect all of the successful tasks and write their scoped key into a map
// along with their values.
val successfulTaskResults = (for {
results.TPair(task, Value(v)) <- results.toTypedSeq
key <- task.info.attributes.get(Def.taskDefinitionKey).asInstanceOf[Option[AnyTaskKey]]
} yield key -> v).toMap
// We then traverse the successful results and look up all of the referenced values for
// each of these tasks. This can be a many to one relationship if multiple tasks refer
// the previous value of another task. For each reference we find, we check if the task has
// been successfully evaluated. If so, we write it to the appropriate previous cache for
// the completed task.
for {
results.TPair(Task(info, _), Value(result)) <- results.toTypedSeq
key <- info.attributes get Def.taskDefinitionKey
} impl(key, result)
(k, v) <- successfulTaskResults
keys <- reverse.get(k)
key <- keys if successfulTaskResults.contains(key.enclosing)
ref <- map.get(key.asInstanceOf[Key[Any]])
} {
val out = streams(key.cacheKey).cacheStoreFactory.make(StreamName)
try out.write(v)(ref.stamped)
catch { case NonFatal(_) => }
}
}
/**
 * Attribute used to carry the referenced task's scoped key in the extra axis of the enclosing
 * task's scope, giving each `.previous` reference its own cache location (written by
 * `Key.cacheKey` above).
 */
private[sbt] val scopedKeyAttribute = AttributeKey[AnyTaskKey](
  "previous-scoped-key-attribute",
  "Specifies a scoped key for a task on which .previous is called. Used to " +
    "set the cache directory for the task-specific previous value: see Previous.runtimeInEnclosingTask."
)
/** Public as a macro implementation detail. Do not call directly. */
def runtime[T](skey: TaskKey[T])(implicit format: JsonFormat[T]): Initialize[Task[Option[T]]] = {
val inputs = (cache in Global) zip Def.validated(skey, selfRefOk = true) zip (references in Global)
inputs {
case ((prevTask, resolved), refs) =>
refs.recordReference(resolved, format) // always evaluated on project load
import std.TaskExtra._
prevTask.map(_ get resolved) // evaluated if this task is evaluated
val key = Key(resolved, resolved)
refs.recordReference(key, format) // always evaluated on project load
prevTask.map(_.get(key)) // evaluated if this task is evaluated
}
}
/**
 * Public as a macro implementation detail. Do not call directly.
 *
 * Like `runtime`, but scopes the cached previous value to the task within whose definition
 * `.previous` was invoked (obtained via `Def.resolvedScoped`), so distinct enclosing tasks
 * observe independent previous values of the same key.
 */
def runtimeInEnclosingTask[T](skey: TaskKey[T])(
    implicit format: JsonFormat[T]
): Initialize[Task[Option[T]]] = {
  val inputs = (cache in Global)
    .zip(Def.validated(skey, selfRefOk = true))
    .zip(references in Global)
    .zip(Def.resolvedScoped)
  inputs {
    case (((prevTask, resolved), refs), inTask: ScopedKey[Task[_]] @unchecked) =>
      val key = Key(resolved, inTask)
      refs.recordReference(key, format) // always evaluated on project load
      prevTask.map(_.get(key)) // evaluated if this task is evaluated
  }
}
}

View File

@ -16,7 +16,6 @@ import lmcoursier.CoursierDependencyResolution
import lmcoursier.definitions.{ Configuration => CConfiguration }
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
import org.apache.ivy.core.module.id.ModuleRevisionId
import sbt.coursierint._
import sbt.Def.{ Initialize, ScopedKey, Setting, SettingsDefinition }
import sbt.Keys._
import sbt.Project.{
@ -28,6 +27,7 @@ import sbt.Project.{
richTaskSessionVar
}
import sbt.Scope.{ GlobalScope, ThisScope, fillTaskAxis }
import sbt.coursierint._
import sbt.internal.CommandStrings.ExportStream
import sbt.internal._
import sbt.internal.classpath.AlternativeZincUtil
@ -69,10 +69,10 @@ import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion
import sbt.librarymanagement._
import sbt.librarymanagement.ivy._
import sbt.librarymanagement.syntax._
import sbt.nio.Watch
import sbt.nio.Keys._
import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob }
import sbt.nio.file.syntax._
import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob }
import sbt.nio.{ FileChanges, Watch }
import sbt.std.TaskExtra._
import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint }
import sbt.util.CacheImplicits._
@ -150,6 +150,10 @@ object Defaults extends BuildCommon {
defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq(
excludeFilter :== HiddenFileFilter,
fileInputs :== Nil,
fileInputIncludeFilter :== AllPassFilter.toNio,
fileInputExcludeFilter :== DirectoryFilter.toNio || HiddenFileFilter,
fileOutputIncludeFilter :== AllPassFilter.toNio,
fileOutputExcludeFilter :== NothingFilter.toNio,
inputFileStamper :== sbt.nio.FileStamper.Hash,
outputFileStamper :== sbt.nio.FileStamper.LastModified,
onChangedBuildSource :== sbt.nio.Keys.WarnOnSourceChanges,
@ -605,10 +609,14 @@ object Defaults extends BuildCommon {
s"inc_compile$extra.zip"
},
externalHooks := {
import sbt.nio.FileStamp.Formats.seqPathFileStampJsonFormatter
val current =
(unmanagedSources / inputFileStamps).value ++ (managedSources / outputFileStamps).value
val previous = (externalHooks / inputFileStamps).previous
ExternalHooks.default.value(previous.flatMap(sbt.nio.Settings.changedFiles(_, current)))
val changes = previous
.map(sbt.nio.Settings.changedFiles(_, current))
.getOrElse(FileChanges.noPrevious(current.map(_._1)))
ExternalHooks.default.value(changes, fileTreeView.value)
},
externalHooks / inputFileStamps := {
compile.value // ensures the inputFileStamps previous value is only set if compile succeeds.
@ -2772,7 +2780,8 @@ object Classpaths {
import CacheStoreFactory.jvalueIsoString
val cacheStoreFactory: CacheStoreFactory = {
val factory = state.value.get(Keys.cacheStoreFactory).getOrElse(InMemoryCacheStore.factory(0))
val factory =
state.value.get(Keys.cacheStoreFactoryFactory).getOrElse(InMemoryCacheStore.factory(0))
factory(cacheDirectory.toPath, Converter)
}

View File

@ -491,7 +491,7 @@ object Keys {
val pluginData = taskKey[PluginData]("Information from the plugin build needed in the main build definition.").withRank(DTask)
val globalPluginUpdate = taskKey[UpdateReport]("A hook to get the UpdateReport of the global plugin.").withRank(DTask)
private[sbt] val taskCancelStrategy = settingKey[State => TaskCancellationStrategy]("Experimental task cancellation handler.").withRank(DTask)
private[sbt] val cacheStoreFactory = AttributeKey[CacheStoreFactoryFactory]("cache-store-factory")
private[sbt] val cacheStoreFactoryFactory = AttributeKey[CacheStoreFactoryFactory]("cache-store-factory-factory")
val fileCacheSize = settingKey[String]("The approximate maximum size in bytes of the cache used to store previous task results. For example, it could be set to \"256M\" to make the maximum size 256 megabytes.")
// Experimental in sbt 0.13.2 to enable grabbing semantic compile failures.

View File

@ -852,8 +852,8 @@ object BuiltinCommands {
.getOpt(Keys.fileCacheSize)
.flatMap(SizeParser(_))
.getOrElse(SysProp.fileCacheSize)
s.get(Keys.cacheStoreFactory).foreach(_.close())
s.put(Keys.cacheStoreFactory, InMemoryCacheStore.factory(size))
s.get(Keys.cacheStoreFactoryFactory).foreach(_.close())
s.put(Keys.cacheStoreFactoryFactory, InMemoryCacheStore.factory(size))
}
def registerCompilerCache(s: State): State = {

View File

@ -15,6 +15,7 @@ import Def.{ ScopeLocal, ScopedKey, Setting, displayFull }
import BuildPaths.outputDirectory
import Scope.GlobalScope
import BuildStreams.Streams
import sbt.LocalRootProject
import sbt.io.syntax._
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed, Settings }
import sbt.internal.util.Attributed.data
@ -291,6 +292,7 @@ object BuildStreams {
final val GlobalPath = "_global"
final val BuildUnitPath = "_build"
final val StreamsDirectory = "streams"
private final val RootPath = "_root"
def mkStreams(
units: Map[URI, LoadedBuildUnit],
@ -308,7 +310,8 @@ object BuildStreams {
displayFull,
LogManager.construct(data, s),
sjsonnew.support.scalajson.unsafe.Converter, {
val factory = s.get(Keys.cacheStoreFactory).getOrElse(InMemoryCacheStore.factory(0))
val factory =
s.get(Keys.cacheStoreFactoryFactory).getOrElse(InMemoryCacheStore.factory(0))
(file, converter: SupportConverter[JValue]) => factory(file.toPath, converter)
}
)
@ -337,14 +340,39 @@ object BuildStreams {
pathComponent(scope.config, scoped, "config")(_.name) ::
pathComponent(scope.task, scoped, "task")(_.label) ::
pathComponent(scope.extra, scoped, "extra")(showAMap) ::
scoped.key.label ::
Nil
scoped.key.label :: previousComponent(scope.extra)
}
/**
 * Computes the extra stream-path components for a `.previous` value that is scoped to an
 * enclosing task. Returns Nil unless the scope's extra axis carries
 * `Previous.scopedKeyAttribute` (put there by `Previous.Key.cacheKey`).
 */
private def previousComponent(value: ScopeAxis[AttributeMap]): List[String] =
  value match {
    case Select(am) =>
      am.get(Previous.scopedKeyAttribute) match {
        case Some(sk) =>
          // Map the referenced key's project axis to a stable directory name.
          val project = sk.scope.project match {
            case Zero                      => GlobalPath
            case Select(BuildRef(_))       => BuildUnitPath
            case Select(ProjectRef(_, id)) => id
            case Select(LocalProject(id))  => id
            case Select(RootProject(_))    => RootPath
            case Select(LocalRootProject)  => LocalRootProject.toString
            case Select(ThisBuild) | Select(ThisProject) | This =>
              // Don't want to crash if somehow an unresolved key makes it in here.
              This.toString
          }
          List(Previous.DependencyDirectory, project) ++ nonProjectPath(sk)
        case _ => Nil
      }
    case _ => Nil
  }
def showAMap(a: AttributeMap): String =
a.entries.toStream
.sortBy(_.key.label)
.map { case AttributeEntry(key, value) => s"${key.label}=$value" }
.flatMap {
// The Previous.scopedKeyAttribute is an implementation detail that allows us to get a
// more specific cache directory for a task stream.
case AttributeEntry(key, _) if key == Previous.scopedKeyAttribute => Nil
case AttributeEntry(key, value) => s"${key.label}=$value" :: Nil
}
.mkString(" ")
def projectPath(

View File

@ -83,7 +83,7 @@ private[sbt] object Clean {
Def.taskDyn {
val state = Keys.state.value
val extracted = Project.extract(state)
val view = fileTreeView.value
val view = (fileTreeView in scope).value
val manager = streamsManager.value
Def.task {
val excludeFilter = cleanFilter(scope).value
@ -139,7 +139,9 @@ private[sbt] object Clean {
// We do not want to inadvertently delete files that are not in the target directory.
val excludeFilter: Path => Boolean = path => !path.startsWith(targetDir) || filter(path)
val delete = cleanDelete(scope).value
val st = streams.in(scope).value
taskKey.previous.foreach(_.toSeqPath.foreach(p => if (!excludeFilter(p)) delete(p)))
delete(st.cacheDirectory.toPath / Previous.DependencyDirectory)
}
} tag Tags.Clean
private[this] def tryDelete(debug: String => Unit): Path => Unit = path => {

View File

@ -17,8 +17,8 @@ import sbt.internal.inc.Stamp.equivStamp.equiv
import sbt.io.syntax._
import sbt.nio.Keys._
import sbt.nio.file.syntax._
import sbt.nio.file.{ ChangedFiles, RecursiveGlob }
import sbt.nio.{ FileStamp, FileStamper }
import sbt.nio.file.{ FileAttributes, FileTreeView, RecursiveGlob }
import sbt.nio.{ FileChanges, FileStamp, FileStamper }
import xsbti.compile._
import xsbti.compile.analysis.Stamp
@ -26,7 +26,8 @@ import scala.collection.JavaConverters._
private[sbt] object ExternalHooks {
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
def default: Def.Initialize[sbt.Task[Option[ChangedFiles] => ExternalHooks]] = Def.task {
private type Func = (FileChanges, FileTreeView[(Path, FileAttributes)]) => ExternalHooks
def default: Def.Initialize[sbt.Task[Func]] = Def.task {
val unmanagedCache = unmanagedFileStampCache.value
val managedCache = managedFileStampCache.value
val cp = dependencyClasspath.value.map(_.data)
@ -35,14 +36,16 @@ private[sbt] object ExternalHooks {
managedCache.getOrElseUpdate(path, FileStamper.LastModified)
}
val classGlob = classDirectory.value.toGlob / RecursiveGlob / "*.class"
fileTreeView.value.list(classGlob).foreach {
case (path, _) => managedCache.update(path, FileStamper.LastModified)
}
val options = (compileOptions in compile).value
apply(_, options, unmanagedCache, managedCache)
(fc: FileChanges, fileTreeView: FileTreeView[(Path, FileAttributes)]) => {
fileTreeView.list(classGlob).foreach {
case (path, _) => managedCache.update(path, FileStamper.LastModified)
}
apply(fc, options, unmanagedCache, managedCache)
}
}
private def apply(
changedFiles: Option[ChangedFiles],
changedFiles: FileChanges,
options: CompileOptions,
unmanagedCache: FileStamp.Cache,
managedCache: FileStamp.Cache
@ -59,11 +62,12 @@ private[sbt] object ExternalHooks {
}
private def add(f: File, set: java.util.Set[File]): Unit = { set.add(f); () }
val allChanges = new java.util.HashSet[File]
changedFiles foreach {
case ChangedFiles(c, d, u) =>
changedFiles match {
case FileChanges(c, d, m, _) =>
c.foreach(add(_, getAdded, allChanges))
d.foreach(add(_, getRemoved, allChanges))
u.foreach(add(_, getChanged, allChanges))
m.foreach(add(_, getChanged, allChanges))
case _ =>
}
override def isEmpty: java.lang.Boolean =
getAdded.isEmpty && getRemoved.isEmpty && getChanged.isEmpty

View File

@ -0,0 +1,100 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.nio.file.{ Path => NioPath }
import sbt.nio.Keys._
import sbt.nio.{ FileChanges, FileStamp }
import scala.annotation.compileTimeOnly
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
/**
* Provides extension methods to `TaskKey[T]` that can be use to fetch the input and output file
* dependency changes for a task. Nothing in this object is intended to be called directly but,
* because there are macro definitions, some of the definitions must be public.
*
*/
object FileChangesMacro {
  // Extension methods over TaskKey[T]; each call site is rewritten by the corresponding macro
  // below, so these bodies must never execute at runtime (hence @compileTimeOnly).
  private[sbt] sealed abstract class TaskOps[T](val taskKey: TaskKey[T]) {
    @compileTimeOnly(
      "`inputFileChanges` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
    )
    def inputFileChanges: FileChanges = macro changedInputFilesImpl[T]
    @compileTimeOnly(
      "`outputFileChanges` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
    )
    def outputFileChanges: FileChanges = macro changedOutputFilesImpl[T]
    @compileTimeOnly(
      "`inputFiles` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
    )
    def inputFiles: Seq[NioPath] = macro inputFilesImpl[T]
    @compileTimeOnly(
      "`outputFiles` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
    )
    def outputFiles: Seq[NioPath] = macro outputFilesImpl[T]
  }
  // Expands `key.inputFileChanges` using the input-file keys.
  def changedInputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[FileChanges] = {
    impl[T](c)(
      c.universe.reify(allInputFiles),
      c.universe.reify(changedInputFiles),
      c.universe.reify(inputFileStamps)
    )
  }
  // Expands `key.outputFileChanges` using the output-file keys.
  def changedOutputFilesImpl[T: c.WeakTypeTag](
      c: blackbox.Context
  ): c.Expr[FileChanges] = {
    impl[T](c)(
      c.universe.reify(allOutputFiles),
      c.universe.reify(changedOutputFiles),
      c.universe.reify(outputFileStamps)
    )
  }
  /**
   * Shared expansion: rescopes the three keys to the calling task's scope, fetches the previous
   * file stamps via Previous.runtimeInEnclosingTask, and applies the change function to them,
   * falling back to FileChanges.noPrevious when no previous stamps exist.
   *
   * @param currentKey the key listing all current files
   * @param changeKey  the key providing the stamps-to-changes function
   * @param mapKey     the key whose previous stamp map is consulted
   */
  private def impl[T: c.WeakTypeTag](
      c: blackbox.Context
  )(
      currentKey: c.Expr[TaskKey[Seq[NioPath]]],
      changeKey: c.Expr[TaskKey[Seq[(NioPath, FileStamp)] => FileChanges]],
      mapKey: c.Expr[TaskKey[Seq[(NioPath, FileStamp)]]]
  ): c.Expr[FileChanges] = {
    import c.universe._
    val taskScope = getTaskScope(c)
    reify {
      val changes = (changeKey.splice in taskScope.splice).value
      val current = (currentKey.splice in taskScope.splice).value
      import sbt.nio.FileStamp.Formats._
      val previous = Previous.runtimeInEnclosingTask(mapKey.splice in taskScope.splice).value
      previous.map(changes).getOrElse(FileChanges.noPrevious(current))
    }
  }
  // Expands `key.inputFiles` to the task-scoped allInputFiles value.
  def inputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Seq[NioPath]] = {
    val taskKey = getTaskScope(c)
    c.universe.reify((allInputFiles in taskKey.splice).value)
  }
  // Expands `key.outputFiles` to the task-scoped allOutputFiles value.
  def outputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Seq[NioPath]] = {
    val taskKey = getTaskScope(c)
    c.universe.reify((allOutputFiles in taskKey.splice).value)
  }
  /**
   * Recovers the TaskKey the extension method was invoked on from the macro application tree
   * and produces its scope, filling in the task axis with the key itself when the task axis is
   * unset. Aborts expansion when invoked on anything other than a TaskKey[T] receiver.
   */
  private def getTaskScope[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[sbt.Scope] = {
    import c.universe._
    val taskTpe = c.weakTypeOf[TaskKey[T]]
    lazy val err = "Couldn't expand file change macro."
    c.macroApplication match {
      case Select(Apply(_, k :: Nil), _) if k.tpe <:< taskTpe =>
        val expr = c.Expr[TaskKey[T]](k)
        c.universe.reify {
          if (expr.splice.scope.task.toOption.isDefined) expr.splice.scope
          else expr.splice.scope in expr.splice.key
        }
      case _ => c.abort(c.enclosingPosition, err)
    }
  }
}

View File

@ -11,8 +11,9 @@ package internal.nio
import sbt.Keys.{ baseDirectory, state, streams }
import sbt.SlashSyntax0._
import sbt.io.syntax._
import sbt.nio.FileChanges
import sbt.nio.Keys._
import sbt.nio.file.{ ChangedFiles, Glob, RecursiveGlob }
import sbt.nio.file.{ Glob, RecursiveGlob }
private[sbt] object CheckBuildSources {
private[sbt] def needReloadImpl: Def.Initialize[Task[StateTransform]] = Def.task {
@ -22,17 +23,21 @@ private[sbt] object CheckBuildSources {
(onChangedBuildSource in Scope.Global).value match {
case IgnoreSourceChanges => new StateTransform(st)
case o =>
import sbt.nio.FileStamp.Formats._
logger.debug("Checking for meta build source updates")
(changedInputFiles in checkBuildSources).value match {
case Some(cf: ChangedFiles) if !firstTime =>
val previous = (inputFileStamps in checkBuildSources).previous
val changes = (changedInputFiles in checkBuildSources).value
previous.map(changes) match {
case Some(fileChanges @ FileChanges(created, deleted, modified, _))
if fileChanges.hasChanges && !firstTime =>
val rawPrefix = s"build source files have changed\n" +
(if (cf.created.nonEmpty) s"new files: ${cf.created.mkString("\n ", "\n ", "\n")}"
(if (created.nonEmpty) s"new files: ${created.mkString("\n ", "\n ", "\n")}"
else "") +
(if (cf.deleted.nonEmpty)
s"deleted files: ${cf.deleted.mkString("\n ", "\n ", "\n")}"
(if (deleted.nonEmpty)
s"deleted files: ${deleted.mkString("\n ", "\n ", "\n")}"
else "") +
(if (cf.updated.nonEmpty)
s"updated files: ${cf.updated.mkString("\n ", "\n ", "\n")}"
(if (modified.nonEmpty)
s"modified files: ${modified.mkString("\n ", "\n ", "\n")}"
else "")
val prefix = rawPrefix.linesIterator.filterNot(_.trim.isEmpty).mkString("\n")
if (o == ReloadOnSourceChanges) {

View File

@ -0,0 +1,60 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.nio
import java.nio.file.Path
/**
* A report on the changes of the input file dependencies or output files of a task compared to
* some previous time. It also contains the complete list of current inputs or outputs.
*
* @param created the files that were not present previously. When this is non empty, it does not
* necessarily mean that the files were recently created. It could just indicate
* that there was no previous cache entry for the file stamps (
* see [[FileChanges#noPrevious]]).
* @param deleted the files that have been deleted. This should be empty when no previous list of
* files is available.
* @param modified the files that have been modified. This should be empty when no previous list of
* files is available.
* @param unmodified the files that have no changes. This should be empty when no previous list of
* files is available.
*/
final case class FileChanges(
    created: Seq[Path],
    deleted: Seq[Path],
    modified: Seq[Path],
    unmodified: Seq[Path]
) {

  /**
   * Indicates whether anything differs from the previous state.
   *
   * @return true when at least one file was created, deleted or modified. Note that when no
   *         previous stamps existed, every current file is reported as created (see
   *         [[FileChanges#noPrevious]]), so this is also true in that case.
   */
  lazy val hasChanges: Boolean = Seq(created, deleted, modified).exists(_.nonEmpty)
}
object FileChanges {

  /**
   * Builds a [[FileChanges]] for files that had no previously-recorded stamps: every file is
   * classified as created.
   *
   * @param files all of the existing files.
   * @return a [[FileChanges]] whose `created` field is `files` and whose other fields are empty.
   */
  def noPrevious(files: Seq[Path]): FileChanges = FileChanges(files, Nil, Nil, Nil)

  /**
   * Builds a [[FileChanges]] for files that are identical to their previous stamps: every file
   * is classified as unmodified.
   *
   * @param files all of the existing files.
   * @return a [[FileChanges]] whose `unmodified` field is `files` and whose other fields are empty.
   */
  def unmodified(files: Seq[Path]): FileChanges = FileChanges(Nil, Nil, Nil, files)
}

View File

@ -17,14 +17,39 @@ import sbt.nio.file.FileAttributes
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
import xsbti.compile.analysis.{ Stamp => XStamp }
/**
* A trait that indicates what file stamping implementation should be used to track the state of
* a given file. The two choices are [[FileStamper.Hash]] and [[FileStamper.LastModified]].
*/
sealed trait FileStamper
/**
* Provides implementations of [[FileStamper]].
*
*/
object FileStamper {
/**
* Track files using a hash.
*/
case object Hash extends FileStamper
/**
* Track files using the last modified time.
*/
case object LastModified extends FileStamper
}
private[sbt] sealed trait FileStamp
private[sbt] object FileStamp {
/**
* Represents the state of a file. This representation is either a hash of the file contents or
* the last modified time.
*/
sealed trait FileStamp
/**
* Provides json formatters for [[FileStamp]].
*/
object FileStamp {
private[sbt] type Id[T] = T
private[sbt] implicit class Ops(val fileStamp: FileStamp) {
@ -35,11 +60,12 @@ private[sbt] object FileStamp {
}
}
def apply(path: Path, fileStamper: FileStamper): Option[FileStamp] = fileStamper match {
case FileStamper.Hash => hash(path)
case FileStamper.LastModified => lastModified(path)
}
def apply(path: Path, fileAttributes: FileAttributes): Option[FileStamp] =
private[sbt] def apply(path: Path, fileStamper: FileStamper): Option[FileStamp] =
fileStamper match {
case FileStamper.Hash => hash(path)
case FileStamper.LastModified => lastModified(path)
}
private[sbt] def apply(path: Path, fileAttributes: FileAttributes): Option[FileStamp] =
try {
if (fileAttributes.isDirectory) lastModified(path)
else
@ -51,129 +77,38 @@ private[sbt] object FileStamp {
} catch {
case e: IOException => Some(Error(e))
}
def hash(string: String): Hash = new FileHashImpl(sbt.internal.inc.Hash.unsafeFromString(string))
def hash(path: Path): Option[Hash] = Stamper.forHash(path.toFile) match {
private[sbt] def hash(string: String): Hash =
new FileHashImpl(sbt.internal.inc.Hash.unsafeFromString(string))
private[sbt] def hash(path: Path): Option[Hash] = Stamper.forHash(path.toFile) match {
case EmptyStamp => None
case s => Some(new FileHashImpl(s))
}
def lastModified(path: Path): Option[LastModified] = IO.getModifiedTimeOrZero(path.toFile) match {
case 0 => None
case l => Some(LastModified(l))
}
private[sbt] def lastModified(path: Path): Option[LastModified] =
IO.getModifiedTimeOrZero(path.toFile) match {
case 0 => None
case l => Some(LastModified(l))
}
private[this] class FileHashImpl(val xstamp: XStamp) extends Hash(xstamp.getHash.orElse(""))
sealed abstract case class Hash private[sbt] (hex: String) extends FileStamp
final case class LastModified private[sbt] (time: Long) extends FileStamp
final case class Error(exception: IOException) extends FileStamp
private[sbt] sealed abstract case class Hash private[sbt] (hex: String) extends FileStamp
private[sbt] final case class LastModified private[sbt] (time: Long) extends FileStamp
private[sbt] final case class Error(exception: IOException) extends FileStamp
implicit val pathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] {
override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = {
builder.beginArray()
obj.foreach { path =>
builder.writeString(path.toString)
}
builder.endArray()
}
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[Path] =
jsOpt match {
case Some(js) =>
val size = unbuilder.beginArray(js)
val res = (1 to size) map { _ =>
Paths.get(unbuilder.readString(unbuilder.nextElement))
}
unbuilder.endArray()
res
case None =>
deserializationError("Expected JsArray but found None")
}
}
implicit val fileJsonFormatter: JsonFormat[Seq[File]] = new JsonFormat[Seq[File]] {
override def write[J](obj: Seq[File], builder: Builder[J]): Unit = {
builder.beginArray()
obj.foreach { file =>
builder.writeString(file.toString)
}
builder.endArray()
}
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[File] =
jsOpt match {
case Some(js) =>
val size = unbuilder.beginArray(js)
val res = (1 to size) map { _ =>
new File(unbuilder.readString(unbuilder.nextElement))
}
unbuilder.endArray()
res
case None =>
deserializationError("Expected JsArray but found None")
}
}
implicit val fileJson: JsonFormat[File] = new JsonFormat[File] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): File =
fileJsonFormatter.read(jsOpt, unbuilder).head
override def write[J](obj: File, builder: Builder[J]): Unit =
fileJsonFormatter.write(obj :: Nil, builder)
}
implicit val pathJson: JsonFormat[Path] = new JsonFormat[Path] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Path =
pathJsonFormatter.read(jsOpt, unbuilder).head
override def write[J](obj: Path, builder: Builder[J]): Unit =
pathJsonFormatter.write(obj :: Nil, builder)
}
implicit val fileStampJsonFormatter: JsonFormat[Seq[(Path, FileStamp)]] =
new JsonFormat[Seq[(Path, FileStamp)]] {
override def write[J](obj: Seq[(Path, FileStamp)], builder: Builder[J]): Unit = {
val (hashes, lastModifiedTimes) = obj.partition(_._2.isInstanceOf[Hash])
builder.beginObject()
builder.addField("hashes", hashes.asInstanceOf[Seq[(Path, Hash)]])(fileHashJsonFormatter)
builder.addField(
"lastModifiedTimes",
lastModifiedTimes.asInstanceOf[Seq[(Path, LastModified)]]
)(
fileLastModifiedJsonFormatter
)
builder.endObject()
}
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, FileStamp)] =
jsOpt match {
case Some(js) =>
unbuilder.beginObject(js)
val hashes = unbuilder.readField("hashes")(fileHashJsonFormatter)
val lastModifieds =
unbuilder.readField("lastModifiedTimes")(fileLastModifiedJsonFormatter)
unbuilder.endObject()
hashes ++ lastModifieds
case None =>
deserializationError("Expected JsObject but found None")
}
}
val fileHashJsonFormatter: JsonFormat[Seq[(Path, Hash)]] =
new JsonFormat[Seq[(Path, Hash)]] {
override def write[J](obj: Seq[(Path, Hash)], builder: Builder[J]): Unit = {
object Formats {
implicit val seqPathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] {
override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = {
builder.beginArray()
obj.foreach {
case (p, h) =>
builder.beginArray()
builder.writeString(p.toString)
builder.writeString(h.hex)
builder.endArray()
obj.foreach { path =>
builder.writeString(path.toString)
}
builder.endArray()
}
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, Hash)] =
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[Path] =
jsOpt match {
case Some(js) =>
val size = unbuilder.beginArray(js)
val res = (1 to size) map { _ =>
unbuilder.beginArray(unbuilder.nextElement)
val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
val hash = FileStamp.hash(unbuilder.readString(unbuilder.nextElement))
unbuilder.endArray()
path -> hash
Paths.get(unbuilder.readString(unbuilder.nextElement))
}
unbuilder.endArray()
res
@ -181,30 +116,22 @@ private[sbt] object FileStamp {
deserializationError("Expected JsArray but found None")
}
}
val fileLastModifiedJsonFormatter: JsonFormat[Seq[(Path, LastModified)]] =
new JsonFormat[Seq[(Path, LastModified)]] {
override def write[J](obj: Seq[(Path, LastModified)], builder: Builder[J]): Unit = {
implicit val seqFileJsonFormatter: JsonFormat[Seq[File]] = new JsonFormat[Seq[File]] {
override def write[J](obj: Seq[File], builder: Builder[J]): Unit = {
builder.beginArray()
obj.foreach {
case (p, lm) =>
builder.beginArray()
builder.writeString(p.toString)
builder.writeLong(lm.time)
builder.endArray()
obj.foreach { file =>
builder.writeString(file.toString)
}
builder.endArray()
}
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, LastModified)] =
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[File] =
jsOpt match {
case Some(js) =>
val size = unbuilder.beginArray(js)
val res = (1 to size) map { _ =>
unbuilder.beginArray(unbuilder.nextElement)
val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
val hash = FileStamp.LastModified(unbuilder.readLong(unbuilder.nextElement))
unbuilder.endArray()
path -> hash
new File(unbuilder.readString(unbuilder.nextElement))
}
unbuilder.endArray()
res
@ -212,6 +139,111 @@ private[sbt] object FileStamp {
deserializationError("Expected JsArray but found None")
}
}
// JsonFormat for a single File, implemented by delegating to the Seq[File]
// formatter: write wraps the value in a one-element list, read takes the
// head of the decoded sequence.
// NOTE(review): read assumes the decoded sequence is non-empty; an empty
// JSON array would throw on .head — confirm inputs are only values that
// were produced by write.
implicit val fileJsonFormatter: JsonFormat[File] = new JsonFormat[File] {
  override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): File =
    seqFileJsonFormatter.read(jsOpt, unbuilder).head
  override def write[J](obj: File, builder: Builder[J]): Unit =
    seqFileJsonFormatter.write(obj :: Nil, builder)
}
// JsonFormat for a single Path, delegating to the Seq[Path] formatter in
// the same way as fileJsonFormatter delegates to Seq[File]: write emits a
// one-element list, read takes the head of the decoded sequence.
// NOTE(review): read throws on an empty decoded sequence (.head) — assumed
// to round-trip only values produced by write.
implicit val pathJsonFormatter: JsonFormat[Path] = new JsonFormat[Path] {
  override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Path =
    seqPathJsonFormatter.read(jsOpt, unbuilder).head
  override def write[J](obj: Path, builder: Builder[J]): Unit =
    seqPathJsonFormatter.write(obj :: Nil, builder)
}
// JsonFormat for a mixed sequence of (Path, FileStamp) pairs. The stamps
// are split by concrete subtype and stored under two fields of one JSON
// object — { "hashes": [...], "lastModifiedTimes": [...] } — so that each
// field can use its dedicated pair formatter.
implicit val seqPathFileStampJsonFormatter: JsonFormat[Seq[(Path, FileStamp)]] =
  new JsonFormat[Seq[(Path, FileStamp)]] {
    override def write[J](obj: Seq[(Path, FileStamp)], builder: Builder[J]): Unit = {
      // Partition on the stamp's runtime type; the Hash-side cast is
      // justified by the partition predicate.
      // NOTE(review): the other side is cast to LastModified — this assumes
      // every non-Hash stamp here is a LastModified; confirm FileStamp has
      // no other subtype reaching this formatter.
      val (hashes, lastModifiedTimes) = obj.partition(_._2.isInstanceOf[Hash])
      builder.beginObject()
      builder.addField("hashes", hashes.asInstanceOf[Seq[(Path, Hash)]])(
        seqPathHashJsonFormatter
      )
      builder.addField(
        "lastModifiedTimes",
        lastModifiedTimes.asInstanceOf[Seq[(Path, LastModified)]]
      )(seqPathLastModifiedJsonFormatter)
      builder.endObject()
    }
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, FileStamp)] =
      jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val hashes = unbuilder.readField("hashes")(seqPathHashJsonFormatter)
          val lastModifieds =
            unbuilder.readField("lastModifiedTimes")(seqPathLastModifiedJsonFormatter)
          unbuilder.endObject()
          // Concatenation groups all hashes before all last-modified stamps,
          // so the original interleaving order is not preserved on round trip.
          hashes ++ lastModifieds
        case None =>
          deserializationError("Expected JsObject but found None")
      }
  }
// JsonFormat encoding Seq[(Path, Hash)] as a JSON array of two-element
// arrays: [[ "<path>", "<hex digest>" ], ...].
private[sbt] val seqPathHashJsonFormatter: JsonFormat[Seq[(Path, Hash)]] =
  new JsonFormat[Seq[(Path, Hash)]] {
    override def write[J](obj: Seq[(Path, Hash)], builder: Builder[J]): Unit = {
      builder.beginArray()
      obj.foreach {
        case (p, h) =>
          // Each pair becomes a nested [path, hex] array.
          builder.beginArray()
          builder.writeString(p.toString)
          builder.writeString(h.hex)
          builder.endArray()
      }
      builder.endArray()
    }
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, Hash)] =
      jsOpt match {
        case Some(js) =>
          // Outer array length drives the loop; each element is itself a
          // nested [path, hex] pair that we descend into.
          val size = unbuilder.beginArray(js)
          val res = (1 to size) map { _ =>
            unbuilder.beginArray(unbuilder.nextElement)
            val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
            val hash = FileStamp.hash(unbuilder.readString(unbuilder.nextElement))
            unbuilder.endArray()
            path -> hash
          }
          unbuilder.endArray()
          res
        case None =>
          deserializationError("Expected JsArray but found None")
      }
  }
// JsonFormat encoding Seq[(Path, LastModified)] as a JSON array of
// two-element arrays: [[ "<path>", <millis> ], ...] — the mirror image of
// seqPathHashJsonFormatter, but carrying a long timestamp instead of a hex
// digest.
private[sbt] val seqPathLastModifiedJsonFormatter: JsonFormat[Seq[(Path, LastModified)]] =
  new JsonFormat[Seq[(Path, LastModified)]] {
    override def write[J](obj: Seq[(Path, LastModified)], builder: Builder[J]): Unit = {
      builder.beginArray()
      obj.foreach {
        case (p, lm) =>
          // Each pair becomes a nested [path, time] array.
          builder.beginArray()
          builder.writeString(p.toString)
          builder.writeLong(lm.time)
          builder.endArray()
      }
      builder.endArray()
    }
    override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[(Path, LastModified)] =
      jsOpt match {
        case Some(js) =>
          val size = unbuilder.beginArray(js)
          val res = (1 to size) map { _ =>
            unbuilder.beginArray(unbuilder.nextElement)
            val path = Paths.get(unbuilder.readString(unbuilder.nextElement))
            // Renamed local (was `hash`): this value is a last-modified
            // stamp, not a content hash.
            val lastModified = FileStamp.LastModified(unbuilder.readLong(unbuilder.nextElement))
            unbuilder.endArray()
            path -> lastModified
          }
          unbuilder.endArray()
          res
        case None =>
          deserializationError("Expected JsArray but found None")
      }
  }
}
private implicit class EitherOps(val e: Either[FileStamp, FileStamp]) extends AnyVal {
def value: Option[FileStamp] = if (e == null) None else Some(e.fold(identity, identity))

View File

@ -17,8 +17,8 @@ import sbt.internal.DynamicInput
import sbt.internal.nio.FileTreeRepository
import sbt.internal.util.AttributeKey
import sbt.internal.util.complete.Parser
import sbt.nio.file.{ ChangedFiles, FileAttributes, FileTreeView, Glob }
import sbt.{ Def, InputKey, ProjectRef, State, StateTransform }
import sbt.nio.file.{ FileAttributes, FileTreeView, Glob, PathFilter }
import sbt._
import scala.concurrent.duration.FiniteDuration
@ -29,19 +29,30 @@ object Keys {
case object ReloadOnSourceChanges extends WatchBuildSourceOption
val allInputFiles =
taskKey[Seq[Path]]("All of the file inputs for a task excluding directories and hidden files.")
val changedInputFiles = taskKey[Option[ChangedFiles]]("The changed files for a task")
val changedInputFiles =
taskKey[Seq[(Path, FileStamp)] => FileChanges]("The changed files for a task")
val fileInputs = settingKey[Seq[Glob]](
"The file globs that are used by a task. This setting will generally be scoped per task. It will also be used to determine the sources to watch during continuous execution."
)
val fileInputIncludeFilter =
settingKey[PathFilter]("A filter to apply to the input sources of a task.")
val fileInputExcludeFilter =
settingKey[PathFilter]("An exclusion filter to apply to the input sources of a task.")
val inputFileStamper = settingKey[FileStamper](
"Toggles the file stamping implementation used to determine whether or not a file has been modified."
)
val fileOutputs = settingKey[Seq[Glob]]("Describes the output files of a task.")
val fileOutputIncludeFilter =
settingKey[PathFilter]("A filter to apply to the outputs of a task.")
val fileOutputExcludeFilter =
settingKey[PathFilter]("An exclusion filter to apply to the outputs of a task.")
val allOutputFiles =
taskKey[Seq[Path]]("All of the file output for a task excluding directories and hidden files.")
taskKey[Seq[Path]]("All of the file outputs for a task excluding directories and hidden files.")
val changedOutputFiles =
taskKey[Option[ChangedFiles]]("The files that have changed since the last task run.")
taskKey[Seq[(Path, FileStamp)] => FileChanges](
"The files that have changed since the last task run."
)
val outputFileStamper = settingKey[FileStamper](
"Toggles the file stamping implementation used to determine whether or not a file has been modified."
)
@ -130,10 +141,10 @@ object Keys {
private[sbt] val dynamicFileOutputs =
taskKey[Seq[Path]]("The outputs of a task").withRank(Invisible)
private[sbt] val inputFileStamps =
val inputFileStamps =
taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task input files")
.withRank(Invisible)
private[sbt] val outputFileStamps =
val outputFileStamps =
taskKey[Seq[(Path, FileStamp)]]("Retrieves the hashes for a set of task output files")
.withRank(Invisible)
private[sbt] type FileAttributeMap =

View File

@ -9,23 +9,24 @@ package sbt
package nio
import java.io.File
import java.nio.file.{ Files, Path }
import java.nio.file.Path
import java.util.concurrent.ConcurrentHashMap
import sbt.Project._
import sbt.internal.Clean.ToSeqPath
import sbt.internal.Continuous.FileStampRepository
import sbt.internal.util.{ AttributeKey, SourcePosition }
import sbt.internal.{ Clean, Continuous, DynamicInput, SettingsGraph }
import sbt.nio.FileStamp.{ fileStampJsonFormatter, pathJsonFormatter, _ }
import sbt.nio.FileStamp.Formats._
import sbt.nio.FileStamper.{ Hash, LastModified }
import sbt.nio.Keys._
import sbt.nio.file.ChangedFiles
import sbt.nio.file.{ AllPass, FileAttributes }
import sbt.std.TaskExtra._
import sjsonnew.JsonFormat
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.immutable.VectorBuilder
import scala.collection.mutable
private[sbt] object Settings {
private[sbt] def inject(transformed: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
@ -36,7 +37,7 @@ private[sbt] object Settings {
val cleanScopes = new java.util.HashSet[Scope].asScala
transformed.flatMap {
case s if s.key.key == sbt.nio.Keys.fileInputs.key => inputPathSettings(s)
case s => maybeAddOutputsAndFileStamps(s, fileOutputScopes, cleanScopes)
case s => s :: maybeAddOutputsAndFileStamps(s, fileOutputScopes, cleanScopes)
} ++ addCleanImpls(cleanScopes.toSeq)
}
@ -45,8 +46,8 @@ private[sbt] object Settings {
* `File`, `Seq[File]`, `Path`, `Seq[Path`. If it does, then we inject a number of
* task definition settings that allow the user to check if the output paths of
* the task have changed. It also adds a custom clean task that will delete the
* paths returned by the task, provided that they are in the task's target directory. We also inject these tasks if the fileOutputs setting is defined
* for the task.
* paths returned by the task, provided that they are in the task's target directory. We also
* inject these tasks if the fileOutputs setting is defined for the task.
*
* @param setting the setting to possibly inject with additional settings
* @param fileOutputScopes the set of scopes for which the fileOutputs setting is defined
@ -57,80 +58,45 @@ private[sbt] object Settings {
setting: Def.Setting[_],
fileOutputScopes: Set[Scope],
cleanScopes: mutable.Set[Scope]
): Seq[Def.Setting[_]] = {
): List[Def.Setting[_]] = {
setting.key.key match {
case ak: AttributeKey[_] if taskClass.isAssignableFrom(ak.manifest.runtimeClass) =>
def default: Seq[Def.Setting[_]] = {
def default: List[Def.Setting[_]] = {
val scope = setting.key.scope.copy(task = Select(ak))
if (fileOutputScopes.contains(scope)) {
val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key
val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
val scopedKey = Keys.dynamicFileOutputs in (sk.scope in sk.key)
cleanScopes.add(scope)
Vector(
setting,
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
}
) ++ Vector(
allOutputPathsImpl(scope),
outputFileStampsImpl(scope),
cleanImpl(scope)
)
} else setting :: Nil
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
} :: allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(scope) :: Nil
} else Nil
}
def mkSetting[T: JsonFormat: ToSeqPath]: List[Def.Setting[_]] = {
val sk = setting.asInstanceOf[Def.Setting[Task[T]]].key
val taskKey = TaskKey(sk.key) in sk.scope
// We create a previous reference so that clean automatically works without the
// user having to explicitly call previous anywhere.
val init = Previous.runtime(taskKey).zip(taskKey) {
case (_, t) => t.map(implicitly[ToSeqPath[T]].apply)
}
val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key)
addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) ::
outputsAndStamps(taskKey, cleanScopes)
}
ak.manifest.typeArguments match {
case t :: Nil if seqClass.isAssignableFrom(t.runtimeClass) =>
t.typeArguments match {
// Task[Seq[File]]
case f :: Nil if fileClass.isAssignableFrom(f.runtimeClass) =>
val sk = setting.asInstanceOf[Def.Setting[Task[Seq[File]]]].key
val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
Vector(
setting,
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_.map(_.toPath)))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
}
) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
// Task[Seq[Path]]
case p :: Nil if pathClass.isAssignableFrom(p.runtimeClass) =>
val sk = setting.asInstanceOf[Def.Setting[Task[Seq[Path]]]].key
val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
Vector(
setting,
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(identity))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
}
) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
case _ => default
case f :: Nil if fileClass.isAssignableFrom(f.runtimeClass) => mkSetting[Seq[File]]
case p :: Nil if pathClass.isAssignableFrom(p.runtimeClass) => mkSetting[Seq[Path]]
case _ => default
}
// Task[File]
case t :: Nil if fileClass.isAssignableFrom(t.runtimeClass) =>
val sk = setting.asInstanceOf[Def.Setting[Task[File]]].key
val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
Vector(
setting,
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_.toPath :: Nil))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
}
) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
// Task[Path]
case t :: Nil if pathClass.isAssignableFrom(t.runtimeClass) =>
val sk = setting.asInstanceOf[Def.Setting[Task[Path]]].key
val scopedKey = sk.scopedKey.copy(sk.scope in sk.key, Keys.dynamicFileOutputs.key)
Vector(
setting,
addTaskDefinition {
val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ :: Nil))
Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)
}
) ++ outputsAndStamps(TaskKey(sk.key) in sk.scope, cleanScopes)
case _ => default
case t :: Nil if fileClass.isAssignableFrom(t.runtimeClass) => mkSetting[File]
case t :: Nil if pathClass.isAssignableFrom(t.runtimeClass) => mkSetting[Path]
case _ => default
}
case _ => setting :: Nil
case _ => Nil
}
}
private[sbt] val inject: Def.ScopedKey[_] => Seq[Def.Setting[_]] = scopedKey =>
@ -143,8 +109,6 @@ private[sbt] object Settings {
case dynamicDependency.key => (dynamicDependency in scopedKey.scope := { () }) :: Nil
case transitiveClasspathDependency.key =>
(transitiveClasspathDependency in scopedKey.scope := { () }) :: Nil
case changedOutputFiles.key =>
changedFilesImpl(scopedKey, changedOutputFiles, outputFileStamps)
case _ => Nil
}
@ -191,12 +155,13 @@ private[sbt] object Settings {
*/
private[sbt] def inputPathSettings(setting: Def.Setting[_]): Seq[Def.Setting[_]] = {
val scopedKey = setting.key
setting :: (Keys.allInputPathsAndAttributes in scopedKey.scope := {
val view = (fileTreeView in scopedKey.scope).value
val inputs = (fileInputs in scopedKey.scope).value
val stamper = (inputFileStamper in scopedKey.scope).value
val forceTrigger = (watchForceTriggerOnAnyChange in scopedKey.scope).value
val dynamicInputs = (Continuous.dynamicInputs in scopedKey.scope).value
val scope = scopedKey.scope
setting :: (Keys.allInputPathsAndAttributes in scope := {
val view = (fileTreeView in scope).value
val inputs = (fileInputs in scope).value
val stamper = (inputFileStamper in scope).value
val forceTrigger = (watchForceTriggerOnAnyChange in scope).value
val dynamicInputs = (Continuous.dynamicInputs in scope).value
// This makes watch work by ensuring that the input glob is registered with the
// repository used by the watch process.
sbt.Keys.state.value.get(globalFileTreeRepository).foreach { repo =>
@ -204,8 +169,7 @@ private[sbt] object Settings {
}
dynamicInputs.foreach(_ ++= inputs.map(g => DynamicInput(g, stamper, forceTrigger)))
view.list(inputs)
}) :: fileStamps(scopedKey) :: allFilesImpl(scopedKey) :: Nil ++
changedInputFilesImpl(scopedKey)
}) :: fileStamps(scopedKey) :: allFilesImpl(scope) :: changedInputFilesImpl(scope)
}
private[this] val taskClass = classOf[Task[_]]
@ -220,12 +184,15 @@ private[sbt] object Settings {
* @param scopedKey the key whose file inputs we are seeking
* @return a task definition that retrieves all of the input paths scoped to the input key.
*/
private[this] def allFilesImpl(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
addTaskDefinition(Keys.allInputFiles in scopedKey.scope := {
(Keys.allInputPathsAndAttributes in scopedKey.scope).value.collect {
case (p, a) if a.isRegularFile && !Files.isHidden(p) => p
private[this] def allFilesImpl(scope: Scope): Def.Setting[_] = {
addTaskDefinition(Keys.allInputFiles in scope := {
val filter =
(fileInputIncludeFilter in scope).value && !(fileInputExcludeFilter in scope).value
(Keys.allInputPathsAndAttributes in scope).value.collect {
case (p, a) if filter.accept(p, a) => p
}
})
}
/**
* Returns all of the regular files whose stamp has changed since the last time the
@ -233,54 +200,55 @@ private[sbt] object Settings {
* files or files whose stamp has not changed since the previous run. Directories and hidden
* files are excluded
*
* @param scopedKey the key whose fileInputs we are seeking
* @param scope the scope corresponding to the task whose fileInputs we are seeking
* @return a task definition that retrieves the changed input files scoped to the key.
*/
private[this] def changedInputFilesImpl(scopedKey: Def.ScopedKey[_]): Seq[Def.Setting[_]] =
changedFilesImpl(scopedKey, changedInputFiles, inputFileStamps) ::
(watchForceTriggerOnAnyChange in scopedKey.scope := {
(watchForceTriggerOnAnyChange in scopedKey.scope).?.value match {
private[this] def changedInputFilesImpl(scope: Scope): List[Def.Setting[_]] =
changedFilesImpl(scope, changedInputFiles, inputFileStamps) ::
(watchForceTriggerOnAnyChange in scope := {
(watchForceTriggerOnAnyChange in scope).?.value match {
case Some(t) => t
case None => false
}
}) :: Nil
private[this] def changedFilesImpl(
scopedKey: Def.ScopedKey[_],
changeKey: TaskKey[Option[ChangedFiles]],
scope: Scope,
changeKey: TaskKey[Seq[(Path, FileStamp)] => FileChanges],
stampKey: TaskKey[Seq[(Path, FileStamp)]]
): Def.Setting[_] =
addTaskDefinition(changeKey in scopedKey.scope := {
val current = (stampKey in scopedKey.scope).value
(stampKey in scopedKey.scope).previous.flatMap(changedFiles(_, current))
addTaskDefinition(changeKey in scope := {
val current = (stampKey in scope).value
changedFiles(_, current)
})
private[sbt] def changedFiles(
previous: Seq[(Path, FileStamp)],
current: Seq[(Path, FileStamp)]
): Option[ChangedFiles] = {
): FileChanges = {
val createdBuilder = new VectorBuilder[Path]
val deletedBuilder = new VectorBuilder[Path]
val updatedBuilder = new VectorBuilder[Path]
val currentMap = current.toMap
val prevMap = previous.toMap
val modifiedBuilder = new VectorBuilder[Path]
val unmodifiedBuilder = new VectorBuilder[Path]
val seen = ConcurrentHashMap.newKeySet[Path]
val prevMap = new ConcurrentHashMap[Path, FileStamp]()
previous.foreach { case (k, v) => prevMap.put(k, v); () }
current.foreach {
case (path, currentStamp) =>
prevMap.get(path) match {
case Some(oldStamp) => if (oldStamp != currentStamp) updatedBuilder += path
case None => createdBuilder += path
if (seen.add(path)) {
prevMap.remove(path) match {
case null => createdBuilder += path
case old => (if (old != currentStamp) modifiedBuilder else unmodifiedBuilder) += path
}
}
}
previous.foreach {
case (path, _) =>
if (currentMap.get(path).isEmpty) deletedBuilder += path
}
val created = createdBuilder.result()
val deleted = deletedBuilder.result()
val updated = updatedBuilder.result()
if (created.isEmpty && deleted.isEmpty && updated.isEmpty) {
None
prevMap.forEach((p, _) => deletedBuilder += p)
val unmodified = unmodifiedBuilder.result()
if (unmodified.size == current.size) {
FileChanges.unmodified(unmodifiedBuilder.result)
} else {
val cf = ChangedFiles(created = created, deleted = deleted, updated = updated)
Some(cf)
val created = createdBuilder.result()
val deleted = deletedBuilder.result()
val modified = modifiedBuilder.result()
FileChanges(created, deleted, modified, unmodified)
}
}
@ -300,7 +268,7 @@ private[sbt] object Settings {
* @param taskKey the task for which we add a custom clean implementation
* @return a task specificic clean implementation
*/
private[sbt] def cleanImpl[T: JsonFormat: ToSeqPath](taskKey: TaskKey[T]): Seq[Def.Setting[_]] = {
private[sbt] def cleanImpl[T: JsonFormat: ToSeqPath](taskKey: TaskKey[T]): Def.Setting[_] = {
val taskScope = taskKey.scope in taskKey.key
addTaskDefinition(sbt.Keys.clean in taskScope := Def.taskDyn {
// the clean file task needs to run first because the previous cache gets blown away
@ -318,10 +286,11 @@ private[sbt] object Settings {
* @return a task definition that retrieves the input files and their file stamps scoped to the
* input key.
*/
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] =
addTaskDefinition(Keys.inputFileStamps in scopedKey.scope := {
val cache = (unmanagedFileStampCache in scopedKey.scope).value
val stamper = (Keys.inputFileStamper in scopedKey.scope).value
private[sbt] def fileStamps(scopedKey: Def.ScopedKey[_]): Def.Setting[_] = {
val scope = scopedKey.scope
addTaskDefinition(Keys.inputFileStamps in scope := {
val cache = (unmanagedFileStampCache in scope).value
val stamper = (Keys.inputFileStamper in scope).value
val stampFile: Path => Option[(Path, FileStamp)] =
sbt.Keys.state.value.get(globalFileTreeRepository) match {
case Some(repo: FileStampRepository) =>
@ -335,25 +304,43 @@ private[sbt] object Settings {
case _ =>
(path: Path) => cache.getOrElseUpdate(path, stamper).map(path -> _)
}
(Keys.allInputPathsAndAttributes in scopedKey.scope).value.flatMap {
case (path, a) if a.isRegularFile && !Files.isHidden(path) => stampFile(path)
case _ => None
val filter =
(fileInputIncludeFilter in scope).value && !(fileInputExcludeFilter in scope).value
(Keys.allInputPathsAndAttributes in scope).value.flatMap {
case (path, a) if filter.accept(path, a) => stampFile(path)
case _ => None
}
})
}
private[this] def outputsAndStamps[T: JsonFormat: ToSeqPath](
taskKey: TaskKey[T],
cleanScopes: mutable.Set[Scope]
): Seq[Def.Setting[_]] = {
): List[Def.Setting[_]] = {
val scope = taskKey.scope in taskKey.key
cleanScopes.add(scope)
Vector(allOutputPathsImpl(scope), outputFileStampsImpl(scope)) ++ cleanImpl(taskKey)
val changes = changedFilesImpl(scope, changedOutputFiles, outputFileStamps) :: Nil
allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(taskKey) :: changes
}
private[this] def allOutputPathsImpl(scope: Scope): Def.Setting[_] =
addTaskDefinition(allOutputFiles in scope := {
val filter =
(fileOutputIncludeFilter in scope).value && !(fileOutputExcludeFilter in scope).value
val fileOutputGlobs = (fileOutputs in scope).value
val allFileOutputs = fileTreeView.value.list(fileOutputGlobs).map(_._1)
val allFileOutputs = (fileTreeView in scope).value.list(fileOutputGlobs).map(_._1)
val dynamicOutputs = (dynamicFileOutputs in scope).value
allFileOutputs ++ dynamicOutputs.filterNot(p => fileOutputGlobs.exists(_.matches(p)))
/*
* We want to avoid computing the FileAttributes in the common case where nothing is
* being filtered (which is the case with the default filters:
* include = AllPass, exclude = NoPass).
*/
val attributeFilter: Path => Boolean = filter match {
case AllPass => _ => true
case f => p => FileAttributes(p).map(f.accept(p, _)).getOrElse(false)
}
allFileOutputs ++ dynamicOutputs.filterNot { p =>
fileOutputGlobs.exists(_.matches(p)) || !attributeFilter(p)
}
})
private[this] def outputFileStampsImpl(scope: Scope): Def.Setting[_] =
addTaskDefinition(outputFileStamps in scope := {
@ -361,7 +348,11 @@ private[sbt] object Settings {
case LastModified => FileStamp.lastModified
case Hash => FileStamp.hash
}
(allOutputFiles in scope).value.flatMap(p => stamper(p).map(p -> _))
val allFiles = (allOutputFiles in scope).value
// The cache invalidation is specifically so that source formatters can run before
// the compile task and the file stamps seen by compile match the post-format stamps.
allFiles.foreach((unmanagedFileStampCache in scope).value.invalidate)
allFiles.flatMap(p => stamper(p).map(p -> _))
})
}

View File

@ -11,7 +11,8 @@ import java.nio.file.{ Path, Paths }
import org.scalatest.FlatSpec
import sbt.nio.FileStamp
import sbt.nio.FileStamp._
import sbt.nio.FileStamp.Formats
import sjsonnew.JsonFormat
import sjsonnew.support.scalajson.unsafe.Converter
class FileStampJsonSpec extends FlatSpec {
@ -20,8 +21,10 @@ class FileStampJsonSpec extends FlatSpec {
Paths.get("foo") -> FileStamp.hash("bar"),
Paths.get("bar") -> FileStamp.hash("buzz")
)
val json = Converter.toJsonUnsafe(hashes)(fileHashJsonFormatter)
val deserialized = Converter.fromJsonUnsafe(json)(fileHashJsonFormatter)
implicit val formatter: JsonFormat[Seq[(Path, FileStamp.Hash)]] =
Formats.seqPathHashJsonFormatter
val json = Converter.toJsonUnsafe(hashes)
val deserialized = Converter.fromJsonUnsafe(json)
assert(hashes == deserialized)
}
"file last modified times" should "be serializable" in {
@ -29,8 +32,10 @@ class FileStampJsonSpec extends FlatSpec {
Paths.get("foo") -> FileStamp.LastModified(1234),
Paths.get("bar") -> FileStamp.LastModified(5678)
)
val json = Converter.toJsonUnsafe(lastModifiedTimes)(fileLastModifiedJsonFormatter)
val deserialized = Converter.fromJsonUnsafe(json)(fileLastModifiedJsonFormatter)
implicit val formatter: JsonFormat[Seq[(Path, FileStamp.LastModified)]] =
Formats.seqPathLastModifiedJsonFormatter
val json = Converter.toJsonUnsafe(lastModifiedTimes)
val deserialized = Converter.fromJsonUnsafe(json)
assert(lastModifiedTimes == deserialized)
}
"both" should "be serializable" in {
@ -43,8 +48,9 @@ class FileStampJsonSpec extends FlatSpec {
Paths.get("bar") -> FileStamp.LastModified(5678)
)
val both: Seq[(Path, FileStamp)] = hashes ++ lastModifiedTimes
val json = Converter.toJsonUnsafe(both)(fileStampJsonFormatter)
val deserialized = Converter.fromJsonUnsafe(json)(fileStampJsonFormatter)
import Formats.seqPathFileStampJsonFormatter
val json = Converter.toJsonUnsafe(both)
val deserialized = Converter.fromJsonUnsafe(json)
assert(both == deserialized)
}
}

View File

@ -10,7 +10,7 @@ object Dependencies {
def nightlyVersion: Option[String] = sys.props.get("sbt.build.version")
// sbt modules
private val ioVersion = nightlyVersion.getOrElse("1.3.0-M13")
private val ioVersion = nightlyVersion.getOrElse("1.3.0-M15")
private val utilVersion = nightlyVersion.getOrElse("1.3.0-M8")
private val lmVersion =
sys.props.get("sbt.build.lm.version") match {

View File

@ -9,6 +9,8 @@ import sbt.nio.FileStamp
import sjsonnew.JsonFormat
import java.nio.file.{ Path => NioPath }
import sbt.internal.FileChangesMacro
import scala.language.experimental.macros
package object sbt
@ -33,12 +35,17 @@ package object sbt
implicit def fileToRichFile(file: File): sbt.io.RichFile = new sbt.io.RichFile(file)
implicit def filesToFinder(cc: Traversable[File]): sbt.io.PathFinder =
sbt.io.PathFinder.strict(cc)
/*
* Provides macro extension methods. Because the extension methods are all macros, no instance
* of FileChangesMacro.TaskOps is ever made which is why it is ok to use `???`.
*/
implicit def taskToTaskOpts[T](t: TaskKey[T]): FileChangesMacro.TaskOps[T] = ???
implicit val fileStampJsonFormatter: JsonFormat[Seq[(NioPath, FileStamp)]] =
FileStamp.fileStampJsonFormatter
implicit val pathJsonFormatter: JsonFormat[Seq[NioPath]] = FileStamp.pathJsonFormatter
implicit val fileJsonFormatter: JsonFormat[Seq[File]] = FileStamp.fileJsonFormatter
implicit val singlePathJsonFormatter: JsonFormat[NioPath] = FileStamp.pathJson
implicit val singleFileJsonFormatter: JsonFormat[File] = FileStamp.fileJson
FileStamp.Formats.seqPathFileStampJsonFormatter
implicit val pathJsonFormatter: JsonFormat[Seq[NioPath]] = FileStamp.Formats.seqPathJsonFormatter
implicit val fileJsonFormatter: JsonFormat[Seq[File]] = FileStamp.Formats.seqFileJsonFormatter
implicit val singlePathJsonFormatter: JsonFormat[NioPath] = FileStamp.Formats.pathJsonFormatter
implicit val singleFileJsonFormatter: JsonFormat[File] = FileStamp.Formats.fileJsonFormatter
// others
object CompileOrder {

View File

@ -66,8 +66,12 @@ trait Import {
val AnyPath = sbt.nio.file.AnyPath
type ChangedFiles = sbt.nio.file.ChangedFiles
val ChangedFiles = sbt.nio.file.ChangedFiles
type FileChanges = sbt.nio.FileChanges
val FileChanges = sbt.nio.FileChanges
type Glob = sbt.nio.file.Glob
val Glob = sbt.nio.file.Glob
type PathFilter = sbt.nio.file.PathFilter
val PathFilter = sbt.nio.file.PathFilter
type RelativeGlob = sbt.nio.file.RelativeGlob
val RelativeGlob = sbt.nio.file.RelativeGlob
val RecursiveGlob = sbt.nio.file.RecursiveGlob

View File

@ -9,13 +9,14 @@ copyFile / target := baseDirectory.value / "out"
copyFile := Def.task {
val prev = copyFile.previous
val changes: Option[Seq[Path]] = (copyFile / changedInputFiles).value.map {
case ChangedFiles(c, _, u) => c ++ u
val changes: Option[Seq[Path]] = copyFile.inputFileChanges match {
case fc @ FileChanges(c, _, u, _) if fc.hasChanges => Some(c ++ u)
case _ => None
}
prev match {
case Some(v: Int) if changes.isEmpty => v
case _ =>
changes.getOrElse((copyFile / allInputFiles).value).foreach { p =>
changes.getOrElse(copyFile.inputFiles).foreach { p =>
val outDir = baseDirectory.value / "out"
IO.createDirectory(outDir)
IO.copyFile(p.toFile, outDir / p.getFileName.toString)
@ -35,9 +36,15 @@ checkOutDirectoryHasFile := {
assert(result == Seq(baseDirectory.value / "out" / "Foo.txt"))
}
val checkCount = inputKey[Unit]("Check that the expected number of evaluations have run.")
checkCount := Def.inputTask {
val expected = Def.spaceDelimited("").parsed.head.toInt
commands += Command.single("checkCount") { (s, digits) =>
s"writeCount $digits" :: "checkCountImpl" :: s
}
val writeCount = inputKey[Unit]("writes the count to a file")
writeCount := IO.write(baseDirectory.value / "expectedCount", Def.spaceDelimited().parsed.head)
val checkCountImpl = taskKey[Unit]("Check that the expected number of evaluations have run.")
checkCountImpl := {
val expected = IO.read(baseDirectory.value / "expectedCount").toInt
val previous = copyFile.previous.getOrElse(0)
assert(previous == expected)
}.evaluated
}

View File

@ -0,0 +1,21 @@
version = 2.0.0
maxColumn = 100
project.git = true
project.excludeFilters = [ "\\Wsbt-test\\W", "\\Winput_sources\\W", "\\Wcontraband-scala\\W" ]
# http://docs.scala-lang.org/style/scaladoc.html recommends the JavaDoc style.
# scala/scala is written that way too https://github.com/scala/scala/blob/v2.12.2/src/library/scala/Predef.scala
docstrings = JavaDoc
# It also seems more idiomatic to include whitespace inside import braces: import x.{ yyy }
spaces.inImportCurlyBraces = true
# This is more idiomatic Scala.
# http://docs.scala-lang.org/style/indentation.html#methods-with-numerous-arguments
align.openParenCallSite = false
align.openParenDefnSite = false
# For better code clarity
danglingParentheses = true
trailingCommas = preserve

View File

@ -0,0 +1,40 @@
import java.nio.file.Path
import complete.DefaultParsers._
// Scripted-test build: exercises output file stamping around a toy
// scalafmt plugin.
enablePlugins(ScalafmtPlugin)
// Lists every .class file under the Compile class directory.
val classFiles = taskKey[Seq[Path]]("The classfiles generated by compile")
classFiles := {
val classes = (Compile / classDirectory).value.toGlob / ** / "*.class"
fileTreeView.value.list(classes).map(_._1)
}
// Ensure compile has run before the class directory is listed.
classFiles := classFiles.dependsOn(Compile / compile).value
val compileAndCheckNoClassFileUpdates = taskKey[Unit]("Checks that there are no class file updates")
compileAndCheckNoClassFileUpdates := {
// Compare current output stamps with the previous run; they must match,
// i.e. recompilation produced no class-file changes.
val current = (classFiles / outputFileStamps).value.toSet
val previous = (classFiles / outputFileStamps).previous.getOrElse(Nil).toSet
assert(current == previous)
}
val checkLastModified = inputKey[Unit]("Check the last modified time for a file")
checkLastModified := {
// Parses "<file> [!]<millis>": a leading '!' before the digits negates
// the comparison (asserts the mtime does NOT equal the given value).
(Space ~> OptSpace ~> matched(charClass(_ != ' ').+) ~ (Space ~> ('!'.? ~ Digit.+.map(
_.mkString.toLong
)))).parsed match {
case (file, (negate, expectedLastModified)) =>
val sourceFile = baseDirectory.value / "src" / "main" / "scala" / file
val lastModified = IO.getModifiedTimeOrZero(sourceFile)
negate match {
case Some(_) => assert(lastModified != expectedLastModified)
case None => assert(lastModified == expectedLastModified)
}
}
}
val setLastModified = inputKey[Unit]("Set the last modified time for a file")
setLastModified := {
// Expects exactly two arguments: <file> <millis>.
val Seq(file, lm) = Def.spaceDelimited().parsed
val sourceFile = baseDirectory.value / "src" / "main" / "scala" / file
IO.setModifiedTimeOrFalse(sourceFile, lm.toLong)
}

View File

@ -0,0 +1 @@
class Bar { val x = }

View File

@ -0,0 +1 @@
class Bar {val x=2}

View File

@ -0,0 +1 @@
class Foo{val x=1}

View File

@ -0,0 +1 @@
libraryDependencies += "org.scalameta" %% "scalafmt-dynamic" % "2.0.0"

View File

@ -0,0 +1,56 @@
import java.io.PrintWriter
import java.nio.file._
import sbt._
import sbt.Keys.{ baseDirectory, unmanagedSources }
import sbt.nio.Keys.{ fileInputs, inputFileStamps, outputFileStamper, outputFileStamps }
import sbt.nio.FileStamper
import org.scalafmt.interfaces.{ Scalafmt, ScalafmtReporter }
object ScalafmtPlugin extends AutoPlugin {
  // Reporter that fails loudly instead of logging: formatting errors are
  // rethrown as exceptions so the scripted test can assert that the task fails.
  private val reporter = new ScalafmtReporter {
    override def error(file: Path, message: String): Unit = throw new Exception(s"$file $message")
    override def error(file: Path, e: Throwable): Unit = throw e
    override def excluded(file: Path): Unit = {}
    override def parsedConfig(config: Path, scalafmtVersion: String): Unit = {}
    override def downloadWriter: PrintWriter = new PrintWriter(System.out, true)
  }
  // Shared scalafmt-dynamic instance; the scalafmt version is resolved from the config file.
  private val formatter = Scalafmt.create(this.getClass.getClassLoader).withReporter(reporter)
  object autoImport {
    // Formats sources in place and returns the paths that are now properly formatted.
    val scalafmtImpl = taskKey[Seq[Path]]("Format scala sources")
    // Fails the build if any source failed to format.
    val scalafmt = taskKey[Unit]("Format scala sources and validate results")
  }
  import autoImport._
  override lazy val projectSettings = super.projectSettings ++ Seq(
    // Format exactly the files that the compiler treats as unmanaged sources.
    Compile / scalafmtImpl / fileInputs := (Compile / unmanagedSources / fileInputs).value,
    // Stamp outputs by content hash so an unchanged file is detected as such.
    Compile / scalafmtImpl / outputFileStamper := FileStamper.Hash,
    Compile / scalafmtImpl := {
      val config = baseDirectory.value.toPath / ".scalafmt.conf"
      val allInputStamps = (Compile / scalafmtImpl / inputFileStamps).value
      // Output stamps recorded by the previous run; empty map on the first run.
      val previous =
        (Compile / scalafmtImpl / outputFileStamps).previous.map(_.toMap).getOrElse(Map.empty)
      allInputStamps.flatMap {
        // Stamp unchanged since the last successful format: skip re-formatting.
        case (p, s) if previous.get(p).fold(false)(_ == s) => Some(p)
        case (p, s) =>
          try {
            println(s"Formatting $p")
            Files.write(p, formatter.format(config, p, new String(Files.readAllBytes(p))).getBytes)
            Some(p)
          } catch {
            // A file that fails to format (e.g. unparseable source) is dropped
            // from the output list; the scalafmt task below reports it.
            case e: Exception =>
              println(e)
              None
          }
      }
    },
    Compile / scalafmt := {
      val outputs = (Compile / scalafmtImpl / outputFileStamps).value.toMap
      // Any input that did not make it into the outputs failed to format.
      val improperlyFormatted = (Compile / scalafmtImpl).inputFiles.filterNot(outputs.contains _)
      if (improperlyFormatted.nonEmpty) {
        val msg = s"There were improperly formatted files:\n${improperlyFormatted mkString "\n"}"
        throw new IllegalStateException(msg)
      }
    },
    // Run scalafmt before compilation by making the source stamps depend on it.
    Compile / unmanagedSources / inputFileStamps :=
      (Compile / unmanagedSources / inputFileStamps).dependsOn(Compile / scalafmt).value
  )
}

View File

@ -0,0 +1 @@
class Foo{val x=1}

View File

@ -0,0 +1,47 @@
> setLastModified Foo.scala 12345678
# The first time we run compile, we expect an updated class file for Foo.class
-> compileAndCheckNoClassFileUpdates
# scalafmt should modify Foo.scala
> checkLastModified Foo.scala !12345678
# The second time we run compile, there should be no updates since Foo.scala hasn't changed since
# scalafmt modified it in the first run
> compileAndCheckNoClassFileUpdates
$ copy-file changes/Foo.scala src/main/scala/Foo.scala
$ copy-file changes/Bar-bad.scala src/main/scala/Bar.scala
> setLastModified Foo.scala 12345678
> setLastModified Bar.scala 12345678
# formatting should fail because Bar.scala is invalid, but Foo.scala should be re-formatted
-> scalafmt
> checkLastModified Foo.scala !12345678
> checkLastModified Bar.scala 12345678
$ copy-file changes/Bar.scala src/main/scala/Bar.scala
> setLastModified Foo.scala 12345678
> setLastModified Bar.scala 12345678
# Formatting should now succeed and Foo.scala should not be re-formatted
> scalafmt
> checkLastModified Foo.scala 12345678
> checkLastModified Bar.scala !12345678
# make sure that the custom clean task doesn't blow away the scala source files (it should exclude
# any files not in the target directory)
> scalafmt / clean
$ exists src/main/scala/Foo.scala
$ exists src/main/scala/Bar.scala

View File

@ -4,8 +4,8 @@ val fileInputTask = taskKey[Unit]("task with file inputs")
fileInputTask / fileInputs += Glob(baseDirectory.value / "base", "*.md")
fileInputTask := Def.taskDyn {
if ((fileInputTask / changedInputFiles).value.fold(false)(_.updated.nonEmpty))
Def.task(assert(true))
else Def.task(assert(false))
}.value
fileInputTask := {
val created = fileInputTask.inputFileChanges.created
if (created.exists(_.getFileName.toString.startsWith("foo"))) assert(false)
assert(true)
}

View File

@ -1,5 +1,9 @@
-> fileInputTask
> fileInputTask
$ copy-file changes/Bar.md base/Bar.md
> fileInputTask
$ copy-file changes/Bar.md base/foo.md
-> fileInputTask

View File

@ -5,7 +5,7 @@ foo / fileInputs += baseDirectory.value.toGlob / "base" / "*.txt"
foo / target := baseDirectory.value / "out"
foo := {
val out = baseDirectory.value / "out"
((foo / allInputFiles).value: Seq[Path]).map { p =>
foo.inputFiles.map { p =>
val f = p.toFile
val target = out / f.getName
IO.copyFile (f, target)

View File

@ -0,0 +1 @@
class Foo

View File

@ -2,4 +2,4 @@
-> test
> test
> test

View File

@ -7,20 +7,20 @@ foo / fileInputs := Seq(
)
val checkModified = taskKey[Unit]("check that modified files are returned")
checkModified := Def.taskDyn {
val modified = (foo / changedInputFiles).value.map(_.updated).getOrElse(Nil)
val allFiles = (foo / allInputFiles).value
if (modified.isEmpty) Def.task(assert(true))
else Def.task {
checkModified := {
val modified = foo.inputFileChanges.modified
val allFiles = foo.inputFiles
if (modified.isEmpty) assert(true)
else {
assert(modified != allFiles)
assert(modified == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
}
}.value
}
val checkRemoved = taskKey[Unit]("check that removed files are returned")
checkRemoved := Def.taskDyn {
val files = (foo / allInputFiles).value
val removed = (foo / changedInputFiles).value.map(_.deleted).getOrElse(Nil)
val files = foo.inputFiles
val removed = foo.inputFileChanges.deleted
if (removed.isEmpty) Def.task(assert(true))
else Def.task {
assert(files == Seq((baseDirectory.value / "base" / "Foo.txt").toPath))
@ -30,12 +30,12 @@ checkRemoved := Def.taskDyn {
val checkAdded = taskKey[Unit]("check that modified files are returned")
checkAdded := Def.taskDyn {
val files = (foo / allInputFiles).value
val added = (foo / changedInputFiles).value.map(_.created).getOrElse(Nil)
if (added.isEmpty || (files.toSet == added.toSet)) Def.task(assert(true))
val files = foo.inputFiles
val created = foo.inputFileChanges.created
if (created.isEmpty || (files.toSet == created.toSet)) Def.task(assert(true))
else Def.task {
val base = baseDirectory.value / "base"
assert(files.toSet == Set("Bar.md", "Foo.txt").map(p => (base / p).toPath))
assert(added == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
assert(created == Seq((baseDirectory.value / "base" / "Bar.md").toPath))
}
}.value

View File

@ -0,0 +1 @@
fooo

View File

@ -0,0 +1 @@
foo

View File

@ -4,6 +4,16 @@ $ copy-file changes/Bar.md base/Bar.md
> checkModified
$ copy-file changes/Foo-bad.txt base/Foo.txt
-> checkModified
-> checkModified
$ copy-file changes/Foo.txt base/Foo.txt
> checkModified
> checkRemoved
$ delete base/Bar.md

View File

@ -5,7 +5,7 @@ val foo = taskKey[Seq[File]]("Retrieve Foo.txt")
foo / fileInputs += baseDirectory.value.toGlob / ** / "*.txt"
foo := (foo / allInputFiles).value.map(_.toFile)
foo := foo.inputFiles.map(_.toFile)
val checkFoo = taskKey[Unit]("Check that the Foo.txt file is retrieved")
@ -16,7 +16,7 @@ val bar = taskKey[Seq[File]]("Retrieve Bar.md")
bar / fileInputs += baseDirectory.value.toGlob / "base" / "subdir" / "nested-subdir" / "*.md"
bar := (bar / allInputFiles).value.map(_.toFile)
bar := bar.inputFiles.map(_.toFile)
val checkBar = taskKey[Unit]("Check that the Bar.md file is retrieved")
@ -32,7 +32,7 @@ val checkAll = taskKey[Unit]("Check that the Bar.md file is retrieved")
checkAll := {
import sbt.dsl.LinterLevel.Ignore
val expected = Set("Foo.txt", "Bar.md").map(baseDirectory.value / "base" / "subdir" / "nested-subdir" / _)
val actual = (all / allInputFiles).value.map(_.toFile).toSet
val actual = all.inputFiles.map(_.toFile).toSet
assert(actual == expected)
}
@ -55,6 +55,6 @@ depth / fileInputs ++= {
checkDepth := {
val expected = Seq("Bar.md").map(baseDirectory.value / "base/subdir/nested-subdir" / _)
val actual = (depth / allInputFiles).value.map(_.toFile)
val actual = depth.inputFiles.map(_.toFile)
assert(actual == expected)
}

View File

@ -0,0 +1,34 @@
import java.nio.file.{ Files, Path }
// Copies each file from the inputs directory into this task's cache directory.
val copyPaths = taskKey[Seq[Path]]("Copy paths")
// Match the direct children of base/inputs (single-level glob).
copyPaths / fileInputs += baseDirectory.value.toGlob / "inputs" / *
copyPaths := {
  val outFile = streams.value.cacheDirectory
  // Start from a clean output directory so stale copies don't linger.
  IO.delete(outFile)
  val out = Files.createDirectories(outFile.toPath)
  copyPaths.inputFiles.map { path =>
    // Files.write returns the destination path, so the task yields the copies.
    Files.write(out / path.getFileName.toString, Files.readAllBytes(path))
  }
}
val checkPaths = inputKey[Unit]("check paths")
checkPaths := {
  // Space-separated expected file names are compared against the names of the
  // files that copyPaths actually produced in its last run.
  val expectedFileNames = Def.spaceDelimited().parsed.toSet
  val actualFileNames = copyPaths.outputFiles.map(_.getFileName.toString).toSet
  assert(expectedFileNames == actualFileNames)
}
// Filters are defined as settings because quoted string literals do not
// survive scripted `set` commands.
val newFilter = settingKey[PathFilter]("Works around quotations not working in scripted")
newFilter := HiddenFileFilter.toNio || "**/bar.txt"
// Description fixed: this filter matches .foo.txt, not bar.txt (copy-paste error).
val fooFilter = settingKey[PathFilter]("A filter for the .foo.txt file")
fooFilter := ** / ".foo.txt"
Global / onLoad := { s: State =>
  // A leading-dot name is not automatically hidden on Windows, so set the
  // hidden attribute explicitly to make HiddenFileFilter behave as on Unix.
  if (scala.util.Properties.isWin) {
    val path = s.baseDir.toPath / "inputs" / ".foo.txt"
    Files.setAttribute(path, "dos:hidden", true)
  }
  s
}

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1 @@
bar

View File

@ -0,0 +1,10 @@
# hidden files are excluded
> checkPaths bar.txt
> set copyPaths / fileInputExcludeFilter := NothingFilter.toNio
> checkPaths .foo.txt bar.txt
> set copyPaths / fileInputIncludeFilter := fooFilter.value
> checkPaths .foo.txt

View File

@ -1,17 +1,32 @@
import sbt.nio.Keys._
import scala.util.Try
val fileInputTask = taskKey[Unit]("task with file inputs")
fileInputTask / fileInputs += (baseDirectory.value / "base").toGlob / "*.md"
fileInputTask / inputFileStamper := sbt.nio.FileStamper.LastModified
fileInputTask := Def.taskDyn {
(fileInputTask / changedInputFiles).value match {
case Some(ChangedFiles(_, _, u)) if u.nonEmpty => Def.task(assert(true))
case None => Def.task(assert(false))
}
}.value
fileInputTask := {
/*
* Normally we'd use an input task for this kind of thing, but input tasks don't work with
* incremental task evaluation so, instead, we manually set the input in a file. As a result,
* most of the test commands have to be split into two: one to set the expected result and one
* to validate it.
*/
val expectedChanges =
Try(IO.read(baseDirectory.value / "expected").split(" ").toSeq.filterNot(_.isEmpty))
.getOrElse(Nil)
.map(baseDirectory.value.toPath / "base" / _)
val actual = fileInputTask.inputFileChanges.modified
assert(actual.toSet == expectedChanges.toSet)
}
val setExpected = inputKey[Unit]("Writes a space separated list of files")
setExpected := {
IO.write(baseDirectory.value / "expected", Def.spaceDelimited().parsed.mkString(" "))
}
val setLastModified = taskKey[Unit]("Reset the last modified time")
setLastModified := {

View File

@ -1,8 +1,10 @@
-> fileInputTask
> fileInputTask
$ touch base/Bar.md
# this should succeed even though the contents didn't change
# The change to Bar.md should be detected since we set last modified instead of hash
> setExpected Bar.md
> fileInputTask
$ copy-file changes/Bar.md base/Bar.md
@ -18,9 +20,13 @@ $ copy-file changes/Bar2.md base/Bar.md
> setLastModified
# this should fail even though we changed the file with a copy
-> fileInputTask
# Since we reverted to the previous last modified time, there should be no changes
> setExpected
> fileInputTask
$ touch base/Bar.md
> setExpected Bar.md
> fileInputTask

View File

@ -1,4 +1,3 @@
Compile / excludeFilter := "Bar.scala" || "Baz.scala"
val checkSources = inputKey[Unit]("Check that the compile sources match the input file names")
checkSources := {
@ -6,3 +5,11 @@ checkSources := {
val actual = (Compile / unmanagedSources).value.map(_.getName).toSet
assert(sources == actual)
}
val oldExcludeFilter = settingKey[sbt.io.FileFilter]("the default exclude filter")
oldExcludeFilter := "Bar.scala" || "Baz.scala"
Compile / excludeFilter := oldExcludeFilter.value
val newFilter = settingKey[sbt.nio.file.PathFilter]("an alternative path filter")
newFilter := !sbt.nio.file.PathFilter(** / "{Baz,Bar}.scala")

View File

@ -6,4 +6,16 @@
> checkSources Foo.scala Bar.scala
-> compile
-> compile
> set Compile / unmanagedSources / excludeFilter := oldExcludeFilter.value
> compile
> set Compile / unmanagedSources / excludeFilter := HiddenFileFilter
-> compile
> set Compile / unmanagedSources / fileInputIncludeFilter := newFilter.value
> compile

View File

@ -2,7 +2,7 @@ import java.nio.file.{ Files, Path }
import scala.sys.process._
val compileOpts = settingKey[Seq[String]]("Extra compile options")
compileOpts := { if (scala.util.Properties.isLinux) "-fPIC" :: "-std=gnu99" :: Nil else Nil }
compileOpts := { "-fPIC" :: "-std=gnu99" :: Nil }
val compileLib = taskKey[Seq[Path]]("Compile the library")
compileLib / sourceDirectory := sourceDirectory.value / "lib"
compileLib / fileInputs := {
@ -11,62 +11,53 @@ compileLib / fileInputs := {
}
compileLib / target := baseDirectory.value / "out" / "objects"
compileLib := {
val allFiles: Seq[Path] = (compileLib / allInputFiles).value
val changedFiles: Option[Seq[Path]] = (compileLib / changedInputFiles).value match {
case Some(ChangedFiles(c, _, u)) => Some(c ++ u)
case None => None
}
val include = (compileLib / sourceDirectory).value / "include"
val objectDir: Path = (compileLib / target).value.toPath / "objects"
val outputDir = Files.createDirectories(streams.value.cacheDirectory.toPath)
val logger = streams.value.log
def objectFileName(path: Path): String = {
val name = path.getFileName.toString
name.substring(0, name.lastIndexOf('.')) + ".o"
}
compileLib.previous match {
case Some(outputs: Seq[Path]) if changedFiles.isEmpty =>
logger.info("Not compiling libfoo: no inputs have changed.")
outputs
case _ =>
Files.createDirectories(objectDir)
def extensionFilter(ext: String): Path => Boolean = _.getFileName.toString.endsWith(s".$ext")
val cFiles: Seq[Path] =
if (changedFiles.fold(false)(_.exists(extensionFilter("h")))) allFiles.filter(extensionFilter("c"))
else changedFiles.getOrElse(allFiles).filter(extensionFilter("c"))
cFiles.map { file =>
val outFile = objectDir.resolve(objectFileName(file))
logger.info(s"Compiling $file to $outFile")
(Seq("gcc") ++ compileOpts.value ++
Seq("-c", file.toString, s"-I$include", "-o", outFile.toString)).!!
outFile
}
val include = (compileLib / sourceDirectory).value / "include"
def outputPath(path: Path): Path =
outputDir / path.getFileName.toString.replaceAll(".c$", ".o")
def compile(path: Path): Path = {
val output = outputPath(path)
logger.info(s"Compiling $path to $output")
Seq("gcc", "-fPIC", "-std=gnu99", s"-I$include", "-c", s"$path", "-o", s"$output").!!
output
}
val report = compileLib.inputFileChanges
val sourceMap = compileLib.inputFiles.view.collect {
case p: Path if p.getFileName.toString.endsWith(".c") => outputPath(p) -> p
}.toMap
val existingTargets = fileTreeView.value.list(outputDir.toGlob / **).flatMap { case (p, _) =>
if (!sourceMap.contains(p)) {
Files.deleteIfExists(p)
None
} else {
Some(p)
}
}.toSet
val updatedPaths = (report.created ++ report.modified).toSet
val needCompile =
if (updatedPaths.exists(_.getFileName.toString.endsWith(".h"))) sourceMap.values
else updatedPaths ++ sourceMap.filterKeys(!existingTargets(_)).values
needCompile.foreach(compile)
sourceMap.keys.toVector
}
val linkLib = taskKey[Path]("")
linkLib / target := baseDirectory.value / "out" / "lib"
linkLib := {
val changedObjects = (compileLib / changedOutputFiles).value
val outPath = (linkLib / target).value.toPath
val allObjects = (compileLib / allOutputFiles).value.map(_.toString)
val outputDir = Files.createDirectories(streams.value.cacheDirectory.toPath)
val logger = streams.value.log
linkLib.previous match {
case Some(p: Path) if changedObjects.isEmpty =>
logger.info("Not running linker: no outputs have changed.")
p
case _ =>
val (linkOptions, libraryPath) = if (scala.util.Properties.isMac) {
val path = outPath.resolve("libfoo.dylib")
(Seq("-dynamiclib", "-o", path.toString), path)
} else {
val path = outPath.resolve("libfoo.so")
(Seq("-shared", "-fPIC", "-o", path.toString), path)
}
logger.info(s"Linking $libraryPath")
Files.createDirectories(outPath)
("gcc" +: (linkOptions ++ allObjects)).!!
libraryPath
val isMac = scala.util.Properties.isMac
val library = outputDir / s"libfoo.${if (isMac) "dylib" else "so"}"
val (report, objects) = (compileLib.outputFileChanges, compileLib.outputFiles)
val linkOpts = if (isMac) Seq("-dynamiclib") else Seq("-shared", "-fPIC")
if (report.hasChanges || !Files.exists(library)) {
logger.info(s"Linking $library")
(Seq("gcc") ++ linkOpts ++ Seq("-o", s"$library") ++ objects.map(_.toString)).!!
} else {
logger.debug(s"Skipping linking of $library")
}
library
}
val compileMain = taskKey[Path]("compile main")
@ -75,40 +66,39 @@ compileMain / fileInputs := (compileMain / sourceDirectory).value.toGlob / "main
compileMain / target := baseDirectory.value / "out" / "main"
compileMain := {
val library = linkLib.value
val changed: Boolean = (compileMain / changedInputFiles).value.nonEmpty ||
(linkLib / changedOutputFiles).value.nonEmpty
val changed: Boolean = compileMain.inputFileChanges.hasChanges ||
linkLib.outputFileChanges.hasChanges
val include = (compileLib / sourceDirectory).value / "include"
val logger = streams.value.log
val outDir = (compileMain / target).value.toPath
val outPath = outDir.resolve("main.out")
compileMain.previous match {
case Some(p: Path) if changed =>
logger.info(s"Not building $outPath: no dependencies have changed")
p
case _ =>
(compileMain / allInputFiles).value match {
case Seq(main) =>
Files.createDirectories(outDir)
logger.info(s"Building executable $outPath")
(Seq("gcc") ++ compileOpts.value ++ Seq(
main.toString,
s"-I$include",
"-o",
outPath.toString,
s"-L${library.getParent}",
"-lfoo"
)).!!
outPath
case main =>
throw new IllegalStateException(s"multiple main files detected: ${main.mkString(",")}")
}
val inputs = compileMain.inputFiles
if (changed || !Files.exists(outPath)) {
inputs match {
case Seq(main) =>
Files.createDirectories(outDir)
logger.info(s"Building executable $outPath")
(Seq("gcc") ++ compileOpts.value ++ Seq(
main.toString,
s"-I$include",
"-o",
outPath.toString,
s"-L${library.getParent}",
"-lfoo"
)).!!
case main =>
throw new IllegalStateException(s"multiple main files detected: ${main.mkString(",")}")
}
} else {
logger.info(s"Not building $outPath: no dependencies have changed")
}
outPath
}
val executeMain = inputKey[Unit]("run the main method")
executeMain := {
val args = Def.spaceDelimited("<arguments>").parsed
val binary: Seq[Path] = (compileMain / allOutputFiles).value
val binary: Seq[Path] = compileMain.outputFiles
val logger = streams.value.log
binary match {
case Seq(b) =>
@ -126,9 +116,9 @@ executeMain := {
val checkOutput = inputKey[Unit]("check the output value")
checkOutput := {
val args @ Seq(arg, res) = Def.spaceDelimited("").parsed
val binary: Path = (compileMain / allOutputFiles).value.head
val output = RunBinary(binary, args, linkLib.value)
val Seq(arg, res) = Def.spaceDelimited("").parsed
val binary: Path = compileMain.outputFiles.head
val output = RunBinary(binary, arg :: Nil, linkLib.value)
assert(output.contains(s"f($arg) = $res"))
()
}

View File

@ -0,0 +1 @@
int

View File

@ -20,6 +20,12 @@
> checkOutput 2 8
$ copy-file changes/bad.c src/lib/bad.c
$ copy-file changes/lib.c src/lib/lib.c
-> checkOutput 2 4
$ delete src/lib/bad.c
> checkOutput 2 4

View File

@ -0,0 +1,19 @@
val foo = taskKey[Unit]("dummy task with inputs")
foo / fileInputs += baseDirectory.value.toGlob / "foo" / *
val bar = taskKey[Unit]("dummy task with inputs")
bar / fileInputs += baseDirectory.value.toGlob / "bar" / *
val check = taskKey[Unit]("check expected changes")
check := {
  (foo.inputFileChanges.modified ++ bar.inputFileChanges.modified) match {
    // No modified inputs: foo.md must still have its original content.
    case Nil =>
      val contents = IO.read(baseDirectory.value / "foo" / "foo.md")
      // Fixed: the failure message was missing the closing quote after $contents.
      assert(contents == "foo", s"expected 'foo', got '$contents'")
    // Exactly one modified file per task: both copies must hold the new content.
    // Any other shape of changes deliberately fails the task with a MatchError,
    // which the scripted test treats as a failure (-> check).
    case Seq(f, b) =>
      val fContents = IO.read(f.toFile)
      assert(fContents == "updated", s"expected 'updated', got '$fContents' for $f")
      val bContents = IO.read(b.toFile)
      // Fixed: the message interpolated $fContents instead of $bContents.
      assert(bContents == "updated", s"expected 'updated', got '$bContents' for $b")
  }
}

View File

@ -0,0 +1 @@
bad

View File

@ -0,0 +1 @@
updated

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,17 @@
> check
$ copy-file changes/bad.md foo/foo.md
$ copy-file changes/updated.md bar/bar.md
-> check
-> check
$ copy-file changes/updated.md foo/foo.md
> check
# the changes should be empty now but the content of foo/foo.md is no longer "foo"
-> check

View File

@ -0,0 +1,31 @@
import java.nio.file.Path
val foo = taskKey[Seq[Path]]("dummy task with inputs")
foo := fileTreeView.value.list(baseDirectory.value.toGlob / "foo" / *).map(_._1)
val bar = taskKey[Seq[Path]]("dummy task with inputs")
bar := fileTreeView.value.list(baseDirectory.value.toGlob / "bar" / *).map(_._1)
val check = taskKey[Unit]("check expected changes")
check := {
  foo.outputFileChanges.modified ++ bar.outputFileChanges.modified match {
    // No modified outputs: foo.md must still have its original content.
    case Nil =>
      val contents = IO.read(baseDirectory.value / "foo" / "foo.md")
      // Fixed: the failure message was missing the closing quote after $contents.
      assert(contents == "foo", s"expected 'foo', got '$contents'")
    // Exactly one modified file per task: both must hold the new content.
    // Any other shape of changes deliberately fails the task with a MatchError,
    // which the scripted test treats as a failure (-> check).
    case Seq(f, b) =>
      val fContents = IO.read(f.toFile)
      assert(fContents == "updated", s"expected 'updated', got '$fContents' for $f")
      val bContents = IO.read(b.toFile)
      // Fixed: the message interpolated $fContents instead of $bContents.
      assert(bContents == "updated", s"expected 'updated', got '$bContents' for $b")
  }
}
val setModified = inputKey[Unit]("set the last modified time for a file")
setModified := {
  // Expects "<relative/path> <millis>"; any other arity fails with a MatchError.
  val Seq(relative, lm) = Def.spaceDelimited().parsed
  // be safe in case of windows
  // Split on '/' and rebuild with sbt's platform-aware path operator so the
  // relative path also resolves correctly with Windows separators.
  val file = relative.split("/") match {
    case Array(h, rest @ _*) => rest.foldLeft(baseDirectory.value / h)(_ / _)
  }
  // Best-effort: the boolean result (false on failure) is discarded.
  IO.setModifiedTimeOrFalse(file, lm.toLong)
}

View File

@ -0,0 +1 @@
bad

View File

@ -0,0 +1 @@
updated

View File

@ -0,0 +1 @@
foo

View File

@ -0,0 +1,23 @@
> check
$ copy-file changes/bad.md foo/foo.md
$ copy-file changes/updated.md bar/bar.md
# just in case the two foo.md copies happen too quickly to update the last modified time
> setModified foo/foo.md 123456
-> check
-> check
$ copy-file changes/updated.md foo/foo.md
# just in case the two foo.md copies happen too quickly to update the last modified time
> setModified foo/foo.md 12345678
> check
# the changes should be empty now but the content of foo/foo.md is no longer "foo"
-> check

View File

@ -0,0 +1,21 @@
import java.nio.file.{ Files, Path }
val outputTask = taskKey[Seq[Path]]("A task that generates outputs")
outputTask := {
  val dir = Files.createDirectories(streams.value.cacheDirectory.toPath)
  // Write two fixed files and report them -- plus the directory itself -- as
  // this task's outputs (Files.write returns the path written).
  Seq("foo.txt" -> "foo", "bar.txt" -> "bar").map { case (name, content) =>
    Files.write(dir/ name, content.getBytes)
  } :+ dir
}
val checkOutputs = inputKey[Unit]("check outputs")
checkOutputs := {
  // Arguments name the expected outputs; the literal "base" stands for the
  // cache directory itself, anything else resolves relative to it.
  val expected = Def.spaceDelimited("").parsed.map {
    case "base" => (outputTask / streams).value.cacheDirectory.toPath
    case f => (outputTask / streams).value.cacheDirectory.toPath / f
  }
  assert((outputTask / allOutputFiles).value.toSet == expected.toSet)
}
// Defined as a setting because quoted literals don't work in scripted `set` commands.
val barFilter = settingKey[PathFilter]("A filter for the bar.txt file")
barFilter := ** / "bar.txt"

View File

@ -0,0 +1,17 @@
> compile
> checkOutputs foo.txt bar.txt base
> set outputTask / fileOutputIncludeFilter := sbt.io.RegularFileFilter
> checkOutputs foo.txt bar.txt
> set outputTask / fileOutputIncludeFilter := sbt.io.DirectoryFilter
> checkOutputs base
> set outputTask / fileOutputIncludeFilter := sbt.io.RegularFileFilter
> set outputTask / fileOutputExcludeFilter := barFilter.value
> checkOutputs foo.txt

View File

@ -5,7 +5,7 @@ import scala.collection.JavaConverters._
val foo = taskKey[Unit]("foo")
foo := {
val fooTxt = baseDirectory.value / "foo.txt"
val _ = println(s"foo inputs: ${(foo / allInputFiles).value}")
val _ = println(s"foo inputs: ${foo.inputFiles}")
IO.write(fooTxt, "foo")
println(s"foo wrote to $foo")
}

View File

@ -24,7 +24,7 @@ object Build {
lazy val root = (project in file(".")).settings(
reloadFile := baseDirectory.value / "reload",
foo / fileInputs += baseDirectory.value.toGlob / "foo.txt",
foo := (foo / allInputFiles).value,
foo := foo.inputFiles,
setStringValue := Def.taskDyn {
// This hides foo / fileInputs from the input graph
Def.taskDyn {

View File

@ -8,7 +8,7 @@ foo / watchForceTriggerOnAnyChange := true
foo / fileInputs := baseDirectory.value.toGlob / "files" / "foo.txt" :: Nil
foo / watchTriggers := baseDirectory.value.toGlob / ** / "foo.txt" :: Nil
foo := {
(foo / allInputFiles).value.foreach { p =>
foo.inputFiles.foreach { p =>
Files.setLastModifiedTime(p, FileTime.fromMillis(Files.getLastModifiedTime(p).toMillis + 3000))
}
sbt.nio.Stamps.check(foo).value