mirror of https://github.com/sbt/sbt.git
Merge pull request #4539 from eatkins/glob-performance
Glob performance
This commit is contained in:
commit
319ecf31c6
|
|
@ -1,3 +1,6 @@
|
|||
image:
|
||||
- Visual Studio 2015
|
||||
- Visual Studio 2017
|
||||
build: off
|
||||
|
||||
init:
@ -648,7 +648,7 @@ lazy val sbtProj = (project in file("sbt"))
|
|||
Test / run / outputStrategy := Some(StdoutOutput),
|
||||
Test / run / fork := true,
|
||||
)
|
||||
.configure(addSbtCompilerBridge)
|
||||
.configure(addSbtIO, addSbtCompilerBridge)
|
||||
|
||||
lazy val sbtBig = (project in file(".big"))
|
||||
.dependsOn(sbtProj)
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal.util.appmacro
|
||||
|
||||
import scala.reflect.macros.blackbox
|
||||
|
||||
object MacroDefaults {
|
||||
|
||||
/**
|
||||
* Macro to generate the default file tree repository. It must be defined as an untyped tree because
|
||||
* sbt.Keys is not available in this project. This is meant for internal use only, but must be
|
||||
* public because it's a macro.
|
||||
* @param c the macro context
|
||||
* @return the tree expressing the default file tree repository.
|
||||
*/
|
||||
def fileTreeRepository(c: blackbox.Context): c.Tree = {
|
||||
import c.universe._
|
||||
q"sbt.Keys.fileTreeRepository.value: @sbtUnchecked"
|
||||
}
|
||||
}
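
Aside (not part of the diff): a minimal sketch of how another sbt-internal macro could splice the tree built above. `RepositoryMacro` and its method names are hypothetical; the expansion only type-checks where `sbt.Keys` is on the classpath and the call site is itself inside a task or setting definition.

    import scala.language.experimental.macros
    import scala.reflect.macros.blackbox
    import sbt.internal.util.appmacro.MacroDefaults

    object RepositoryMacro {
      // Expands to `sbt.Keys.fileTreeRepository.value: @sbtUnchecked` at the call site.
      def repository: Any = macro impl
      def impl(c: blackbox.Context): c.Tree = MacroDefaults.fileTreeRepository(c)
    }
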
@ -1,177 +0,0 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
import sbt.Watched.WatchSource
|
||||
import sbt.internal.FileCacheEntry
|
||||
import sbt.internal.io.{ HybridPollingFileTreeRepository, WatchServiceBackedObservable, WatchState }
|
||||
import sbt.io.FileTreeDataView.{ Observable, Observer }
|
||||
import sbt.io._
|
||||
import sbt.util.Logger
|
||||
|
||||
import scala.concurrent.duration._
|
||||
|
||||
/**
|
||||
* Configuration for viewing and monitoring the file system.
|
||||
*/
|
||||
final class FileTreeViewConfig private (
|
||||
val newDataView: () => FileTreeDataView[FileCacheEntry],
|
||||
val newMonitor: (
|
||||
FileTreeDataView[FileCacheEntry],
|
||||
Seq[WatchSource],
|
||||
Logger
|
||||
) => FileEventMonitor[FileCacheEntry]
|
||||
)
|
||||
object FileTreeViewConfig {
|
||||
private implicit class RepositoryOps(val repository: FileTreeRepository[FileCacheEntry]) {
|
||||
def register(sources: Seq[WatchSource]): Unit = sources foreach { s =>
|
||||
repository.register(s.base.toPath, if (s.recursive) Integer.MAX_VALUE else 0)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new FileTreeViewConfig. This factory takes a generic parameter, T, that is bounded
|
||||
* by {{{sbt.io.FileTreeDataView[FileCacheEntry]}}}. The reason for this is to ensure that a
|
||||
* sbt.io.FileTreeDataView that is instantiated by [[FileTreeViewConfig.newDataView]] can be
|
||||
* passed into [[FileTreeViewConfig.newMonitor]] without constraining the type of view to be
|
||||
* {{{sbt.io.FileTreeDataView[FileCacheEntry]}}}.
|
||||
* @param newDataView create a new sbt.io.FileTreeDataView. This value may be cached in a global
|
||||
* attribute
|
||||
* @param newMonitor create a new sbt.io.FileEventMonitor using the sbt.io.FileTreeDataView
|
||||
* created by newDataView
|
||||
* @tparam T the subtype of sbt.io.FileTreeDataView that is returned by [[FileTreeViewConfig.newDataView]]
|
||||
* @return a [[FileTreeViewConfig]] instance.
|
||||
*/
|
||||
def apply[T <: FileTreeDataView[FileCacheEntry]](
|
||||
newDataView: () => T,
|
||||
newMonitor: (T, Seq[WatchSource], Logger) => FileEventMonitor[FileCacheEntry]
|
||||
): FileTreeViewConfig =
|
||||
new FileTreeViewConfig(
|
||||
newDataView,
|
||||
(view: FileTreeDataView[FileCacheEntry], sources: Seq[WatchSource], logger: Logger) =>
|
||||
newMonitor(view.asInstanceOf[T], sources, logger)
|
||||
)
|
||||
|
||||
/**
|
||||
* Provides a [[FileTreeViewConfig]] with semantics as close as possible to sbt 1.2.0. This means
|
||||
* that there is no file tree caching and the sbt.io.FileEventMonitor will use an
|
||||
* sbt.io.WatchService for monitoring the file system.
|
||||
* @param delay the maximum delay for which the background thread will poll the
|
||||
* sbt.io.WatchService for file system events
|
||||
* @param antiEntropy the duration of the period after a path triggers a build for which it is
|
||||
* quarantined from triggering another build
|
||||
* @return a [[FileTreeViewConfig]] instance.
|
||||
*/
|
||||
def sbt1_2_compat(
|
||||
delay: FiniteDuration,
|
||||
antiEntropy: FiniteDuration
|
||||
): FileTreeViewConfig =
|
||||
FileTreeViewConfig(
|
||||
() => FileTreeView.DEFAULT.asDataView(FileCacheEntry.default),
|
||||
(_: FileTreeDataView[FileCacheEntry], sources, logger) => {
|
||||
val ioLogger: sbt.io.WatchLogger = msg => logger.debug(msg.toString)
|
||||
FileEventMonitor.antiEntropy(
|
||||
new WatchServiceBackedObservable(
|
||||
WatchState.empty(Watched.createWatchService(), sources),
|
||||
delay,
|
||||
FileCacheEntry.default,
|
||||
closeService = true,
|
||||
ioLogger
|
||||
),
|
||||
antiEntropy,
|
||||
ioLogger
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* Provides a default [[FileTreeViewConfig]]. This view caches entries and solely relies on
|
||||
* file system events from the operating system to update its internal representation of the
|
||||
* file tree.
|
||||
* @param antiEntropy the duration of the period after a path triggers a build for which it is
|
||||
* quarantined from triggering another build
|
||||
* @return a [[FileTreeViewConfig]] instance.
|
||||
*/
|
||||
def default(antiEntropy: FiniteDuration): FileTreeViewConfig =
|
||||
FileTreeViewConfig(
|
||||
() => FileTreeRepository.default(FileCacheEntry.default),
|
||||
(
|
||||
repository: FileTreeRepository[FileCacheEntry],
|
||||
sources: Seq[WatchSource],
|
||||
logger: Logger
|
||||
) => {
|
||||
repository.register(sources)
|
||||
val copied = new Observable[FileCacheEntry] {
|
||||
override def addObserver(observer: Observer[FileCacheEntry]): Int =
|
||||
repository.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = repository.removeObserver(handle)
|
||||
override def close(): Unit = {} // Don't close the underlying observable
|
||||
}
|
||||
FileEventMonitor.antiEntropy(copied, antiEntropy, msg => logger.debug(msg.toString))
|
||||
}
|
||||
)
|
||||
|
||||
/**
|
||||
* Provides a default [[FileTreeViewConfig]]. When the pollingSources argument is empty, it
|
||||
* returns the same config as [[sbt.FileTreeViewConfig.default(antiEntropy:scala\.concurrent\.duration\.FiniteDuration)*]].
|
||||
* Otherwise, it returns the same config as [[polling]].
|
||||
* @param antiEntropy the duration of the period after a path triggers a build for which it is
|
||||
* quarantined from triggering another build
|
||||
* @param pollingInterval the frequency with which the sbt.io.FileEventMonitor polls the file
|
||||
* system for the paths included in pollingSources
|
||||
* @param pollingSources the sources that will not be cached in the sbt.io.FileTreeRepository and that
|
||||
* will be periodically polled for changes during continuous builds.
|
||||
* @return
|
||||
*/
|
||||
def default(
|
||||
antiEntropy: FiniteDuration,
|
||||
pollingInterval: FiniteDuration,
|
||||
pollingSources: Seq[WatchSource]
|
||||
): FileTreeViewConfig = {
|
||||
if (pollingSources.isEmpty) default(antiEntropy)
|
||||
else polling(antiEntropy, pollingInterval, pollingSources)
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a polling [[FileTreeViewConfig]]. Unlike the view returned by newDataView in
|
||||
* [[sbt.FileTreeViewConfig.default(antiEntropy:scala\.concurrent\.duration\.FiniteDuration)*]],
|
||||
* the view returned by newDataView will not cache any portion of the file system tree that is
|
||||
* covered by the pollingSources parameter. The monitor that is generated by newMonitor, will
|
||||
* poll these directories for changes rather than relying on file system events from the
|
||||
* operating system. Any paths that are registered with the view that are not included in the
|
||||
* pollingSources will be cached and monitored using file system events from the operating system
|
||||
* in the same way that they are in the default view.
|
||||
*
|
||||
* @param antiEntropy the duration of the period after a path triggers a build for which it is
|
||||
* quarantined from triggering another build
|
||||
* @param pollingInterval the frequency with which the FileEventMonitor polls the file system
|
||||
* for the paths included in pollingSources
|
||||
* @param pollingSources the sources that will not be cached in the sbt.io.FileTreeRepository and that
|
||||
* will be periodically polled for changes during continuous builds.
|
||||
* @return a [[FileTreeViewConfig]] instance.
|
||||
*/
|
||||
def polling(
|
||||
antiEntropy: FiniteDuration,
|
||||
pollingInterval: FiniteDuration,
|
||||
pollingSources: Seq[WatchSource],
|
||||
): FileTreeViewConfig = FileTreeViewConfig(
|
||||
() => FileTreeRepository.hybrid(FileCacheEntry.default, pollingSources: _*),
|
||||
(
|
||||
repository: HybridPollingFileTreeRepository[FileCacheEntry],
|
||||
sources: Seq[WatchSource],
|
||||
logger: Logger
|
||||
) => {
|
||||
repository.register(sources)
|
||||
FileEventMonitor
|
||||
.antiEntropy(
|
||||
repository.toPollingObservable(pollingInterval, sources, NullWatchLogger),
|
||||
antiEntropy,
|
||||
msg => logger.debug(msg.toString)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
@ -10,8 +10,8 @@ package sbt
|
|||
import java.io.{ File => JFile }
|
||||
import java.nio.file.Path
|
||||
|
||||
import sbt.internal.FileCacheEntry
|
||||
import sbt.internal.inc.Stamper
|
||||
import sbt.internal.FileAttributes
|
||||
import sbt.internal.inc.{ EmptyStamp, Stamper }
|
||||
import sbt.io.TypedPath
|
||||
import xsbti.compile.analysis.Stamp
@ -29,50 +29,47 @@ private[sbt] trait Stamped {
|
|||
* Provides converter functions from TypedPath to [[Stamped]].
|
||||
*/
|
||||
private[sbt] object Stamped {
|
||||
type File = JFile with Stamped with TypedPath
|
||||
def file(typedPath: TypedPath, entry: FileCacheEntry): JFile with Stamped with TypedPath =
|
||||
new StampedFileImpl(typedPath, entry.stamp)
|
||||
type File = JFile with Stamped
|
||||
private[sbt] val file: ((Path, FileAttributes)) => JFile with Stamped = {
|
||||
case (path: Path, attributes: FileAttributes) =>
|
||||
new StampedFileImpl(path, attributes.stamp)
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a TypedPath instance to a [[Stamped]] by calculating the file hash.
|
||||
*/
|
||||
val sourceConverter: TypedPath => Stamp = tp => Stamper.forHash(tp.toPath.toFile)
|
||||
private[sbt] val sourceConverter: TypedPath => Stamp = tp => Stamper.forHash(tp.toPath.toFile)
|
||||
|
||||
/**
|
||||
* Converts a TypedPath instance to a [[Stamped]] using the last modified time.
|
||||
*/
|
||||
val binaryConverter: TypedPath => Stamp = tp => Stamper.forLastModified(tp.toPath.toFile)
|
||||
private[sbt] val binaryConverter: TypedPath => Stamp = tp =>
|
||||
Stamper.forLastModified(tp.toPath.toFile)
|
||||
|
||||
/**
|
||||
* A combined converter that converts TypedPath instances representing *.jar and *.class files
|
||||
* using the last modified time and all other files using the file hash.
|
||||
*/
|
||||
val converter: TypedPath => Stamp = (tp: TypedPath) =>
|
||||
if (tp.isDirectory) binaryConverter(tp)
|
||||
else {
|
||||
tp.toPath.toString match {
|
||||
case s if s.endsWith(".jar") => binaryConverter(tp)
|
||||
case s if s.endsWith(".class") => binaryConverter(tp)
|
||||
case _ => sourceConverter(tp)
|
||||
private[sbt] val converter: TypedPath => Stamp = (_: TypedPath) match {
|
||||
case typedPath if !typedPath.exists => EmptyStamp
|
||||
case typedPath if typedPath.isDirectory => binaryConverter(typedPath)
|
||||
case typedPath =>
|
||||
typedPath.toPath.toString match {
|
||||
case s if s.endsWith(".jar") => binaryConverter(typedPath)
|
||||
case s if s.endsWith(".class") => binaryConverter(typedPath)
|
||||
case _ => sourceConverter(typedPath)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a default ordering that just delegates to the java.io.File.compareTo method.
|
||||
*/
|
||||
implicit case object ordering extends Ordering[Stamped.File] {
|
||||
private[sbt] implicit case object ordering extends Ordering[Stamped.File] {
|
||||
override def compare(left: Stamped.File, right: Stamped.File): Int = left.compareTo(right)
|
||||
}
|
||||
|
||||
private final class StampedImpl(override val stamp: Stamp) extends Stamped
|
||||
private final class StampedFileImpl(typedPath: TypedPath, override val stamp: Stamp)
|
||||
extends java.io.File(typedPath.toPath.toString)
|
||||
private final class StampedFileImpl(path: Path, override val stamp: Stamp)
|
||||
extends java.io.File(path.toString)
|
||||
with Stamped
|
||||
with TypedPath {
|
||||
override def exists: Boolean = typedPath.exists
|
||||
override def isDirectory: Boolean = typedPath.isDirectory
|
||||
override def isFile: Boolean = typedPath.isFile
|
||||
override def isSymbolicLink: Boolean = typedPath.isSymbolicLink
|
||||
override def toPath: Path = typedPath.toPath
|
||||
}
|
||||
}
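
Aside (assumed behavior, summarizing the cases above rather than adding new ones): what `Stamped.converter` produces for a few representative `TypedPath` inputs.

    // Stamped.converter(tp) when tp does not exist             -> EmptyStamp
    // Stamped.converter(tp) when tp is a directory             -> last-modified stamp (binaryConverter)
    // Stamped.converter(tp) for "library.jar" or "A.class"     -> last-modified stamp (binaryConverter)
    // Stamped.converter(tp) for any other file, e.g. "A.scala" -> content-hash stamp (sourceConverter)
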
@ -8,7 +8,7 @@
|
|||
package sbt
|
||||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.nio.file.FileSystems
|
||||
import java.nio.file.{ FileSystems, Path }
|
||||
|
||||
import sbt.BasicCommandStrings.{
|
||||
ContinuousExecutePrefix,
|
||||
|
|
@ -22,8 +22,7 @@ import sbt.internal.io.{ EventMonitor, Source, WatchState }
|
|||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.complete.{ DefaultParsers, Parser }
|
||||
import sbt.internal.util.{ AttributeKey, JLine }
|
||||
import sbt.internal.{ FileCacheEntry, LegacyWatched }
|
||||
import sbt.io.FileEventMonitor.{ Creation, Deletion, Event, Update }
|
||||
import sbt.internal.{ FileAttributes, LegacyWatched }
|
||||
import sbt.io._
|
||||
import sbt.util.{ Level, Logger }
@ -64,9 +63,8 @@ object Watched {
|
|||
|
||||
/**
|
||||
* This trait is used to communicate what the watch should do next at various points in time. It
|
||||
* is heavily linked to a number of callbacks in [[WatchConfig]]. For example, when the
|
||||
* sbt.io.FileEventMonitor created by [[FileTreeViewConfig.newMonitor]] detects a changed source
|
||||
* file, then we expect [[WatchConfig.onWatchEvent]] to return [[Trigger]].
|
||||
* is heavily linked to a number of callbacks in [[WatchConfig]]. For example, when the event
|
||||
* monitor detects a changed source we expect [[WatchConfig.onWatchEvent]] to return [[Trigger]].
|
||||
*/
|
||||
sealed trait Action
@ -146,13 +144,14 @@ object Watched {
|
|||
private[sbt] def onEvent(
|
||||
sources: Seq[WatchSource],
|
||||
projectSources: Seq[WatchSource]
|
||||
): Event[FileCacheEntry] => Watched.Action =
|
||||
): FileAttributes.Event => Watched.Action =
|
||||
event =>
|
||||
if (sources.exists(_.accept(event.entry.typedPath.toPath))) Watched.Trigger
|
||||
else if (projectSources.exists(_.accept(event.entry.typedPath.toPath))) event match {
|
||||
case Update(prev, cur, _) if prev.value != cur.value => Reload
|
||||
case _: Creation[_] | _: Deletion[_] => Reload
|
||||
case _ => Ignore
|
||||
if (sources.exists(_.accept(event.path))) Watched.Trigger
|
||||
else if (projectSources.exists(_.accept(event.path))) {
|
||||
(event.previous, event.current) match {
|
||||
case (Some(p), Some(c)) => if (c == p) Watched.Ignore else Watched.Reload
|
||||
case _ => Watched.Trigger
|
||||
}
|
||||
} else Ignore
|
||||
|
||||
private[this] val reRun = if (isWin) "" else " or 'r' to re-run the command"
@ -334,7 +333,9 @@ object Watched {
|
|||
case action @ (CancelWatch | HandleError | Reload | _: Custom) => action
|
||||
case Trigger => Trigger
|
||||
case _ =>
|
||||
val events = config.fileEventMonitor.poll(10.millis)
|
||||
val events = config.fileEventMonitor
|
||||
.poll(10.millis)
|
||||
.map(new FileAttributes.EventImpl(_))
|
||||
val next = events match {
|
||||
case Seq() => (Ignore, None)
|
||||
case Seq(head, tail @ _*) =>
@ -363,14 +364,14 @@ object Watched {
|
|||
if (action == HandleError) "error"
|
||||
else if (action.isInstanceOf[Custom]) action.toString
|
||||
else "cancellation"
|
||||
logger.debug(s"Stopping watch due to $cause from ${event.entry.typedPath.toPath}")
|
||||
logger.debug(s"Stopping watch due to $cause from ${event.path}")
|
||||
action
|
||||
case (Trigger, Some(event)) =>
|
||||
logger.debug(s"Triggered by ${event.entry.typedPath.toPath}")
|
||||
config.triggeredMessage(event.entry.typedPath, count).foreach(info)
|
||||
logger.debug(s"Triggered by ${event.path}")
|
||||
config.triggeredMessage(event.path, count).foreach(info)
|
||||
Trigger
|
||||
case (Reload, Some(event)) =>
|
||||
logger.info(s"Reload triggered by ${event.entry.typedPath.toPath}")
|
||||
logger.info(s"Reload triggered by ${event.path}")
|
||||
Reload
|
||||
case _ =>
|
||||
nextAction()
@ -427,11 +428,11 @@ object Watched {
|
|||
val Configuration =
|
||||
AttributeKey[Watched]("watched-configuration", "Configures continuous execution.")
|
||||
|
||||
def createWatchService(): WatchService = {
|
||||
def createWatchService(pollDelay: FiniteDuration): WatchService = {
|
||||
def closeWatch = new MacOSXWatchService()
|
||||
sys.props.get("sbt.watch.mode") match {
|
||||
case Some("polling") =>
|
||||
new PollingWatchService(PollDelay)
|
||||
new PollingWatchService(pollDelay)
|
||||
case Some("nio") =>
|
||||
FileSystems.getDefault.newWatchService()
|
||||
case Some("closewatch") => closeWatch
@ -440,6 +441,7 @@ object Watched {
|
|||
FileSystems.getDefault.newWatchService()
|
||||
}
|
||||
}
|
||||
def createWatchService(): WatchService = createWatchService(PollDelay)
|
||||
}
|
||||
|
||||
/**
@ -458,7 +460,7 @@ trait WatchConfig {
|
|||
*
|
||||
* @return an sbt.io.FileEventMonitor instance.
|
||||
*/
|
||||
def fileEventMonitor: FileEventMonitor[FileCacheEntry]
|
||||
def fileEventMonitor: FileEventMonitor[FileAttributes]
|
||||
|
||||
/**
|
||||
* A function that is periodically invoked to determine whether the watch should stop or
@ -481,7 +483,7 @@ trait WatchConfig {
|
|||
* @param event the detected sbt.io.FileEventMonitor.Event.
|
||||
* @return the next [[Watched.Action Action]] to run.
|
||||
*/
|
||||
def onWatchEvent(event: Event[FileCacheEntry]): Watched.Action
|
||||
def onWatchEvent(event: FileAttributes.Event): Watched.Action
|
||||
|
||||
/**
|
||||
* Transforms the state after the watch terminates.
@ -494,11 +496,11 @@ trait WatchConfig {
|
|||
|
||||
/**
|
||||
* The optional message to log when a build is triggered.
|
||||
* @param typedPath the path that triggered the build
|
||||
* @param path the path that triggered the build
|
||||
* @param count the current iteration
|
||||
* @return an optional log message.
|
||||
*/
|
||||
def triggeredMessage(typedPath: TypedPath, count: Int): Option[String]
|
||||
def triggeredMessage(path: Path, count: Int): Option[String]
|
||||
|
||||
/**
|
||||
* The optional message to log before each watch iteration.
@ -537,12 +539,12 @@ object WatchConfig {
|
|||
*/
|
||||
def default(
|
||||
logger: Logger,
|
||||
fileEventMonitor: FileEventMonitor[FileCacheEntry],
|
||||
fileEventMonitor: FileEventMonitor[FileAttributes],
|
||||
handleInput: InputStream => Watched.Action,
|
||||
preWatch: (Int, Boolean) => Watched.Action,
|
||||
onWatchEvent: Event[FileCacheEntry] => Watched.Action,
|
||||
onWatchEvent: FileAttributes.Event => Watched.Action,
|
||||
onWatchTerminated: (Watched.Action, String, State) => State,
|
||||
triggeredMessage: (TypedPath, Int) => Option[String],
|
||||
triggeredMessage: (Path, Int) => Option[String],
|
||||
watchingMessage: Int => Option[String]
|
||||
): WatchConfig = {
|
||||
val l = logger
@ -555,15 +557,15 @@ object WatchConfig {
|
|||
val wm = watchingMessage
|
||||
new WatchConfig {
|
||||
override def logger: Logger = l
|
||||
override def fileEventMonitor: FileEventMonitor[FileCacheEntry] = fem
|
||||
override def fileEventMonitor: FileEventMonitor[FileAttributes] = fem
|
||||
override def handleInput(inputStream: InputStream): Watched.Action = hi(inputStream)
|
||||
override def preWatch(count: Int, lastResult: Boolean): Watched.Action =
|
||||
pw(count, lastResult)
|
||||
override def onWatchEvent(event: Event[FileCacheEntry]): Watched.Action = owe(event)
|
||||
override def onWatchEvent(event: FileAttributes.Event): Watched.Action = owe(event)
|
||||
override def onWatchTerminated(action: Watched.Action, command: String, state: State): State =
|
||||
owt(action, command, state)
|
||||
override def triggeredMessage(typedPath: TypedPath, count: Int): Option[String] =
|
||||
tm(typedPath, count)
|
||||
override def triggeredMessage(path: Path, count: Int): Option[String] =
|
||||
tm(path, count)
|
||||
override def watchingMessage(count: Int): Option[String] = wm(count)
|
||||
}
|
||||
}
@ -0,0 +1,101 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
|
||||
import java.lang
|
||||
import java.nio.file.Path
|
||||
import java.util.Optional
|
||||
|
||||
import sbt.Stamped
|
||||
import sbt.internal.inc.{ EmptyStamp, LastModified, Stamp }
|
||||
import sbt.io.FileEventMonitor.{ Creation, Deletion, Update }
|
||||
import sbt.io.{ FileEventMonitor, TypedPath }
|
||||
import xsbti.compile.analysis.{ Stamp => XStamp }
|
||||
|
||||
/**
|
||||
* Represents the FileAttributes of a file. This will be moved to io before 1.3.0 is released.
|
||||
*/
|
||||
trait FileAttributes {
|
||||
def hash: Option[String]
|
||||
def lastModified: Option[Long]
|
||||
def isRegularFile: Boolean
|
||||
def isDirectory: Boolean
|
||||
def isSymbolicLink: Boolean
|
||||
}
|
||||
object FileAttributes {
|
||||
trait Event {
|
||||
def path: Path
|
||||
def previous: Option[FileAttributes]
|
||||
def current: Option[FileAttributes]
|
||||
}
|
||||
private[sbt] class EventImpl(event: FileEventMonitor.Event[FileAttributes]) extends Event {
|
||||
override def path: Path = event.entry.typedPath.toPath
|
||||
override def previous: Option[FileAttributes] = event match {
|
||||
case Deletion(entry, _) => entry.value.toOption
|
||||
case Update(previous, _, _) => previous.value.toOption
|
||||
case _ => None
|
||||
}
|
||||
override def current: Option[FileAttributes] = event match {
|
||||
case Creation(entry, _) => entry.value.toOption
|
||||
case Update(_, current, _) => current.value.toOption
|
||||
case _ => None
|
||||
}
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case that: Event =>
|
||||
this.path == that.path && this.previous == that.previous && this.current == that.current
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode(): Int =
|
||||
((path.hashCode * 31) ^ previous.hashCode() * 31) ^ current.hashCode()
|
||||
override def toString: String = s"Event($path, $previous, $current)"
|
||||
}
|
||||
private[sbt] def default(typedPath: TypedPath): FileAttributes =
|
||||
DelegateFileAttributes(Stamped.converter(typedPath), typedPath)
|
||||
private[sbt] implicit class FileAttributesOps(val e: FileAttributes) extends AnyVal {
|
||||
private[sbt] def stamp: XStamp = e match {
|
||||
case DelegateFileAttributes(s, _) => s
|
||||
case _ =>
|
||||
e.hash
|
||||
.map(Stamp.fromString)
|
||||
.orElse(e.lastModified.map(new LastModified(_)))
|
||||
.getOrElse(EmptyStamp)
|
||||
}
|
||||
}
|
||||
|
||||
private implicit class Equiv(val xstamp: XStamp) extends AnyVal {
|
||||
def equiv(that: XStamp): Boolean = Stamp.equivStamp.equiv(xstamp, that)
|
||||
}
|
||||
private case class DelegateFileAttributes(
|
||||
private val stamp: XStamp,
|
||||
private val typedPath: TypedPath
|
||||
) extends FileAttributes
|
||||
with XStamp {
|
||||
override def getValueId: Int = stamp.getValueId
|
||||
override def writeStamp(): String = stamp.writeStamp()
|
||||
override def getHash: Optional[String] = stamp.getHash
|
||||
override def getLastModified: Optional[lang.Long] = stamp.getLastModified
|
||||
override def hash: Option[String] = getHash match {
|
||||
case h if h.isPresent => Some(h.get)
|
||||
case _ => None
|
||||
}
|
||||
override def lastModified: Option[Long] = getLastModified match {
|
||||
case l if l.isPresent => Some(l.get)
|
||||
case _ => None
|
||||
}
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case DelegateFileAttributes(thatStamp, thatTypedPath) =>
|
||||
(this.stamp equiv thatStamp) && (this.typedPath == thatTypedPath)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = stamp.hashCode
|
||||
override def toString: String = s"FileAttributes(hash = $hash, lastModified = $lastModified)"
|
||||
override def isRegularFile: Boolean = typedPath.isFile
|
||||
override def isDirectory: Boolean = typedPath.isDirectory
|
||||
override def isSymbolicLink: Boolean = typedPath.isSymbolicLink
|
||||
}
|
||||
}
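
Aside (illustrative sketch, not from the diff): Watched.scala below wraps raw monitor events in `FileAttributes.EventImpl`; the same pattern can classify events by comparing `previous` and `current`. Because `EventImpl` is `private[sbt]`, this only compiles inside the sbt package, and `monitor` is a hypothetical `FileEventMonitor[FileAttributes]`.

    import scala.concurrent.duration._
    import sbt.internal.FileAttributes
    import sbt.io.FileEventMonitor

    def classify(monitor: FileEventMonitor[FileAttributes]): Seq[String] =
      monitor.poll(10.millis).map(new FileAttributes.EventImpl(_)).map { event =>
        (event.previous, event.current) match {
          case (None, Some(_))    => s"created: ${event.path}"
          case (Some(_), None)    => s"deleted: ${event.path}"
          case (Some(p), Some(c)) => if (p == c) s"unchanged: ${event.path}" else s"updated: ${event.path}"
          case _                  => s"no attributes for: ${event.path}"
        }
      }
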
@ -1,62 +0,0 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
import java.lang
|
||||
import java.util.Optional
|
||||
|
||||
import sbt.internal.inc.{ EmptyStamp, LastModified, Stamp }
|
||||
import sbt.io.TypedPath
|
||||
import xsbti.compile.analysis.{ Stamp => XStamp }
|
||||
|
||||
/**
|
||||
* Represents a cache entry for a FileTreeRepository. It can be extended to add user defined
|
||||
* data to the FileTreeRepository cache.
|
||||
*/
|
||||
trait FileCacheEntry {
|
||||
def hash: Option[String]
|
||||
def lastModified: Option[Long]
|
||||
}
|
||||
object FileCacheEntry {
|
||||
def default(typedPath: TypedPath): FileCacheEntry =
|
||||
DelegateFileCacheEntry(Stamped.converter(typedPath))
|
||||
private[sbt] implicit class FileCacheEntryOps(val e: FileCacheEntry) extends AnyVal {
|
||||
private[sbt] def stamp: XStamp = e match {
|
||||
case DelegateFileCacheEntry(s) => s
|
||||
case _ =>
|
||||
e.hash
|
||||
.map(Stamp.fromString)
|
||||
.orElse(e.lastModified.map(new LastModified(_)))
|
||||
.getOrElse(EmptyStamp)
|
||||
}
|
||||
}
|
||||
|
||||
private case class DelegateFileCacheEntry(private val stamp: XStamp)
|
||||
extends FileCacheEntry
|
||||
with XStamp {
|
||||
override def getValueId: Int = stamp.getValueId
|
||||
override def writeStamp(): String = stamp.writeStamp()
|
||||
override def getHash: Optional[String] = stamp.getHash
|
||||
override def getLastModified: Optional[lang.Long] = stamp.getLastModified
|
||||
override def hash: Option[String] = getHash match {
|
||||
case h if h.isPresent => Some(h.get)
|
||||
case _ => None
|
||||
}
|
||||
override def lastModified: Option[Long] = getLastModified match {
|
||||
case l if l.isPresent => Some(l.get)
|
||||
case _ => None
|
||||
}
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case that: DelegateFileCacheEntry => this.stamp == that.stamp
|
||||
case that: XStamp => this.stamp == that
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = stamp.hashCode
|
||||
override def toString: String = s"FileCacheEntry(hash = $hash, lastModified = $lastModified)"
|
||||
}
|
||||
}
@ -8,15 +8,15 @@
|
|||
package sbt
|
||||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.{ Files, Path }
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
|
||||
import org.scalatest.{ FlatSpec, Matchers }
|
||||
import sbt.Watched._
|
||||
import sbt.WatchedSpec._
|
||||
import sbt.internal.FileCacheEntry
|
||||
import sbt.io.FileEventMonitor.Event
|
||||
import sbt.io.{ FileEventMonitor, IO, TypedPath }
|
||||
import sbt.internal.FileAttributes
|
||||
import sbt.io._
|
||||
import sbt.io.syntax._
|
||||
import sbt.util.Logger
|
||||
|
||||
import scala.collection.mutable
|
||||
|
|
@ -24,20 +24,27 @@ import scala.concurrent.duration._
|
|||
|
||||
class WatchedSpec extends FlatSpec with Matchers {
|
||||
object Defaults {
|
||||
private val fileTreeViewConfig = FileTreeViewConfig.default(50.millis)
|
||||
def config(
|
||||
sources: Seq[WatchSource],
|
||||
fileEventMonitor: Option[FileEventMonitor[FileCacheEntry]] = None,
|
||||
globs: Seq[Glob],
|
||||
fileEventMonitor: Option[FileEventMonitor[FileAttributes]] = None,
|
||||
logger: Logger = NullLogger,
|
||||
handleInput: InputStream => Action = _ => Ignore,
|
||||
preWatch: (Int, Boolean) => Action = (_, _) => CancelWatch,
|
||||
onWatchEvent: Event[FileCacheEntry] => Action = _ => Ignore,
|
||||
triggeredMessage: (TypedPath, Int) => Option[String] = (_, _) => None,
|
||||
onWatchEvent: FileAttributes.Event => Action = _ => Ignore,
|
||||
triggeredMessage: (Path, Int) => Option[String] = (_, _) => None,
|
||||
watchingMessage: Int => Option[String] = _ => None
|
||||
): WatchConfig = {
|
||||
val monitor = fileEventMonitor.getOrElse(
|
||||
fileTreeViewConfig.newMonitor(fileTreeViewConfig.newDataView(), sources, logger)
|
||||
)
|
||||
val monitor = fileEventMonitor.getOrElse {
|
||||
val fileTreeRepository = FileTreeRepository.default(FileAttributes.default)
|
||||
globs.foreach(fileTreeRepository.register)
|
||||
FileEventMonitor.antiEntropy(
|
||||
fileTreeRepository,
|
||||
50.millis,
|
||||
m => logger.debug(m.toString),
|
||||
50.milliseconds,
|
||||
100.milliseconds
|
||||
)
|
||||
}
|
||||
WatchConfig.default(
|
||||
logger = logger,
|
||||
monitor,
@ -55,13 +62,13 @@ class WatchedSpec extends FlatSpec with Matchers {
|
|||
override def read(): Int = -1
|
||||
}
|
||||
"Watched.watch" should "stop" in IO.withTemporaryDirectory { dir =>
|
||||
val config = Defaults.config(sources = Seq(WatchSource(dir.toRealPath)))
|
||||
val config = Defaults.config(globs = Seq(dir.toRealPath.toGlob))
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
}
|
||||
it should "trigger" in IO.withTemporaryDirectory { dir =>
|
||||
val triggered = new AtomicBoolean(false)
|
||||
val config = Defaults.config(
|
||||
sources = Seq(WatchSource(dir.toRealPath)),
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = _ => { triggered.set(true); Trigger },
|
||||
watchingMessage = _ => {
@ -73,28 +80,28 @@ class WatchedSpec extends FlatSpec with Matchers {
|
|||
}
|
||||
it should "filter events" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath
|
||||
val queue = new mutable.Queue[TypedPath]
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.toPath.resolve("foo")
|
||||
val bar = realDir.toPath.resolve("bar")
|
||||
val config = Defaults.config(
|
||||
sources = Seq(WatchSource(realDir)),
|
||||
globs = Seq(realDir ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 2) CancelWatch else Ignore,
|
||||
onWatchEvent = e => if (e.entry.typedPath.toPath == foo) Trigger else Ignore,
|
||||
onWatchEvent = e => if (e.path == foo) Trigger else Ignore,
|
||||
triggeredMessage = (tp, _) => { queue += tp; None },
|
||||
watchingMessage = _ => { Files.createFile(bar); Thread.sleep(5); Files.createFile(foo); None }
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
queue.toIndexedSeq.map(_.toPath) shouldBe Seq(foo)
|
||||
queue.toIndexedSeq shouldBe Seq(foo)
|
||||
}
|
||||
it should "enforce anti-entropy" in IO.withTemporaryDirectory { dir =>
|
||||
val realDir = dir.toRealPath
|
||||
val queue = new mutable.Queue[TypedPath]
|
||||
val foo = realDir.toPath.resolve("foo")
|
||||
val bar = realDir.toPath.resolve("bar")
|
||||
val realDir = dir.toRealPath.toPath
|
||||
val queue = new mutable.Queue[Path]
|
||||
val foo = realDir.resolve("foo")
|
||||
val bar = realDir.resolve("bar")
|
||||
val config = Defaults.config(
|
||||
sources = Seq(WatchSource(realDir)),
|
||||
globs = Seq(realDir ** AllPassFilter),
|
||||
preWatch = (count, _) => if (count == 3) CancelWatch else Ignore,
|
||||
onWatchEvent = _ => Trigger,
|
||||
onWatchEvent = e => if (e.path != realDir) Trigger else Ignore,
|
||||
triggeredMessage = (tp, _) => { queue += tp; None },
|
||||
watchingMessage = count => {
|
||||
count match {
@ -108,12 +115,12 @@ class WatchedSpec extends FlatSpec with Matchers {
|
|||
}
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(true), config) shouldBe CancelWatch
|
||||
queue.toIndexedSeq.map(_.toPath) shouldBe Seq(bar, foo)
|
||||
queue.toIndexedSeq shouldBe Seq(bar, foo)
|
||||
}
|
||||
it should "halt on error" in IO.withTemporaryDirectory { dir =>
|
||||
val halted = new AtomicBoolean(false)
|
||||
val config = Defaults.config(
|
||||
sources = Seq(WatchSource(dir.toRealPath)),
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (_, lastStatus) => if (lastStatus) Ignore else { halted.set(true); HandleError }
|
||||
)
|
||||
Watched.watch(NullInputStream, () => Right(false), config) shouldBe HandleError
@ -121,7 +128,7 @@ class WatchedSpec extends FlatSpec with Matchers {
|
|||
}
|
||||
it should "reload" in IO.withTemporaryDirectory { dir =>
|
||||
val config = Defaults.config(
|
||||
sources = Seq(WatchSource(dir.toRealPath)),
|
||||
globs = Seq(dir.toRealPath ** AllPassFilter),
|
||||
preWatch = (_, _) => Ignore,
|
||||
onWatchEvent = _ => Reload,
|
||||
watchingMessage = _ => { new File(dir, "file").createNewFile(); None }
@ -8,12 +8,13 @@
|
|||
package sbt
|
||||
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.{ Attributed, AttributeKey, Init, ConsoleAppender }
|
||||
import sbt.internal.util.{ AttributeKey, Attributed, ConsoleAppender, Init }
|
||||
import sbt.util.Show
|
||||
import sbt.internal.util.complete.Parser
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import Scope.{ ThisScope, GlobalScope }
|
||||
|
||||
import Scope.{ GlobalScope, ThisScope }
|
||||
import KeyRanks.{ DTask, Invisible }
|
||||
|
||||
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
@ -9,6 +9,7 @@ package sbt
|
|||
|
||||
import java.io.{ File, PrintWriter }
|
||||
import java.net.{ URI, URL }
|
||||
import java.nio.file.{ Path => NioPath }
|
||||
import java.util.Optional
|
||||
import java.util.concurrent.{ Callable, TimeUnit }
@ -47,8 +48,8 @@ import sbt.internal.util.Types._
|
|||
import sbt.internal.util._
|
||||
import sbt.internal.util.complete._
|
||||
import sbt.io.Path._
|
||||
import sbt.io.syntax._
|
||||
import sbt.io._
|
||||
import sbt.io.syntax._
|
||||
import sbt.librarymanagement.Artifact.{ DocClassifier, SourceClassifier }
|
||||
import sbt.librarymanagement.Configurations.{
|
||||
Compile,
@ -68,8 +69,8 @@ import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprin
|
|||
import sbt.util.CacheImplicits._
|
||||
import sbt.util.InterfaceUtil.{ toJavaFunction => f1 }
|
||||
import sbt.util._
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
import sjsonnew._
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
import xsbti.CrossValue
|
||||
import xsbti.compile.{ AnalysisContents, IncOptions, IncToolOptionsUtil }
@ -80,6 +81,7 @@ import scala.xml.NodeSeq
|
|||
|
||||
// incremental compiler
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.GlobLister._
|
||||
import sbt.internal.inc.{
|
||||
Analysis,
|
||||
AnalyzingCompiler,
@ -249,15 +251,15 @@ object Defaults extends BuildCommon {
|
|||
extraLoggers :== { _ =>
|
||||
Nil
|
||||
},
|
||||
pollingDirectories :== Nil,
|
||||
pollingGlobs :== Nil,
|
||||
watchSources :== Nil,
|
||||
watchProjectSources :== Nil,
|
||||
skip :== false,
|
||||
taskTemporaryDirectory := { val dir = IO.createTemporaryDirectory; dir.deleteOnExit(); dir },
|
||||
onComplete := {
|
||||
val dir = taskTemporaryDirectory.value;
|
||||
val tempDirectory = taskTemporaryDirectory.value
|
||||
() =>
|
||||
{ IO.delete(dir); IO.createDirectory(dir) }
|
||||
Clean.deleteContents(tempDirectory, _ => false)
|
||||
},
|
||||
useSuperShell :== sbt.internal.TaskProgress.isEnabled,
|
||||
progressReports := { (s: State) =>
@ -280,14 +282,11 @@ object Defaults extends BuildCommon {
|
|||
None
|
||||
},
|
||||
watchStartMessage := Watched.defaultStartWatch,
|
||||
fileTreeViewConfig := FileManagement.defaultFileTreeView.value,
|
||||
fileTreeView := state.value
|
||||
.get(Keys.globalFileTreeView)
|
||||
.getOrElse(FileTreeView.DEFAULT.asDataView(FileCacheEntry.default)),
|
||||
fileTreeRepository := FileTree.Repository.polling,
|
||||
externalHooks := {
|
||||
val view = fileTreeView.value
|
||||
val repository = fileTreeRepository.value
|
||||
compileOptions =>
|
||||
Some(ExternalHooks(compileOptions, view))
|
||||
Some(ExternalHooks(compileOptions, repository))
|
||||
},
|
||||
watchAntiEntropy :== new FiniteDuration(500, TimeUnit.MILLISECONDS),
|
||||
watchLogger := streams.value.log,
@ -377,13 +376,12 @@ object Defaults extends BuildCommon {
|
|||
crossPaths.value
|
||||
)
|
||||
},
|
||||
unmanagedSources := FileManagement
|
||||
.collectFiles(
|
||||
unmanagedSourceDirectories,
|
||||
includeFilter in unmanagedSources,
|
||||
excludeFilter in unmanagedSources
|
||||
)
|
||||
.value,
|
||||
unmanagedSources := {
|
||||
val filter =
|
||||
(includeFilter in unmanagedSources).value -- (excludeFilter in unmanagedSources).value
|
||||
val baseSources = if (sourcesInBase.value) baseDirectory.value * filter :: Nil else Nil
|
||||
(unmanagedSourceDirectories.value.map(_ ** filter) ++ baseSources).all.map(Stamped.file)
|
||||
},
|
||||
watchSources in ConfigGlobal := (watchSources in ConfigGlobal).value ++ {
|
||||
val baseDir = baseDirectory.value
|
||||
val bases = unmanagedSourceDirectories.value
@ -404,6 +402,7 @@ object Defaults extends BuildCommon {
|
|||
managedSourceDirectories := Seq(sourceManaged.value),
|
||||
managedSources := generate(sourceGenerators).value,
|
||||
sourceGenerators :== Nil,
|
||||
sourceGenerators / outputs := Seq(managedDirectory.value ** AllPassFilter),
|
||||
sourceDirectories := Classpaths
|
||||
.concatSettings(unmanagedSourceDirectories, managedSourceDirectories)
|
||||
.value,
@ -417,13 +416,11 @@ object Defaults extends BuildCommon {
|
|||
resourceDirectories := Classpaths
|
||||
.concatSettings(unmanagedResourceDirectories, managedResourceDirectories)
|
||||
.value,
|
||||
unmanagedResources := FileManagement
|
||||
.collectFiles(
|
||||
unmanagedResourceDirectories,
|
||||
includeFilter in unmanagedResources,
|
||||
excludeFilter in unmanagedResources
|
||||
)
|
||||
.value,
|
||||
unmanagedResources := {
|
||||
val filter =
|
||||
(includeFilter in unmanagedResources).value -- (excludeFilter in unmanagedResources).value
|
||||
unmanagedResourceDirectories.value.map(_ ** filter).all.map(Stamped.file)
|
||||
},
|
||||
watchSources in ConfigGlobal := (watchSources in ConfigGlobal).value ++ {
|
||||
val bases = unmanagedResourceDirectories.value
|
||||
val include = (includeFilter in unmanagedResources).value
@ -437,7 +434,8 @@ object Defaults extends BuildCommon {
|
|||
managedResources := generate(resourceGenerators).value,
|
||||
resources := Classpaths.concat(managedResources, unmanagedResources).value
|
||||
)
|
||||
def addBaseSources = FileManagement.appendBaseSources
|
||||
// This exists for binary compatibility and probably never should have been public.
|
||||
def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil
|
||||
lazy val outputConfigPaths = Seq(
|
||||
classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"),
|
||||
semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"),
@ -568,9 +566,14 @@ object Defaults extends BuildCommon {
|
|||
globalDefaults(enableBinaryCompileAnalysis := true)
|
||||
|
||||
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)(
|
||||
compileInputsSettings
|
||||
compileInputsSettings :+ (clean := Clean.taskIn(ThisScope).value)
|
||||
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
|
||||
outputs := Seq(
|
||||
compileAnalysisFileTask.value.toGlob,
|
||||
classDirectory.value ** "*.class"
|
||||
) ++ (sourceGenerators / outputs).value,
|
||||
compile := compileTask.value,
|
||||
clean := Clean.taskIn(ThisScope).value,
|
||||
manipulateBytecode := compileIncremental.value,
|
||||
compileIncremental := (compileIncrementalTask tag (Tags.Compile, Tags.CPU)).value,
|
||||
printWarnings := printWarningsTask.value,
@ -581,7 +584,7 @@ object Defaults extends BuildCommon {
|
|||
val extra =
|
||||
if (crossPaths.value) s"_$binVersion"
|
||||
else ""
|
||||
s"inc_compile${extra}.zip"
|
||||
s"inc_compile$extra.zip"
|
||||
},
|
||||
compileIncSetup := compileIncSetupTask.value,
|
||||
console := consoleTask.value,
@ -614,8 +617,9 @@ object Defaults extends BuildCommon {
|
|||
|
||||
lazy val projectTasks: Seq[Setting[_]] = Seq(
|
||||
cleanFiles := cleanFilesTask.value,
|
||||
cleanKeepFiles := historyPath.value.toVector,
|
||||
clean := (Def.task { IO.delete(cleanFiles.value) } tag (Tags.Clean)).value,
|
||||
cleanKeepFiles := Vector.empty,
|
||||
cleanKeepGlobs := historyPath.value.map(_.toGlob).toSeq,
|
||||
clean := Clean.taskIn(ThisScope).value,
|
||||
consoleProject := consoleProjectTask.value,
|
||||
watchTransitiveSources := watchTransitiveSourcesTask.value,
|
||||
watchProjectTransitiveSources := watchTransitiveSourcesTaskImpl(watchProjectSources).value,
@ -628,18 +632,22 @@ object Defaults extends BuildCommon {
|
|||
watchOnTermination := Watched.onTermination,
|
||||
watchConfig := {
|
||||
val sources = watchTransitiveSources.value ++ watchProjectTransitiveSources.value
|
||||
val globs = sources.map(
|
||||
s => Glob(s.base, s.includeFilter -- s.excludeFilter, if (s.recursive) Int.MaxValue else 0)
|
||||
)
|
||||
val wm = watchingMessage.?.value
|
||||
.map(w => (count: Int) => Some(w(WatchState.empty(sources).withCount(count))))
|
||||
.map(w => (count: Int) => Some(w(WatchState.empty(globs).withCount(count))))
|
||||
.getOrElse(watchStartMessage.value)
|
||||
val tm = triggeredMessage.?.value
|
||||
.map(
|
||||
tm => (_: TypedPath, count: Int) => Some(tm(WatchState.empty(sources).withCount(count)))
|
||||
)
|
||||
.map(tm => (_: NioPath, count: Int) => Some(tm(WatchState.empty(globs).withCount(count))))
|
||||
.getOrElse(watchTriggeredMessage.value)
|
||||
val logger = watchLogger.value
|
||||
val repo = FileManagement.toMonitoringRepository(FileManagement.repo.value)
|
||||
globs.foreach(repo.register)
|
||||
val monitor = FileManagement.monitor(repo, watchAntiEntropy.value, logger)
|
||||
WatchConfig.default(
|
||||
logger,
|
||||
fileTreeViewConfig.value.newMonitor(fileTreeView.value, sources, logger),
|
||||
monitor,
|
||||
watchHandleInput.value,
|
||||
watchPreWatch.value,
|
||||
watchOnEvent.value,
@ -650,7 +658,7 @@ object Defaults extends BuildCommon {
|
|||
},
|
||||
watchStartMessage := Watched.projectOnWatchMessage(thisProjectRef.value.project),
|
||||
watch := watchSetting.value,
|
||||
fileTreeViewConfig := FileManagement.defaultFileTreeView.value
|
||||
outputs += target.value ** AllPassFilter,
|
||||
)
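
Aside (illustrative only): the watchConfig task above turns each legacy WatchSource into a Glob; the same conversion in isolation, using the constructor exactly as it appears there, would be:

    // Depth 0 covers only the base path itself; Int.MaxValue makes the glob recursive.
    def sourceToGlob(s: Watched.WatchSource): Glob =
      Glob(s.base, s.includeFilter -- s.excludeFilter, if (s.recursive) Int.MaxValue else 0)
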
|
||||
|
||||
def generate(generators: SettingKey[Seq[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] =
@ -1180,10 +1188,14 @@ object Defaults extends BuildCommon {
|
|||
// drop base directories, since there are no valid mappings for these
|
||||
def sourceMappings: Initialize[Task[Seq[(File, String)]]] =
|
||||
Def.task {
|
||||
val srcs = unmanagedSources.value
|
||||
val sdirs = unmanagedSourceDirectories.value
|
||||
val base = baseDirectory.value
|
||||
(srcs --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat)
|
||||
val relative = (f: File) => relativeTo(sdirs)(f).orElse(relativeTo(base)(f)).orElse(flat(f))
|
||||
val exclude = Set(sdirs, base)
|
||||
unmanagedSources.value.flatMap {
|
||||
case s if !exclude(s) => relative(s).map(s -> _)
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories)
|
||||
def relativeMappings(
@ -1191,26 +1203,32 @@ object Defaults extends BuildCommon {
|
|||
dirs: ScopedTaskable[Seq[File]]
|
||||
): Initialize[Task[Seq[(File, String)]]] =
|
||||
Def.task {
|
||||
val rs = files.toTask.value
|
||||
val rdirs = dirs.toTask.value
|
||||
(rs --- rdirs) pair (relativeTo(rdirs) | flat)
|
||||
val rdirs = dirs.toTask.value.toSet
|
||||
val relative = (f: File) => relativeTo(rdirs)(f).orElse(flat(f))
|
||||
files.toTask.value.flatMap {
|
||||
case r if !rdirs(r) => relative(r).map(r -> _)
|
||||
case _ => None
|
||||
}
|
||||
}
|
||||
def collectFiles(
|
||||
dirs: ScopedTaskable[Seq[File]],
|
||||
filter: ScopedTaskable[FileFilter],
|
||||
excludes: ScopedTaskable[FileFilter]
|
||||
): Initialize[Task[Seq[File]]] = FileManagement.collectFiles(dirs, filter, excludes)
|
||||
include: ScopedTaskable[FileFilter],
|
||||
exclude: ScopedTaskable[FileFilter]
|
||||
): Initialize[Task[Seq[File]]] = Def.task {
|
||||
val filter = include.toTask.value -- exclude.toTask.value
|
||||
dirs.toTask.value.map(_ ** filter).all.map(Stamped.file)
|
||||
}
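
Aside (a hedged sketch of the single-pass glob-listing pattern above, usable only from within the sbt package because `GlobLister.all` and `Stamped.file` are internal; the directory names are made up): build one Glob per directory and list everything in a single traversal.

    import sbt.internal.GlobLister._
    import sbt.io.syntax._

    val sourceDirs = Seq(file("src/main/scala"), file("src/test/scala"))
    // `all` is assumed to yield the (Path, FileAttributes) pairs that Stamped.file consumes,
    // as the collectFiles implementation above implies.
    val stampedSources = sourceDirs.map(_ ** "*.scala").all.map(Stamped.file)
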
|
||||
def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] =
|
||||
Def.setting {
|
||||
val f = artifactName.value
|
||||
(crossTarget.value / f(
|
||||
crossTarget.value / f(
|
||||
ScalaVersion(
|
||||
(scalaVersion in artifactName).value,
|
||||
(scalaBinaryVersion in artifactName).value
|
||||
),
|
||||
projectID.value,
|
||||
art.value
|
||||
)).asFile
|
||||
)
|
||||
}
|
||||
|
||||
def artifactSetting: Initialize[Artifact] =
@ -1288,24 +1306,7 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
|
||||
/** Implements `cleanFiles` task. */
|
||||
def cleanFilesTask: Initialize[Task[Vector[File]]] =
|
||||
Def.task {
|
||||
val filesAndDirs = Vector(managedDirectory.value, target.value)
|
||||
val preserve = cleanKeepFiles.value
|
||||
val (dirs, fs) = filesAndDirs.filter(_.exists).partition(_.isDirectory)
|
||||
val preserveSet = preserve.filter(_.exists).toSet
|
||||
// performance reasons, only the direct items under `filesAndDirs` are allowed to be preserved.
|
||||
val dirItems = dirs flatMap { _.glob("*").get }
|
||||
(preserveSet diff dirItems.toSet) match {
|
||||
case xs if xs.isEmpty => ()
|
||||
case xs =>
|
||||
sys.error(
|
||||
s"cleanKeepFiles contains directory/file that are not directly under cleanFiles: $xs"
|
||||
)
|
||||
}
|
||||
val toClean = (dirItems filterNot { preserveSet(_) }) ++ fs
|
||||
toClean
|
||||
}
|
||||
private[sbt] def cleanFilesTask: Initialize[Task[Vector[File]]] = Def.task { Vector.empty[File] }
|
||||
|
||||
def bgRunMainTask(
|
||||
products: Initialize[Task[Classpath]],
@ -1614,6 +1615,8 @@ object Defaults extends BuildCommon {
|
|||
incCompiler.compile(i, s.log)
|
||||
} finally x.close() // workaround for #937
|
||||
}
|
||||
private def compileAnalysisFileTask: Def.Initialize[Task[File]] =
|
||||
Def.task(streams.value.cacheDirectory / compileAnalysisFilename.value)
|
||||
def compileIncSetupTask = Def.task {
|
||||
val lookup = new PerClasspathEntryLookup {
|
||||
private val cachedAnalysisMap = analysisMap(dependencyClasspath.value)
@ -1628,7 +1631,7 @@ object Defaults extends BuildCommon {
|
|||
lookup,
|
||||
(skip in compile).value,
|
||||
// TODO - this is kind of a bad way to grab the cache directory for streams...
|
||||
streams.value.cacheDirectory / compileAnalysisFilename.value,
|
||||
compileAnalysisFileTask.value,
|
||||
compilerCache.value,
|
||||
incOptions.value,
|
||||
(compilerReporter in compile).value,
@ -1727,10 +1730,15 @@ object Defaults extends BuildCommon {
|
|||
def copyResourcesTask =
|
||||
Def.task {
|
||||
val t = classDirectory.value
|
||||
val dirs = resourceDirectories.value
|
||||
val dirs = resourceDirectories.value.toSet
|
||||
val s = streams.value
|
||||
val cacheStore = s.cacheStoreFactory make "copy-resources"
|
||||
val mappings = (resources.value --- dirs) pair (rebase(dirs, t) | flat(t))
|
||||
val flt: File => Option[File] = flat(t)
|
||||
val transform: File => Option[File] = (f: File) => rebase(dirs, t)(f).orElse(flt(f))
|
||||
val mappings: Seq[(File, File)] = resources.value.flatMap {
|
||||
case r if !dirs(r) => transform(r).map(r -> _)
|
||||
case _ => None
|
||||
}
|
||||
s.log.debug("Copy resource mappings: " + mappings.mkString("\n\t", "\n\t", ""))
|
||||
Sync.sync(cacheStore)(mappings)
|
||||
mappings
@ -1794,8 +1802,7 @@ object Defaults extends BuildCommon {
|
|||
) :+ (classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.RuntimeDependencies)
|
||||
|
||||
lazy val compileSettings: Seq[Setting[_]] =
|
||||
configSettings ++
|
||||
(mainBgRunMainTask +: mainBgRunTask +: FileManagement.appendBaseSources) ++
|
||||
configSettings ++ (mainBgRunMainTask +: mainBgRunTask) ++
|
||||
Classpaths.addUnmanagedLibrary ++ runtimeLayeringSettings
|
||||
|
||||
private val testLayeringSettings: Seq[Setting[_]] = TaskRepository.proxy(
@ -2023,6 +2030,7 @@ object Classpaths {
|
|||
transitiveClassifiers :== Seq(SourceClassifier, DocClassifier),
|
||||
sourceArtifactTypes :== Artifact.DefaultSourceTypes.toVector,
|
||||
docArtifactTypes :== Artifact.DefaultDocTypes.toVector,
|
||||
outputs :== Nil,
|
||||
sbtDependency := {
|
||||
val app = appConfiguration.value
|
||||
val id = app.provider.id
@ -9,6 +9,7 @@ package sbt
|
|||
|
||||
import java.io.{ File, InputStream }
|
||||
import java.net.URL
|
||||
import java.nio.file.Path
|
||||
|
||||
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
|
||||
import org.apache.ivy.core.module.id.ModuleRevisionId
@ -22,8 +23,7 @@ import sbt.internal.io.WatchState
|
|||
import sbt.internal.librarymanagement.{ CompatibilityWarningOptions, IvySbt }
|
||||
import sbt.internal.server.ServerHandler
|
||||
import sbt.internal.util.{ AttributeKey, SourcePosition }
|
||||
import sbt.io.FileEventMonitor.Event
|
||||
import sbt.io.{ FileFilter, FileTreeDataView, TypedPath, WatchService }
|
||||
import sbt.io._
|
||||
import sbt.librarymanagement.Configurations.CompilerPlugin
|
||||
import sbt.librarymanagement.LibraryManagementCodec._
|
||||
import sbt.librarymanagement._
@ -93,14 +93,15 @@ object Keys {
|
|||
@deprecated("This is no longer used for continuous execution", "1.3.0")
|
||||
val watch = SettingKey(BasicKeys.watch)
|
||||
val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting)
|
||||
val fileTreeView = taskKey[FileTreeDataView[FileCacheEntry]]("A view of the file system")
|
||||
val enableGlobalCachingFileTreeRepository = settingKey[Boolean]("Toggles whether or not to create a global cache of the file system that can be used by tasks to quickly list a path").withRank(DSetting)
|
||||
val fileTreeRepository = taskKey[FileTree.Repository]("A repository of the file system.")
|
||||
val pollInterval = settingKey[FiniteDuration]("Interval between checks for modified sources by the continuous execution command.").withRank(BMinusSetting)
|
||||
val pollingDirectories = settingKey[Seq[Watched.WatchSource]]("Directories that cannot be cached and must always be rescanned. Typically these will be NFS mounted or something similar.").withRank(DSetting)
|
||||
val pollingGlobs = settingKey[Seq[Glob]]("Directories that cannot be cached and must always be rescanned. Typically these will be NFS mounted or something similar.").withRank(DSetting)
|
||||
val watchAntiEntropy = settingKey[FiniteDuration]("Duration for which the watch EventMonitor will ignore events for a file after that file has triggered a build.").withRank(BMinusSetting)
|
||||
val watchConfig = taskKey[WatchConfig]("The configuration for continuous execution.").withRank(BMinusSetting)
|
||||
val watchLogger = taskKey[Logger]("A logger that reports watch events.").withRank(DSetting)
|
||||
val watchHandleInput = settingKey[InputStream => Watched.Action]("Function that is periodically invoked to determine if the continuous build should be stopped or if a build should be triggered. It will usually read from stdin to respond to user commands.").withRank(BMinusSetting)
|
||||
val watchOnEvent = taskKey[Event[FileCacheEntry] => Watched.Action]("Determines how to handle a file event").withRank(BMinusSetting)
|
||||
val watchOnEvent = taskKey[FileAttributes.Event => Watched.Action]("Determines how to handle a file event").withRank(BMinusSetting)
|
||||
val watchOnTermination = taskKey[(Watched.Action, String, State) => State]("Transforms the input state after the continuous build completes.").withRank(BMinusSetting)
|
||||
val watchService = settingKey[() => WatchService]("Service to use to monitor file system changes.").withRank(BMinusSetting)
|
||||
val watchProjectSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources for the sbt meta project to watch to trigger a reload.").withRank(CSetting)
@ -109,12 +110,11 @@ object Keys {
|
|||
val watchSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in this project for continuous execution to watch for changes.").withRank(BMinusSetting)
|
||||
val watchStartMessage = settingKey[Int => Option[String]]("The message to show when triggered execution waits for sources to change. The parameter is the current watch iteration count.").withRank(DSetting)
|
||||
val watchTransitiveSources = taskKey[Seq[Watched.WatchSource]]("Defines the sources in all projects for continuous execution to watch.").withRank(CSetting)
|
||||
val watchTriggeredMessage = settingKey[(TypedPath, Int) => Option[String]]("The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count.").withRank(DSetting)
|
||||
val watchTriggeredMessage = settingKey[(Path, Int) => Option[String]]("The message to show before triggered execution executes an action after sources change. The parameters are the path that triggered the build and the current watch iteration count.").withRank(DSetting)
|
||||
@deprecated("Use watchStartMessage instead", "1.3.0")
|
||||
val watchingMessage = settingKey[WatchState => String]("The message to show when triggered execution waits for sources to change.").withRank(DSetting)
|
||||
@deprecated("Use watchTriggeredMessage instead", "1.3.0")
|
||||
val triggeredMessage = settingKey[WatchState => String]("The message to show before triggered execution executes an action after sources change.").withRank(DSetting)
|
||||
val fileTreeViewConfig = taskKey[FileTreeViewConfig]("Configures how sbt will traverse and monitor the file system.").withRank(BMinusSetting)
|
||||
|
||||
// Path Keys
|
||||
val baseDirectory = settingKey[File]("The base directory. Depending on the scope, this is the base directory for the build, project, configuration, or task.").withRank(AMinusSetting)
@ -150,10 +150,14 @@ object Keys {
|
|||
|
||||
// Output paths
|
||||
val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
|
||||
@deprecated("Clean is now implemented using globs.", "1.3.0")
|
||||
val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
|
||||
@deprecated("Clean is now implemented using globs. Prefer the cleanKeepGlobs task", "1.3.0")
|
||||
val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
|
||||
val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
|
||||
val crossPaths = settingKey[Boolean]("If true, enables cross paths, which distinguish input and output directories for cross-building.").withRank(ASetting)
|
||||
val taskTemporaryDirectory = settingKey[File]("Directory used for temporary files for tasks that is deleted after each task execution.").withRank(DSetting)
|
||||
val outputs = taskKey[Seq[Glob]]("Describes the output files of a task")
|
||||
|
||||
// Generators
|
||||
val sourceGenerators = settingKey[Seq[Task[Seq[File]]]]("List of tasks that generate sources.").withRank(CSetting)
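Since cleanKeepFiles is deprecated above in favor of globs, a migration might look like the following sketch. The directory name is made up; `.toGlob` and `** AllPassFilter` are the conversions exercised by the scripted tests later in this change.

// before: keep a directory by listing it as a file
cleanKeepFiles += target.value / "local-cache"
// after: keep the directory itself and, if desired, everything beneath it
cleanKeepGlobs += (target.value / "local-cache").toGlob
cleanKeepGlobs += target.value / "local-cache" ** AllPassFilter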
@ -468,8 +472,8 @@ object Keys {
|
|||
@deprecated("No longer used", "1.3.0")
|
||||
private[sbt] val executeProgress = settingKey[State => TaskProgress]("Experimental task execution listener.").withRank(DTask)
|
||||
|
||||
private[sbt] val globalFileTreeView = AttributeKey[FileTreeDataView[FileCacheEntry]](
|
||||
"globalFileTreeView",
|
||||
private[sbt] val globalFileTreeRepository = AttributeKey[FileTreeRepository[FileAttributes]](
|
||||
"global-file-tree-repository",
|
||||
"Provides a view into the file system that may or may not cache the tree in memory",
|
||||
1000
|
||||
)
@ -9,6 +9,7 @@ package sbt
|
|||
|
||||
import java.io.{ File, IOException }
|
||||
import java.net.URI
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
import java.util.{ Locale, Properties }
|
||||
|
||||
import sbt.BasicCommandStrings.{ Shell, TemplateCommand }
@ -21,8 +22,8 @@ import sbt.internal.inc.ScalaInstance
|
|||
import sbt.internal.util.Types.{ const, idFun }
|
||||
import sbt.internal.util._
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ FileTreeDataView, IO }
|
||||
import sbt.util.{ Level, Logger, Show }
|
||||
import xsbti.compile.CompilerCache
@ -852,27 +853,26 @@ object BuiltinCommands {
|
|||
}
|
||||
s.put(Keys.stateCompilerCache, cache)
|
||||
}
|
||||
private[sbt] def registerGlobalCaches(s: State): State = {
|
||||
val extracted = Project.extract(s)
|
||||
private[sbt] def registerGlobalCaches(s: State): State =
|
||||
try {
|
||||
val extracted = Project.extract(s)
|
||||
val cleanedUp = new AtomicBoolean(false)
|
||||
def cleanup(): Unit = {
|
||||
s.get(Keys.globalFileTreeView).foreach(_.close())
|
||||
s.attributes.remove(Keys.globalFileTreeView)
|
||||
s.get(Keys.globalFileTreeRepository).foreach(_.close())
|
||||
s.attributes.remove(Keys.globalFileTreeRepository)
|
||||
s.get(Keys.taskRepository).foreach(_.close())
|
||||
s.attributes.remove(Keys.taskRepository)
|
||||
()
|
||||
}
|
||||
val (_, config: FileTreeViewConfig) = extracted.runTask(Keys.fileTreeViewConfig, s)
|
||||
val view: FileTreeDataView[FileCacheEntry] = config.newDataView()
|
||||
val newState = s.addExitHook(cleanup())
|
||||
cleanup()
|
||||
val fileTreeRepository = FileManagement.defaultFileTreeRepository(s, extracted)
|
||||
val newState = s.addExitHook(if (cleanedUp.compareAndSet(false, true)) cleanup())
|
||||
newState
|
||||
.put(Keys.globalFileTreeView, view)
|
||||
.put(Keys.taskRepository, new TaskRepository.Repr)
|
||||
.put(Keys.globalFileTreeRepository, fileTreeRepository)
|
||||
} catch {
|
||||
case NonFatal(_) => s
|
||||
}
|
||||
}
|
||||
|
||||
def clearCaches: Command = {
|
||||
val help = Help.more(ClearCaches, ClearCachesDetailed)
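The reworked registerGlobalCaches guards its cleanup with an AtomicBoolean so the exit hook can never run it twice. In isolation the pattern is just:

// run-once cleanup sketch, simplified from the method above
val cleanedUp = new java.util.concurrent.atomic.AtomicBoolean(false)
def cleanup(): Unit = { /* close repositories, drop state attributes */ }
def exitHook(): Unit = if (cleanedUp.compareAndSet(false, true)) cleanup()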
@ -47,7 +47,7 @@ object Opts {
|
|||
"sonatype-staging",
|
||||
"https://oss.sonatype.org/service/local/staging/deploy/maven2"
|
||||
)
|
||||
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository" asFile)(
|
||||
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository")(
|
||||
Resolver.defaultPatterns
|
||||
)
|
||||
val sbtSnapshots = Resolver.bintrayRepo("sbt", "maven-snapshots")
@ -11,11 +11,16 @@ package internal
|
|||
import java.io.File
|
||||
import java.net.URI
|
||||
import BuildLoader._
|
||||
import sbt.internal.io.Alternatives._
|
||||
import sbt.internal.util.Types.{ const, idFun }
|
||||
import sbt.util.Logger
|
||||
import sbt.librarymanagement.ModuleID
|
||||
|
||||
private[internal] object Alternatives {
|
||||
private[internal] implicit class Alternative[A, B](val f: A => Option[B]) {
|
||||
def |(g: A => Option[B]): A => Option[B] = (a: A) => f(a) orElse g(a)
|
||||
}
|
||||
}
|
||||
import Alternatives.Alternative
|
||||
final class MultiHandler[S, T](
|
||||
builtIn: S => Option[T],
|
||||
root: Option[S => Option[T]],
@ -329,5 +329,5 @@ object BuildStreams {
|
|||
def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File =
|
||||
refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data)
|
||||
def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File =
|
||||
(Keys.target in scope get data getOrElse outputDirectory(fallbackBase).asFile) / StreamsDirectory
|
||||
(Keys.target in scope get data getOrElse outputDirectory(fallbackBase)) / StreamsDirectory
|
||||
}
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.io.IOException
|
||||
import java.nio.file.{ DirectoryNotEmptyException, Files }
|
||||
|
||||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.Project.richInitializeTask
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ AllPassFilter, FileTreeView, TypedPath }
|
||||
import sbt.util.Level
|
||||
|
||||
object Clean {
|
||||
|
||||
def deleteContents(file: File, exclude: TypedPath => Boolean): Unit =
|
||||
deleteContents(file, exclude, FileTreeView.DEFAULT, tryDelete((_: String) => {}))
|
||||
def deleteContents(
|
||||
file: File,
|
||||
exclude: TypedPath => Boolean,
|
||||
view: FileTreeView,
|
||||
delete: File => Unit
|
||||
): Unit = {
|
||||
def deleteRecursive(file: File): Unit = {
|
||||
view.list(file * AllPassFilter).filterNot(exclude).foreach {
|
||||
case dir if dir.isDirectory =>
|
||||
deleteRecursive(dir.toPath.toFile)
|
||||
delete(dir.toPath.toFile)
|
||||
case f => delete(f.toPath.toFile)
|
||||
}
|
||||
}
|
||||
deleteRecursive(file)
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides an implementation for the clean task. It delegates to [[taskIn]] using the
|
||||
* resolvedScoped key to set the scope.
|
||||
* @return the clean task definition.
|
||||
*/
|
||||
def task: Def.Initialize[Task[Unit]] =
|
||||
Def.taskDyn(taskIn(Keys.resolvedScoped.value.scope)) tag Tags.Clean
|
||||
|
||||
/**
|
||||
* Implements the clean task in a given scope. It uses the outputs task value in the provided
|
||||
* scope to determine which files to delete.
|
||||
* @param scope the scope in which the clean task is implemented
|
||||
* @return the clean task definition.
|
||||
*/
|
||||
def taskIn(scope: Scope): Def.Initialize[Task[Unit]] =
|
||||
Def.task {
|
||||
val excludes = cleanKeepFiles.value.map {
|
||||
// This mimics the legacy behavior of cleanFilesTask
|
||||
case f if f.isDirectory => f * AllPassFilter
|
||||
case f => f.toGlob
|
||||
} ++ cleanKeepGlobs.value
|
||||
val excludeFilter: TypedPath => Boolean = excludes.toTypedPathFilter
|
||||
val debug = (logLevel in scope).?.value.orElse(state.value.get(logLevel.key)) match {
|
||||
case Some(Level.Debug) =>
|
||||
(string: String) =>
|
||||
println(s"[debug] $string")
|
||||
case _ =>
|
||||
(_: String) =>
|
||||
{}
|
||||
}
|
||||
val delete = tryDelete(debug)
|
||||
cleanFiles.value.sorted.reverseIterator.foreach(delete)
|
||||
(outputs in scope).value.foreach { g =>
|
||||
val filter: TypedPath => Boolean = {
|
||||
val globFilter = g.toTypedPathFilter
|
||||
tp =>
|
||||
!globFilter(tp) || excludeFilter(tp)
|
||||
}
|
||||
deleteContents(g.base.toFile, filter, FileTreeView.DEFAULT, delete)
|
||||
delete(g.base.toFile)
|
||||
}
|
||||
} tag Tags.Clean
|
||||
private def tryDelete(debug: String => Unit): File => Unit = file => {
|
||||
try {
|
||||
debug(s"clean -- deleting file $file")
|
||||
Files.deleteIfExists(file.toPath)
|
||||
()
|
||||
} catch {
|
||||
case _: DirectoryNotEmptyException =>
|
||||
debug(s"clean -- unable to delete non-empty directory $file")
|
||||
case e: IOException =>
|
||||
debug(s"Caught unexpected exception $e deleting $file")
|
||||
}
|
||||
}
|
||||
}
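As a usage sketch of the deleteContents helper introduced above (the directory path and keep-rule are invented for illustration, and the two-argument overload shown in the new file is assumed):

import java.io.File
import sbt.io.TypedPath

// delete everything under target/scratch except files named ".keep"
val scratch = new File("target/scratch")
val keep: TypedPath => Boolean = tp => tp.toPath.getFileName.toString == ".keep"
sbt.internal.Clean.deleteContents(scratch, keep)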
@ -339,7 +339,7 @@ defaults
|
|||
|
||||
def sbtRCs(s: State): Seq[File] =
|
||||
(Path.userHome / sbtrc) ::
|
||||
(s.baseDir / sbtrc asFile) ::
|
||||
(s.baseDir / sbtrc) ::
|
||||
Nil
|
||||
|
||||
val CrossCommand = "+"
@ -6,13 +6,14 @@
|
|||
*/
|
||||
|
||||
package sbt.internal
|
||||
import java.nio.file.Paths
|
||||
|
||||
import java.nio.file.{ Path, Paths }
|
||||
import java.util.Optional
|
||||
|
||||
import sbt.Stamped
|
||||
import sbt.internal.inc.ExternalLookup
|
||||
import sbt.io.syntax.File
|
||||
import sbt.io.{ FileTreeRepository, FileTreeDataView, TypedPath }
|
||||
import sbt.io.syntax._
|
||||
import sbt.io.{ AllPassFilter, Glob, TypedPath }
|
||||
import sbt.Stamped
|
||||
import xsbti.compile._
|
||||
import xsbti.compile.analysis.Stamp
@ -20,10 +21,8 @@ import scala.collection.mutable
|
|||
|
||||
private[sbt] object ExternalHooks {
|
||||
private val javaHome = Option(System.getProperty("java.home")).map(Paths.get(_))
|
||||
def apply(
|
||||
options: CompileOptions,
|
||||
view: FileTreeDataView[FileCacheEntry]
|
||||
): DefaultExternalHooks = {
|
||||
def apply(options: CompileOptions, repo: FileTree.Repository): DefaultExternalHooks = {
|
||||
def listEntries(glob: Glob): Seq[(Path, FileAttributes)] = repo.get(glob)
|
||||
import scala.collection.JavaConverters._
|
||||
val sources = options.sources()
|
||||
val cachedSources = new java.util.HashMap[File, Stamp]
@ -32,30 +31,16 @@ private[sbt] object ExternalHooks {
|
|||
case sf: Stamped => cachedSources.put(sf, sf.stamp)
|
||||
case f: File => cachedSources.put(f, converter(f))
|
||||
}
|
||||
view match {
|
||||
case r: FileTreeRepository[FileCacheEntry] =>
|
||||
r.register(options.classesDirectory.toPath, Integer.MAX_VALUE)
|
||||
options.classpath.foreach { f =>
|
||||
r.register(f.toPath, Integer.MAX_VALUE)
|
||||
}
|
||||
case _ =>
|
||||
}
|
||||
val allBinaries = new java.util.HashMap[File, Stamp]
|
||||
options.classpath.foreach { f =>
|
||||
view.listEntries(f.toPath, Integer.MAX_VALUE, _ => true) foreach { e =>
|
||||
e.value match {
|
||||
case Right(value) => allBinaries.put(e.typedPath.toPath.toFile, value.stamp)
|
||||
case _ =>
|
||||
options.classpath.foreach {
|
||||
case f if f.getName.endsWith(".jar") =>
|
||||
// This gives us the entry for the path itself, which is necessary if the path is a jar file
|
||||
// rather than a directory.
|
||||
listEntries(f.toGlob) foreach { case (p, a) => allBinaries.put(p.toFile, a.stamp) }
|
||||
case f =>
|
||||
listEntries(f ** AllPassFilter) foreach {
|
||||
case (p, a) => allBinaries.put(p.toFile, a.stamp)
|
||||
}
|
||||
}
|
||||
// This gives us the entry for the path itself, which is necessary if the path is a jar file
|
||||
// rather than a directory.
|
||||
view.listEntries(f.toPath, -1, _ => true) foreach { e =>
|
||||
e.value match {
|
||||
case Right(value) => allBinaries.put(e.typedPath.toPath.toFile, value.stamp)
|
||||
case _ =>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val lookup = new ExternalLookup {
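The classpath handling above treats jars and directories differently: a jar needs a glob for the path itself, while a directory needs a recursive glob over its contents. A condensed sketch of that decision, assuming the same sbt.io.syntax conversions imported by this file:

import java.io.File
import sbt.io.{ AllPassFilter, Glob }
import sbt.io.syntax._

def classpathGlob(entry: File): Glob =
  if (entry.getName.endsWith(".jar")) entry.toGlob
  else entry ** AllPassFilter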
@ -5,98 +5,181 @@
|
|||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.io.IOException
|
||||
import java.nio.file.Path
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
|
||||
import sbt.BasicCommandStrings.ContinuousExecutePrefix
|
||||
import sbt.Keys._
|
||||
import sbt._
|
||||
import sbt.io.FileTreeDataView.Entry
|
||||
import sbt.io.syntax.File
|
||||
import sbt.io.{ FileFilter, FileTreeDataView, FileTreeRepository }
|
||||
import sbt.internal.io.HybridPollingFileTreeRepository
|
||||
import sbt.internal.util.Util
|
||||
import sbt.io.FileTreeDataView.{ Entry, Observable, Observer, Observers }
|
||||
import sbt.io.{ FileTreeRepository, _ }
|
||||
import sbt.util.{ Level, Logger }
|
||||
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.mutable
|
||||
import scala.concurrent.duration._
|
||||
|
||||
private[sbt] object FileManagement {
|
||||
private[sbt] def defaultFileTreeView: Def.Initialize[Task[FileTreeViewConfig]] = Def.task {
|
||||
val remaining = state.value.remainingCommands.map(_.commandLine.trim)
|
||||
private[sbt] def defaultFileTreeRepository(
|
||||
state: State,
|
||||
extracted: Extracted
|
||||
): FileTreeRepository[FileAttributes] = {
|
||||
val pollingGlobs = extracted.getOpt(Keys.pollingGlobs).getOrElse(Nil)
|
||||
val remaining = state.remainingCommands.map(_.commandLine)
|
||||
// If the session is interactive or if the commands include a continuous build, then use
|
||||
// the default configuration. Otherwise, use the sbt1_2_compat config, which does not cache
|
||||
// anything, which makes it less likely to cause issues with CI.
|
||||
val interactive = remaining.contains("shell") || remaining.lastOption.contains("iflast shell")
|
||||
val interactive =
|
||||
remaining.contains("shell") || remaining.lastOption.contains("iflast shell")
|
||||
val scripted = remaining.contains("setUpScripted")
|
||||
|
||||
val continuous = remaining.lastOption.exists(_.startsWith(ContinuousExecutePrefix))
|
||||
if (!scripted && (interactive || continuous)) {
|
||||
FileTreeViewConfig
|
||||
.default(watchAntiEntropy.value, pollInterval.value, pollingDirectories.value)
|
||||
} else FileTreeViewConfig.sbt1_2_compat(pollInterval.value, watchAntiEntropy.value)
|
||||
}
|
||||
private[sbt] implicit class FileTreeDataViewOps[+T](val fileTreeDataView: FileTreeDataView[T]) {
|
||||
def register(path: Path, maxDepth: Int): Either[IOException, Boolean] = {
|
||||
fileTreeDataView match {
|
||||
case r: FileTreeRepository[T] => r.register(path, maxDepth)
|
||||
case _ => Right(false)
|
||||
val enableCache = extracted
|
||||
.getOpt(Keys.enableGlobalCachingFileTreeRepository)
|
||||
.getOrElse(!scripted && (interactive || continuous))
|
||||
val pollInterval = extracted.getOpt(Keys.pollInterval).getOrElse(500.milliseconds)
|
||||
val watchLogger: WatchLogger = extracted.getOpt(Keys.logLevel) match {
|
||||
case Level.Debug =>
|
||||
new WatchLogger { override def debug(msg: => Any): Unit = println(s"[watch-debug] $msg") }
|
||||
case _ => new WatchLogger { override def debug(msg: => Any): Unit = {} }
|
||||
}
|
||||
if (enableCache) {
|
||||
if (pollingGlobs.isEmpty) FileTreeRepository.default(FileAttributes.default)
|
||||
else
|
||||
new HybridMonitoringRepository[FileAttributes](
|
||||
FileTreeRepository.hybrid(FileAttributes.default, pollingGlobs: _*),
|
||||
pollInterval,
|
||||
watchLogger
|
||||
)
|
||||
} else {
|
||||
if (Util.isWindows) new PollingFileRepository(FileAttributes.default)
|
||||
else {
|
||||
val service = Watched.createWatchService(pollInterval)
|
||||
FileTreeRepository.legacy(FileAttributes.default _, (_: Any) => {}, service)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def collectFiles(
|
||||
dirs: ScopedTaskable[Seq[File]],
|
||||
filter: ScopedTaskable[FileFilter],
|
||||
excludes: ScopedTaskable[FileFilter]
|
||||
): Def.Initialize[Task[Seq[File]]] =
|
||||
Def.task {
|
||||
val sourceDirs = dirs.toTask.value
|
||||
val view = fileTreeView.value
|
||||
val include = filter.toTask.value
|
||||
val ex = excludes.toTask.value
|
||||
val sourceFilter: Entry[FileCacheEntry] => Boolean = (entry: Entry[FileCacheEntry]) => {
|
||||
val typedPath = entry.typedPath
|
||||
val file = new java.io.File(typedPath.toPath.toString) {
|
||||
override def isDirectory: Boolean = typedPath.isDirectory
|
||||
override def isFile: Boolean = typedPath.isFile
|
||||
private[sbt] def monitor(
|
||||
repository: FileTreeRepository[FileAttributes],
|
||||
antiEntropy: FiniteDuration,
|
||||
logger: Logger
|
||||
): FileEventMonitor[FileAttributes] = {
|
||||
// Forwards callbacks to the repository. The close method removes all of these
|
||||
// callbacks.
|
||||
val copied: Observable[FileAttributes] = new Observable[FileAttributes] {
|
||||
private[this] val observers = new Observers[FileAttributes]
|
||||
val underlying = repository match {
|
||||
case h: HybridPollingFileTreeRepository[FileAttributes] =>
|
||||
h.toPollingRepository(antiEntropy, (msg: Any) => logger.debug(msg.toString))
|
||||
case r => r
|
||||
}
|
||||
private[this] val handle = underlying.addObserver(observers)
|
||||
override def addObserver(observer: Observer[FileAttributes]): Int =
|
||||
observers.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = observers.removeObserver(handle)
|
||||
override def close(): Unit = {
|
||||
underlying.removeObserver(handle)
|
||||
underlying.close()
|
||||
}
|
||||
}
|
||||
new FileEventMonitor[FileAttributes] {
|
||||
val monitor =
|
||||
FileEventMonitor.antiEntropy(
|
||||
copied,
|
||||
antiEntropy,
|
||||
new WatchLogger { override def debug(msg: => Any): Unit = logger.debug(msg.toString) },
|
||||
50.millis,
|
||||
10.minutes
|
||||
)
|
||||
override def poll(duration: Duration): Seq[FileEventMonitor.Event[FileAttributes]] =
|
||||
monitor.poll(duration)
|
||||
override def close(): Unit = monitor.close()
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def repo: Def.Initialize[Task[FileTreeRepository[FileAttributes]]] = Def.task {
|
||||
lazy val msg = s"Tried to get FileTreeRepository for uninitialized state."
|
||||
state.value.get(Keys.globalFileTreeRepository).getOrElse(throw new IllegalStateException(msg))
|
||||
}
|
||||
|
||||
private[sbt] class HybridMonitoringRepository[T](
|
||||
underlying: HybridPollingFileTreeRepository[T],
|
||||
delay: FiniteDuration,
|
||||
logger: WatchLogger
|
||||
) extends FileTreeRepository[T] {
|
||||
private val registered: mutable.Set[Glob] = ConcurrentHashMap.newKeySet[Glob].asScala
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = underlying.listEntries(glob)
|
||||
override def list(glob: Glob): Seq[TypedPath] = underlying.list(glob)
|
||||
override def addObserver(observer: Observer[T]): Int = underlying.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = underlying.removeObserver(handle)
|
||||
override def close(): Unit = underlying.close()
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = {
|
||||
registered.add(glob)
|
||||
underlying.register(glob)
|
||||
}
|
||||
override def unregister(glob: Glob): Unit = underlying.unregister(glob)
|
||||
private[sbt] def toMonitoringRepository: FileTreeRepository[T] = {
|
||||
val polling = underlying.toPollingRepository(delay, logger)
|
||||
registered.foreach(polling.register)
|
||||
polling
|
||||
}
|
||||
}
|
||||
private[sbt] def toMonitoringRepository[T](
|
||||
repository: FileTreeRepository[T]
|
||||
): FileTreeRepository[T] = repository match {
|
||||
case p: PollingFileRepository[T] => p.toMonitoringRepository
|
||||
case h: HybridMonitoringRepository[T] => h.toMonitoringRepository
|
||||
case r: FileTreeRepository[T] => new CopiedFileRepository(r)
|
||||
}
|
||||
private class CopiedFileRepository[T](underlying: FileTreeRepository[T])
|
||||
extends FileTreeRepository[T] {
|
||||
def addObserver(observer: Observer[T]) = underlying.addObserver(observer)
|
||||
def close(): Unit = {} // Don't close the underlying observable
|
||||
def list(glob: Glob): Seq[TypedPath] = underlying.list(glob)
|
||||
def listEntries(glob: Glob): Seq[Entry[T]] = underlying.listEntries(glob)
|
||||
def removeObserver(handle: Int): Unit = underlying.removeObserver(handle)
|
||||
def register(glob: Glob): Either[IOException, Boolean] = underlying.register(glob)
|
||||
def unregister(glob: Glob): Unit = underlying.unregister(glob)
|
||||
}
|
||||
private[sbt] class PollingFileRepository[T](converter: TypedPath => T)
|
||||
extends FileTreeRepository[T] { self =>
|
||||
private val registered: mutable.Set[Glob] = ConcurrentHashMap.newKeySet[Glob].asScala
|
||||
private[this] val view = FileTreeView.DEFAULT
|
||||
private[this] val dataView = view.asDataView(converter)
|
||||
private[this] val handles: mutable.Map[FileTreeRepository[T], Int] =
|
||||
new ConcurrentHashMap[FileTreeRepository[T], Int].asScala
|
||||
private val observers: Observers[T] = new Observers
|
||||
override def addObserver(observer: Observer[T]): Int = observers.addObserver(observer)
|
||||
override def close(): Unit = {
|
||||
handles.foreach { case (repo, handle) => repo.removeObserver(handle) }
|
||||
observers.close()
|
||||
}
|
||||
override def list(glob: Glob): Seq[TypedPath] = view.list(glob)
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = dataView.listEntries(glob)
|
||||
override def removeObserver(handle: Int): Unit = observers.removeObserver(handle)
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = Right(registered.add(glob))
|
||||
override def unregister(glob: Glob): Unit = registered -= glob
|
||||
|
||||
private[sbt] def toMonitoringRepository: FileTreeRepository[T] = {
|
||||
val legacy = FileTreeRepository.legacy(converter)
|
||||
registered.foreach(legacy.register)
|
||||
handles += legacy -> legacy.addObserver(observers)
|
||||
new FileTreeRepository[T] {
|
||||
override def listEntries(glob: Glob): Seq[Entry[T]] = legacy.listEntries(glob)
|
||||
override def list(glob: Glob): Seq[TypedPath] = legacy.list(glob)
|
||||
def addObserver(observer: Observer[T]): Int = legacy.addObserver(observer)
|
||||
override def removeObserver(handle: Int): Unit = legacy.removeObserver(handle)
|
||||
override def close(): Unit = legacy.close()
|
||||
override def register(glob: Glob): Either[IOException, Boolean] = {
|
||||
self.register(glob)
|
||||
legacy.register(glob)
|
||||
}
|
||||
include.accept(file) && !ex.accept(file)
|
||||
}
|
||||
sourceDirs.flatMap { dir =>
|
||||
view.register(dir.toPath, maxDepth = Integer.MAX_VALUE)
|
||||
view
|
||||
.listEntries(dir.toPath, maxDepth = Integer.MAX_VALUE, sourceFilter)
|
||||
.flatMap(e => e.value.toOption.map(Stamped.file(e.typedPath, _)))
|
||||
override def unregister(glob: Glob): Unit = legacy.unregister(glob)
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def appendBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Seq(
|
||||
unmanagedSources := {
|
||||
val sources = unmanagedSources.value
|
||||
val f = (includeFilter in unmanagedSources).value
|
||||
val excl = (excludeFilter in unmanagedSources).value
|
||||
val baseDir = baseDirectory.value
|
||||
val view = fileTreeView.value
|
||||
if (sourcesInBase.value) {
|
||||
view.register(baseDir.toPath, maxDepth = 0)
|
||||
sources ++
|
||||
view
|
||||
.listEntries(
|
||||
baseDir.toPath,
|
||||
maxDepth = 0,
|
||||
e => {
|
||||
val tp = e.typedPath
|
||||
/*
|
||||
* The TypedPath has the isDirectory and isFile properties embedded. By overriding
|
||||
* these methods in java.io.File, FileFilters may be applied without needing to
|
||||
* stat the file (which is expensive) for isDirectory and isFile checks.
|
||||
*/
|
||||
val file = new java.io.File(tp.toPath.toString) {
|
||||
override def isDirectory: Boolean = tp.isDirectory
|
||||
override def isFile: Boolean = tp.isFile
|
||||
}
|
||||
f.accept(file) && !excl.accept(file)
|
||||
}
|
||||
)
|
||||
.flatMap(e => e.value.toOption.map(Stamped.file(e.typedPath, _)))
|
||||
} else sources
|
||||
}
|
||||
)
|
||||
}
|
||||
}
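The heart of defaultFileTreeRepository is the heuristic deciding whether the caching repository is safe to enable. Stripped of the sbt plumbing it is roughly the following sketch (ContinuousExecutePrefix is the `~` command prefix):

def enableCacheByDefault(remainingCommands: Seq[String]): Boolean = {
  val interactive =
    remainingCommands.contains("shell") ||
      remainingCommands.lastOption.contains("iflast shell")
  val scripted = remainingCommands.contains("setUpScripted")
  val continuous = remainingCommands.lastOption.exists(_.startsWith("~"))
  !scripted && (interactive || continuous)
}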
@ -0,0 +1,57 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.nio.file.{ Path, WatchService => _ }
|
||||
|
||||
import sbt.internal.util.appmacro.MacroDefaults
|
||||
import sbt.io.FileTreeDataView.Entry
|
||||
import sbt.io._
|
||||
|
||||
import scala.language.experimental.macros
|
||||
|
||||
private[sbt] object FileTree {
|
||||
private def toPair(e: Entry[FileAttributes]): Option[(Path, FileAttributes)] =
|
||||
e.value.toOption.map(a => e.typedPath.toPath -> a)
|
||||
trait Repository extends sbt.internal.Repository[Seq, Glob, (Path, FileAttributes)]
|
||||
private[sbt] object Repository {
|
||||
|
||||
/**
|
||||
* Provide a default [[Repository]] that works within a task definition, e.g. Def.task. It's
|
||||
* implemented as a macro so that it can call `.value` on a TaskKey. Using a macro also allows
|
||||
* us to use classes that aren't actually available in this project, e.g. sbt.Keys.
|
||||
* @return a [[Repository]] instance
|
||||
*/
|
||||
implicit def default: FileTree.Repository = macro MacroDefaults.fileTreeRepository
|
||||
private[sbt] object polling extends Repository {
|
||||
val view = FileTreeView.DEFAULT.asDataView(FileAttributes.default)
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] =
|
||||
view.listEntries(key).flatMap(toPair)
|
||||
override def close(): Unit = {}
|
||||
}
|
||||
}
|
||||
private class ViewRepository(underlying: FileTreeDataView[FileAttributes]) extends Repository {
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] =
|
||||
underlying.listEntries(key).flatMap(toPair)
|
||||
override def close(): Unit = {}
|
||||
}
|
||||
private class CachingRepository(underlying: FileTreeRepository[FileAttributes])
|
||||
extends Repository {
|
||||
override def get(key: Glob): Seq[(Path, FileAttributes)] = {
|
||||
underlying.register(key)
|
||||
underlying.listEntries(key).flatMap(toPair)
|
||||
}
|
||||
override def close(): Unit = underlying.close()
|
||||
}
|
||||
private[sbt] def repository(underlying: FileTreeDataView[FileAttributes]): Repository =
|
||||
underlying match {
|
||||
case r: FileTreeRepository[FileAttributes] => new CachingRepository(r)
|
||||
case v => new ViewRepository(v)
|
||||
}
|
||||
}
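A small usage sketch for the FileTree.Repository abstraction introduced above, using the non-caching polling instance. Both FileTree and polling are private[sbt], so this only works from code inside the sbt package; the path is illustrative.

import java.io.File
import sbt.internal.FileTree
import sbt.io.AllPassFilter
import sbt.io.syntax._

// list everything under src/ recursively without any in-memory cache
val entries = FileTree.Repository.polling.get(new File("src") ** AllPassFilter)
entries.foreach { case (path, attributes) => println(s"$path -> $attributes") }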
@ -0,0 +1,96 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import java.nio.file.Path
|
||||
|
||||
import sbt.io.Glob
|
||||
|
||||
/**
|
||||
* Retrieve files from a repository. This should usually be an extension class for
|
||||
* sbt.io.internal.Glob (or a Traversable collection of source instances) that allows us to
|
||||
* actually retrieve the files corresponding to those sources.
|
||||
*/
|
||||
private[sbt] sealed trait GlobLister extends Any {
|
||||
|
||||
/**
|
||||
* Get the sources described by this [[GlobLister]].
|
||||
*
|
||||
* @param repository the [[FileTree.Repository]] to which file I/O is delegated.
|
||||
* @return the files described by this [[GlobLister]].
|
||||
*/
|
||||
def all(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)]
|
||||
|
||||
/**
|
||||
* Get the unique sources described by this [[GlobLister]].
|
||||
*
|
||||
* @param repository the [[FileTree.Repository]] to which file I/O is delegated.
|
||||
* @return the files described by this [[GlobLister]] with any duplicates removed.
|
||||
*/
|
||||
def unique(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)]
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides implicit definitions to provide a [[GlobLister]] given a Glob or
|
||||
* Traversable[Glob].
|
||||
*/
|
||||
object GlobLister extends GlobListers
|
||||
|
||||
/**
|
||||
* Provides implicit definitions to provide a [[GlobLister]] given a Glob or
|
||||
* Traversable[Glob].
|
||||
*/
|
||||
private[sbt] trait GlobListers {
|
||||
import GlobListers._
|
||||
|
||||
/**
|
||||
* Generate a [[GlobLister]] given a particular [[Glob]].
|
||||
*
|
||||
* @param source the input Glob
|
||||
*/
|
||||
implicit def fromGlob(source: Glob): GlobLister = new impl(source :: Nil)
|
||||
|
||||
/**
|
||||
* Generate a [[GlobLister]] given a collection of Globs. If the input collection type
|
||||
* preserves uniqueness, e.g. `Set[Glob]`, then the output of [[GlobLister.all]] will be
|
||||
* the unique source list. Otherwise duplicates are possible in [[GlobLister.all]] and it is necessary to call
|
||||
* [[GlobLister.unique]] to de-duplicate the files.
|
||||
*
|
||||
* @param sources the collection of sources
|
||||
* @tparam T the source collection type
|
||||
*/
|
||||
implicit def fromTraversableGlob[T <: Traversable[Glob]](sources: T): GlobLister =
|
||||
new impl(sources)
|
||||
}
|
||||
private[internal] object GlobListers {
|
||||
|
||||
/**
|
||||
* Implements [[GlobLister]] given a collection of Globs. If the input collection type
|
||||
* preserves uniqueness, e.g. `Set[Glob]`, then the output will be the unique source list.
|
||||
* Otherwise duplicates are possible.
|
||||
*
|
||||
* @param globs the input globs
|
||||
* @tparam T the collection type
|
||||
*/
|
||||
private class impl[T <: Traversable[Glob]](val globs: T) extends AnyVal with GlobLister {
|
||||
private def get[T0 <: Traversable[Glob]](
|
||||
traversable: T0,
|
||||
repository: FileTree.Repository
|
||||
): Seq[(Path, FileAttributes)] =
|
||||
traversable.flatMap { glob =>
|
||||
val sourceFilter = glob.toFileFilter
|
||||
repository.get(glob).filter { case (p, _) => sourceFilter.accept(p.toFile) }
|
||||
}.toIndexedSeq
|
||||
|
||||
override def all(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
|
||||
get(globs, repository)
|
||||
override def unique(implicit repository: FileTree.Repository): Seq[(Path, FileAttributes)] =
|
||||
get(globs.toSet[Glob], repository)
|
||||
}
|
||||
}
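And a sketch of how the GlobLister syntax is meant to be consumed, in sbt-internal code where the GlobListers conversions mixed into the sbt package object are in scope; the paths and the explicit Seq[Glob] annotation are illustrative.

import java.io.File
import sbt.internal.FileTree
import sbt.io.{ AllPassFilter, Glob }
import sbt.io.syntax._

implicit val repo: FileTree.Repository = FileTree.Repository.polling
val globs: Seq[Glob] = Seq(new File("src") ** AllPassFilter, new File("src-gen") ** AllPassFilter)
val distinctPaths = globs.unique.map(_._1) // lists both globs, de-duplicated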
@ -9,12 +9,12 @@ object Dependencies {
|
|||
val baseScalaVersion = scala212
|
||||
|
||||
// sbt modules
|
||||
private val ioVersion = "1.3.0-M5"
|
||||
private val ioVersion = "1.3.0-M7"
|
||||
private val utilVersion = "1.3.0-M5"
|
||||
private val lmVersion =
|
||||
sys.props.get("sbt.build.lm.version") match {
|
||||
case Some(version) => version
|
||||
case _ => "1.3.0-M1"
|
||||
case _ => "1.3.0-M1"
|
||||
}
|
||||
private val zincVersion = "1.3.0-M2"
@ -34,13 +34,13 @@ object Dependencies {
|
|||
val lmOrganization =
|
||||
sys.props.get("sbt.build.lm.organization") match {
|
||||
case Some(impl) => impl
|
||||
case _ => "org.scala-sbt"
|
||||
case _ => "org.scala-sbt"
|
||||
}
|
||||
|
||||
val lmModuleName =
|
||||
sys.props.get("sbt.build.lm.moduleName") match {
|
||||
case Some(impl) => impl
|
||||
case _ => "librarymanagement-ivy"
|
||||
case _ => "librarymanagement-ivy"
|
||||
}
|
||||
|
||||
lmOrganization %% lmModuleName % lmVersion
@ -98,7 +98,8 @@ object Dependencies {
|
|||
|
||||
def addSbtLmCore(p: Project): Project =
|
||||
addSbtModule(p, sbtLmPath, "lmCore", libraryManagementCore)
|
||||
def addSbtLmImpl(p: Project): Project = addSbtModule(p, sbtLmPath, "lmImpl", libraryManagementImpl)
|
||||
def addSbtLmImpl(p: Project): Project =
|
||||
addSbtModule(p, sbtLmPath, "lmImpl", libraryManagementImpl)
|
||||
|
||||
def addSbtCompilerInterface(p: Project): Project =
|
||||
addSbtModule(p, sbtZincPath, "compilerInterface212", compilerInterface)
@ -1,6 +1,5 @@
|
|||
import sbt.io.Path._
|
||||
import sbt.Keys._
|
||||
import sbt._
|
||||
import Keys._
|
||||
import sbt.io.CopyOptions
|
||||
|
||||
object SbtLauncherPlugin extends AutoPlugin {
@ -43,7 +42,12 @@ object SbtLauncherPlugin extends AutoPlugin {
|
|||
IO.unzip(jar, dir)
|
||||
IO.copy(overrides.map({ case (n, f) => (f, dir / n) }), CopyOptions().withOverwrite(true))
|
||||
// TODO - is the ok for creating a jar?
|
||||
IO.zip((dir.allPaths --- dir) pair relativeTo(dir), target)
|
||||
val rebase: File => Seq[(File, String)] = {
|
||||
val path = dir.toPath
|
||||
f =>
|
||||
if (f != dir) f -> path.relativize(f.toPath).toString :: Nil else Nil
|
||||
}
|
||||
IO.zip(dir.allPaths.get().flatMap(rebase), target)
|
||||
}
|
||||
target
|
||||
}
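The launcher change replaces relativeTo with an explicit relativize step; in isolation the mapping is:

import java.io.File

// map each file under `dir` to its path relative to `dir`, skipping `dir` itself
def rebase(dir: File): File => Seq[(File, String)] = {
  val base = dir.toPath
  f => if (f != dir) (f -> base.relativize(f.toPath).toString) :: Nil else Nil
}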
@ -21,7 +21,8 @@ package object sbt
|
|||
with sbt.BuildSyntax
|
||||
with sbt.OptionSyntax
|
||||
with sbt.SlashSyntax
|
||||
with sbt.Import {
|
||||
with sbt.Import
|
||||
with sbt.internal.GlobListers {
|
||||
// IO
|
||||
def uri(s: String): URI = new URI(s)
|
||||
def file(s: String): File = new File(s)
@ -7,15 +7,12 @@
|
|||
|
||||
package sbt
|
||||
|
||||
// Todo share this this io.syntax
|
||||
private[sbt] trait IOSyntax0 extends IOSyntax1 {
|
||||
implicit def alternative[A, B](f: A => Option[B]): Alternative[A, B] =
|
||||
g => a => f(a) orElse g(a)
|
||||
implicit def alternative[A, B](f: A => Option[B]): Alternative[A, B] = new Alternative[A, B] {
|
||||
override def |(g: A => Option[B]): A => Option[B] = (a: A) => f(a) orElse g(a)
|
||||
}
|
||||
}
|
||||
private[sbt] trait IOSyntax1 extends sbt.io.IOSyntax
|
||||
private[sbt] trait Alternative[A, B] {
|
||||
def |(g: A => Option[B]): A => Option[B]
|
||||
}
|
||||
|
||||
private[sbt] trait IOSyntax1 {
|
||||
implicit def singleFileFinder(file: File): sbt.io.PathFinder = sbt.io.PathFinder(file)
|
||||
}
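The reworked Alternative keeps the same `|` semantics: try the first function and fall back to the second only on None. A small sketch, assuming the implicit `alternative` conversion above is in scope:

val byNumber: String => Option[Int] =
  s => if (s.nonEmpty && s.forall(_.isDigit)) Some(s.toInt) else None
val byName: String => Option[Int] = Map("one" -> 1, "two" -> 2).get
val parse: String => Option[Int] = byNumber | byName
parse("2")    // Some(2)
parse("two")  // Some(2)
parse("ten")  // None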
@ -1,6 +1,6 @@
cleanKeepFiles ++= Seq(
  target.value / "keep",
  target.value / "keepfile",
  target.value / "keepdir"
  target.value / "keepfile"
)

cleanKeepGlobs += target.value / "keepdir" ** AllPassFilter
@ -0,0 +1,7 @@
Compile / sourceGenerators += Def.task {
  val files = Seq(sourceManaged.value / "foo.txt", sourceManaged.value / "bar.txt")
  files.foreach(IO.touch(_))
  files
}

cleanKeepGlobs += (sourceManaged.value / "bar.txt").toGlob
@ -0,0 +1,6 @@
> compile
$ exists target/scala-2.12/src_managed/foo.txt target/scala-2.12/src_managed/bar.txt

> clean
$ absent target/scala-2.12/src_managed/foo.txt
$ exists target/scala-2.12/src_managed/bar.txt
@ -0,0 +1,2 @@
cleanKeepGlobs in Compile +=
  ((classDirectory in Compile in compile).value / "X.class").toGlob
@ -0,0 +1,3 @@
class A {
  val x: Int = 1
}

@ -0,0 +1,3 @@
class X {
  val y: Int = 0
}

@ -0,0 +1,3 @@
class B {
  val x: Int = 2
}
@ -0,0 +1,22 @@
$ touch target/cant-touch-this

> Test/compile
$ exists target/scala-2.12/classes/A.class
$ exists target/scala-2.12/test-classes/B.class

> Test/clean
$ exists target/cant-touch-this
# it should clean only compile classes
$ exists target/scala-2.12/classes/A.class
$ exists target/scala-2.12/classes/X.class
$ absent target/scala-2.12/test-classes/B.class

# compiling everything again, but now cleaning only compile classes
> Test/compile
> Compile/clean
$ exists target/cant-touch-this
# it should clean only compile classes
$ absent target/scala-2.12/classes/A.class
$ exists target/scala-2.12/test-classes/B.class
# and X has to be kept, because of the cleanKeepFiles override
$ exists target/scala-2.12/classes/X.class
@ -0,0 +1 @@
val root = Build.root
@ -0,0 +1,33 @@
import java.nio.file.{ Path, Paths }
import sbt._
import sbt.io.Glob
import sbt.Keys._

object Build {
  val simpleTest = taskKey[Unit]("Check that glob file selectors work")
  val relativeSubdir = Paths.get("subdir")
  val relativeFiles =
    Seq(Paths.get("foo.txt"), Paths.get("bar.json"), relativeSubdir.resolve("baz.yml"))
  val files = taskKey[Path]("The files subdirectory")
  val subdir = taskKey[Path]("The subdir path in the files subdirectory")
  val allFiles = taskKey[Seq[Path]]("Returns all of the regular files in the files subdirectory")
  private def check(actual: Any, expected: Any): Unit =
    if (actual != expected) throw new IllegalStateException(s"$actual did not equal $expected")
  val root = (project in file("."))
    .settings(
      files := (baseDirectory.value / "files").toPath,
      subdir := files.value.resolve("subdir"),
      allFiles := {
        val f = files.value
        relativeFiles.map(f.resolve(_))
      },
      simpleTest := {
        val allPaths: Glob = files.value.allPaths
        val af = allFiles.value.toSet
        val sub = subdir.value
        check(allPaths.all.map(_._1).toSet, af + sub)
        check(allPaths.all.filter(_._2.isRegularFile).map(_._1).toSet, af)
        check(allPaths.all.filter(_._2.isDirectory).map(_._1).toSet, Set(sub))
      }
    )
}
@ -0,0 +1 @@
> simpleTest

@ -1 +0,0 @@
scalaSource in Compile := baseDirectory.value / "src"

@ -1 +0,0 @@
scalaSource in Compile := file("src")

@ -1 +0,0 @@
object A

@ -1,7 +0,0 @@
$ copy-file changes/relative.sbt build.sbt
> reload
-> compile

$ copy-file changes/absolute.sbt build.sbt
> reload
> compile