Merge branch 'develop' into wip/versionscheme

This commit is contained in:
eugene yokota 2020-08-06 21:04:21 -04:00 committed by GitHub
commit a59d9fbb97
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 1089 additions and 460 deletions

View File

@ -155,4 +155,6 @@ for:
- '%USERPROFILE%\.sbt'
test_script:
- sbt "scripted actions/* classloader-cache/* nio/* watch/*" "serverTestProj/test"
# The server tests often fail in CI when run together so just run a single test to ensure
# that the thin client works on windows
- sbt "scripted actions/* classloader-cache/* nio/* watch/*" "serverTestProj/testOnly testpkg.ClientTest"

View File

@ -11,7 +11,7 @@ env:
# WHITESOURCE_PASSWORD=
- secure: d3bu2KNwsVHwfhbGgO+gmRfDKBJhfICdCJFGWKf2w3Gv86AJZX9nuTYRxz0KtdvEHO5Xw8WTBZLPb2thSJqhw9OCm4J8TBAVqCP0ruUj4+aqBUFy4bVexQ6WKE6nWHs4JPzPk8c6uC1LG3hMuzlC8RGETXtL/n81Ef1u7NjyXjs=
matrix:
- SBT_CMD="mimaReportBinaryIssues ; javafmtCheck ; Test / javafmtCheck; scalafmtCheckAll ; scalafmtSbtCheck; headerCheck ;test:headerCheck ;whitesourceOnPush ;test:compile; publishLocal; test; serverTestProj/test; doc; $UTIL_TESTS; ++$SCALA_213; $UTIL_TESTS"
- SBT_CMD="mimaReportBinaryIssues ; javafmtCheck ; Test / javafmtCheck; scalafmtCheckAll ; scalafmtSbtCheck; serverTestProj/scalafmtCheckAll; headerCheck ;test:headerCheck ;whitesourceOnPush ;test:compile; publishLocal; test; serverTestProj/test; doc; $UTIL_TESTS; ++$SCALA_213; $UTIL_TESTS"
- SBT_CMD="scripted actions/* apiinfo/* compiler-project/* ivy-deps-management/* reporter/* tests/* watch/* classloader-cache/* package/*"
- SBT_CMD="scripted dependency-management/* plugins/* project-load/* java/* run/* nio/*"
- SBT_CMD="repoOverrideTest:scripted dependency-management/*; scripted source-dependencies/* project/*"

View File

@ -973,7 +973,18 @@ lazy val mainProj = (project in file("main"))
// the binary compatible version.
exclude[IncompatibleMethTypeProblem]("sbt.internal.server.NetworkChannel.this"),
exclude[IncompatibleSignatureProblem]("sbt.internal.DeprecatedContinuous.taskDefinitions"),
exclude[MissingClassProblem]("sbt.internal.SettingsGraph*")
exclude[MissingClassProblem]("sbt.internal.SettingsGraph*"),
// Tasks include non-Files, but it's ok
exclude[IncompatibleSignatureProblem]("sbt.Defaults.outputConfigPaths"),
// private[sbt]
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.trackedExportedJarProducts"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.unmanagedDependencies0"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependenciesImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.internalDependencyJarsImplTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.interDependencies"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.productsTask"),
exclude[DirectMissingMethodProblem]("sbt.Classpaths.jarProductsTask"),
)
)
.configure(

View File

@ -57,11 +57,14 @@ object LineReader {
* `testOnly testOnly\ com.foo.FooSpec` instead of `testOnly com.foo.FooSpec`.
*/
if (c.append.nonEmpty) {
if (!pl.line().endsWith(" ")) {
candidates.add(new Candidate(pl.line().split(" ").last + c.append))
} else {
candidates.add(new Candidate(c.append))
}
val comp =
if (!pl.line().endsWith(" ")) pl.line().split(" ").last + c.append else c.append
// tell jline to append a " " if the completion would be valid with a " " appended
// which can be the case for input tasks and some commands. We need to exclude
// the empty string and ";" which always seem to be present.
val complete = (Parser.completions(parser, comp + " ", 10).get.map(_.display) --
Set(";", "")).nonEmpty
candidates.add(new Candidate(comp, comp, null, null, null, null, complete))
}
}
}

View File

@ -7,7 +7,7 @@
package sbt.internal.util
import java.io.{ InputStream, InterruptedIOException, OutputStream, PrintStream }
import java.io.{ InputStream, InterruptedIOException, IOException, OutputStream, PrintStream }
import java.nio.channels.ClosedChannelException
import java.util.{ Arrays, Locale }
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
@ -163,7 +163,7 @@ trait Terminal extends AutoCloseable {
if (lines.nonEmpty) lines.tail.foldLeft(lines.headOption.fold(0)(count))(_ + count(_))
else 0
}
private[sbt] def flush(): Unit = printStream.flush()
}
object Terminal {
@ -171,7 +171,8 @@ object Terminal {
if (System.getProperty("sbt.jline.verbose", "false") != "true")
jline.internal.Log.setOutput(new PrintStream(_ => {}, false))
def consoleLog(string: String): Unit = {
Terminal.console.printStream.println(s"[info] $string")
try Terminal.console.printStream.println(s"[info] $string")
catch { case _: IOException => }
}
private[sbt] def set(terminal: Terminal) = {
activeTerminal.set(terminal)

View File

@ -11,7 +11,7 @@ import scala.annotation.tailrec
import java.io.File
import sbt.io.syntax._
import sbt.io.IO
import sbt.internal.inc.{ PlainVirtualFile, RawCompiler, ScalaInstance }
import sbt.internal.inc.{ RawCompiler, ScalaInstance }
import sbt.internal.util.Types.:+:
import sbt.internal.util.HListFormats._
import sbt.internal.util.HNil
@ -88,11 +88,7 @@ object RawCompileLike {
def rawCompile(instance: ScalaInstance, cpOptions: ClasspathOptions): Gen =
(sources, classpath, outputDirectory, options, _, log) => {
val compiler = new RawCompiler(instance, cpOptions, log)
compiler(sources map { x =>
PlainVirtualFile(x.toPath)
}, classpath map { x =>
PlainVirtualFile(x.toPath)
}, outputDirectory.toPath, options)
compiler(sources.map(_.toPath), classpath.map(_.toPath), outputDirectory.toPath, options)
}
def compile(

View File

@ -18,10 +18,12 @@ import sbt.internal.util.complete.Parser
import sbt.internal.util._
import Util._
import sbt.util.Show
import xsbti.VirtualFile
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits {
type Classpath = Seq[Attributed[File]]
type VirtualClasspath = Seq[Attributed[VirtualFile]]
def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings)

View File

@ -18,4 +18,5 @@ final class PromiseWrap[A] {
}
def success(value: A): Unit = underlying.success(value)
def failure(cause: Throwable): Unit = underlying.failure(cause)
def isCompleted: Boolean = underlying.isCompleted
}

View File

@ -26,7 +26,8 @@ import sbt.Project.{
inTask,
richInitialize,
richInitializeTask,
richTaskSessionVar
richTaskSessionVar,
sbtRichTaskPromise,
}
import sbt.Scope.{ GlobalScope, ThisScope, fillTaskAxis }
import sbt.coursierint._
@ -35,7 +36,14 @@ import sbt.internal._
import sbt.internal.classpath.AlternativeZincUtil
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.inc.classpath.{ ClassLoaderCache, ClasspathFilter, ClasspathUtil }
import sbt.internal.inc.{ MappedFileConverter, PlainVirtualFile, Stamps, ZincLmUtil, ZincUtil }
import sbt.internal.inc.{
CompileOutput,
MappedFileConverter,
PlainVirtualFile,
Stamps,
ZincLmUtil,
ZincUtil
}
import sbt.internal.io.{ Source, WatchState }
import sbt.internal.librarymanagement.mavenint.{
PomExtraDependencyAttributes,
@ -68,7 +76,6 @@ import sbt.librarymanagement.Configurations.{
Provided,
Runtime,
Test,
names
}
import sbt.librarymanagement.CrossVersion.{ binarySbtVersion, binaryScalaVersion, partialVersion }
import sbt.librarymanagement._
@ -82,7 +89,7 @@ import sbt.nio.Watch
import sbt.std.TaskExtra._
import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint }
import sbt.util.CacheImplicits._
import sbt.util.InterfaceUtil.{ toJavaFunction => f1 }
import sbt.util.InterfaceUtil.{ toJavaFunction => f1, t2 }
import sbt.util._
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter
@ -97,7 +104,6 @@ import sbt.SlashSyntax0._
import sbt.internal.inc.{
Analysis,
AnalyzingCompiler,
FileValueCache,
Locate,
ManagedLoggedReporter,
MixedAnalyzingCompiler,
@ -112,6 +118,7 @@ import xsbti.compile.{
CompileOptions,
CompileOrder,
CompileResult,
CompileProgress,
CompilerCache,
Compilers,
DefinesClass,
@ -180,16 +187,10 @@ object Defaults extends BuildCommon {
apiMappings := Map.empty,
autoScalaLibrary :== true,
managedScalaInstance :== true,
classpathEntryDefinesClass := {
val converter = fileConverter.value
val f = FileValueCache({ x: NioPath =>
if (x.getFileName.toString != "rt.jar") Locate.definesClass(converter.toVirtualFile(x))
else ((_: String) => false): DefinesClass
}).get;
{ (x: File) =>
f(x.toPath)
}
classpathEntryDefinesClass := { (file: File) =>
sys.error("use classpathEntryDefinesClassVF instead")
},
extraIncOptions :== Seq("JAVA_CLASS_VERSION" -> sys.props("java.class.version")),
allowMachinePath :== true,
rootPaths := {
val app = appConfiguration.value
@ -374,6 +375,7 @@ object Defaults extends BuildCommon {
() => Clean.deleteContents(tempDirectory, _ => false)
},
turbo :== SysProp.turbo,
usePipelining :== SysProp.pipelining,
useSuperShell := { if (insideCI.value) false else Terminal.console.isSupershellEnabled },
progressReports := {
val rs = EvaluateTask.taskTimingProgress.toVector ++ EvaluateTask.taskTraceEvent.toVector
@ -544,10 +546,16 @@ object Defaults extends BuildCommon {
)
// This exists for binary compatibility and probably never should have been public.
def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil
lazy val outputConfigPaths = Seq(
lazy val outputConfigPaths: Seq[Setting[_]] = Seq(
classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"),
// TODO: Use FileConverter once Zinc can handle non-Path
backendOutput := PlainVirtualFile(classDirectory.value.toPath),
earlyOutput / artifactPath := earlyArtifactPathSetting(artifact).value,
// TODO: Use FileConverter once Zinc can handle non-Path
earlyOutput := PlainVirtualFile((earlyOutput / artifactPath).value.toPath),
semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"),
compileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "zinc"),
earlyCompileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "early-zinc"),
target in doc := crossTarget.value / (prefix(configuration.value.name) + "api")
)
@ -671,9 +679,10 @@ object Defaults extends BuildCommon {
def defaultCompileSettings: Seq[Setting[_]] =
globalDefaults(enableBinaryCompileAnalysis := true)
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++ inTask(compile)(
compileInputsSettings
) ++ configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++
inTask(compile)(compileInputsSettings) ++
inTask(compileJava)(compileInputsSettings(dependencyVirtualClasspath)) ++
configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
compileOutputs := {
import scala.collection.JavaConverters._
val c = fileConverter.value
@ -685,9 +694,31 @@ object Defaults extends BuildCommon {
},
compileOutputs := compileOutputs.triggeredBy(compile).value,
clean := (compileOutputs / clean).value,
earlyOutputPing := Def.promise[Boolean],
compileProgress := {
val s = streams.value
val promise = earlyOutputPing.value
val mn = moduleName.value
val c = configuration.value
new CompileProgress {
override def afterEarlyOutput(isSuccess: Boolean): Unit = {
if (isSuccess) s.log.debug(s"[$mn / $c] early output is success")
else s.log.debug(s"[$mn / $c] early output can't be made because of macros")
promise.complete(Value(isSuccess))
}
}
},
compileEarly := compileEarlyTask.value,
compile := compileTask.value,
compileScalaBackend := compileScalaBackendTask.value,
compileJava := compileJavaTask.value,
compileSplit := {
// conditional task
if (incOptions.value.pipelining) compileJava.value
else compileScalaBackend.value
},
internalDependencyConfigurations := InternalDependencies.configurations.value,
manipulateBytecode := compileIncremental.value,
manipulateBytecode := compileSplit.value,
compileIncremental := compileIncrementalTask.tag(Tags.Compile, Tags.CPU).value,
printWarnings := printWarningsTask.value,
compileAnalysisFilename := {
@ -699,6 +730,9 @@ object Defaults extends BuildCommon {
else ""
s"inc_compile$extra.zip"
},
earlyCompileAnalysisFile := {
earlyCompileAnalysisTargetRoot.value / compileAnalysisFilename.value
},
compileAnalysisFile := {
compileAnalysisTargetRoot.value / compileAnalysisFilename.value
},
@ -716,6 +750,22 @@ object Defaults extends BuildCommon {
): ClassFileManagerType
).toOptional
)
.withPipelining(usePipelining.value)
},
scalacOptions := {
val old = scalacOptions.value
val converter = fileConverter.value
if (usePipelining.value)
Vector("-Ypickle-java", "-Ypickle-write", converter.toPath(earlyOutput.value).toString) ++ old
else old
},
classpathEntryDefinesClassVF := {
val converter = fileConverter.value
val f = VirtualFileValueCache(converter)({ x: VirtualFile =>
if (x.name.toString != "rt.jar") Locate.definesClass(x)
else ((_: String) => false): DefinesClass
}).get
f
},
compileIncSetup := compileIncSetupTask.value,
console := consoleTask.value,
@ -1430,6 +1480,19 @@ object Defaults extends BuildCommon {
excludes: ScopedTaskable[FileFilter]
): Initialize[Task[Seq[File]]] = collectFiles(dirs: Taskable[Seq[File]], filter, excludes)
private[sbt] def earlyArtifactPathSetting(art: SettingKey[Artifact]): Initialize[File] =
Def.setting {
val f = artifactName.value
crossTarget.value / "early" / f(
ScalaVersion(
(scalaVersion in artifactName).value,
(scalaBinaryVersion in artifactName).value
),
projectID.value,
art.value
)
}
def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] =
Def.setting {
val f = artifactName.value
@ -1837,6 +1900,43 @@ object Defaults extends BuildCommon {
finally w.close() // workaround for #937
}
/** Handles traditional Scalac compilation. For non-pipelined compilation,
* this also handles Java compilation.
*/
private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task {
val setup: Setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val analysisResult: CompileResult = compileIncremental.value
// Save analysis midway if pipelining is enabled
if (analysisResult.hasModified && setup.incrementalCompilerOptions.pipelining) {
val store =
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
store.set(contents)
}
analysisResult
}
/** Block on earlyOutputPing promise, which will be completed by `compile` midway
* via `compileProgress` implementation.
*/
private[sbt] def compileEarlyTask: Initialize[Task[CompileAnalysis]] = Def.task {
if ({
streams.value.log
.debug(s"${name.value}: compileEarly: blocking on earlyOutputPing")
earlyOutputPing.await.value
}) {
val useBinary: Boolean = enableBinaryCompileAnalysis.value
val store =
MixedAnalyzingCompiler.staticCachedStore(earlyCompileAnalysisFile.value.toPath, !useBinary)
store.get.toOption match {
case Some(contents) => contents.getAnalysis
case _ => Analysis.empty
}
} else {
compile.value
}
}
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
val setup: Setup = compileIncSetup.value
val useBinary: Boolean = enableBinaryCompileAnalysis.value
@ -1861,11 +1961,31 @@ object Defaults extends BuildCommon {
def compileIncrementalTask = Def.task {
BspCompileTask.compute(bspTargetIdentifier.value, thisProjectRef.value, configuration.value) {
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
compileIncrementalTaskImpl(streams.value, (compileInputs in compile).value)
compileIncrementalTaskImpl(
streams.value,
(compile / compileInputs).value,
earlyOutputPing.value
)
}
}
private val incCompiler = ZincUtil.defaultIncrementalCompiler
private[this] def compileIncrementalTaskImpl(s: TaskStreams, ci: Inputs): CompileResult = {
private[sbt] def compileJavaTask: Initialize[Task[CompileResult]] = Def.task {
val s = streams.value
val in = (compileJava / compileInputs).value
val _ = compileScalaBackend.value
try {
incCompiler.asInstanceOf[sbt.internal.inc.IncrementalCompilerImpl].compileAllJava(in, s.log)
} finally {
in.setup.reporter match {
case r: BuildServerReporter => r.sendFinalReport()
}
}
}
private[this] def compileIncrementalTaskImpl(
s: TaskStreams,
ci: Inputs,
promise: PromiseWrap[Boolean]
): CompileResult = {
lazy val x = s.text(ExportStream)
def onArgs(cs: Compilers) = {
cs.withScalac(
@ -1875,13 +1995,14 @@ object Defaults extends BuildCommon {
}
)
}
// .withJavac(
// cs.javac.onArgs(exported(x, "javac"))
//)
val compilers: Compilers = ci.compilers
val i = ci.withCompilers(onArgs(compilers))
try {
incCompiler.compile(i, s.log)
} catch {
case e: Throwable if !promise.isCompleted =>
promise.failure(e)
throw e
} finally {
i.setup.reporter match {
case r: BuildServerReporter => r.sendFinalReport()
@ -1890,47 +2011,44 @@ object Defaults extends BuildCommon {
}
}
def compileIncSetupTask = Def.task {
val converter = fileConverter.value
val cp = dependencyPicklePath.value
val lookup = new PerClasspathEntryLookup {
private val cachedAnalysisMap: File => Option[CompileAnalysis] =
analysisMap(dependencyClasspath.value)
private val cachedPerEntryDefinesClassLookup: File => DefinesClass =
Keys.classpathEntryDefinesClass.value
private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] =
analysisMap(cp)
private val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass =
Keys.classpathEntryDefinesClassVF.value
override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] =
cachedAnalysisMap(converter.toPath(classpathEntry).toFile).toOptional
cachedAnalysisMap(classpathEntry).toOptional
override def definesClass(classpathEntry: VirtualFile): DefinesClass =
cachedPerEntryDefinesClassLookup(converter.toPath(classpathEntry).toFile)
cachedPerEntryDefinesClassLookup(classpathEntry)
}
val extra = extraIncOptions.value.map(t2)
Setup.of(
lookup,
(skip in compile).value,
// TODO - this is kind of a bad way to grab the cache directory for streams...
compileAnalysisFile.value.toPath,
compilerCache.value,
incOptions.value,
(compilerReporter in compile).value,
// TODO - task / setting for compile progress
None.toOptional: Optional[xsbti.compile.CompileProgress],
// TODO - task / setting for extra,
Array.empty: Array[xsbti.T2[String, String]],
Some((compile / compileProgress).value).toOptional,
extra.toArray,
)
}
def compileInputsSettings: Seq[Setting[_]] = {
def compileInputsSettings: Seq[Setting[_]] =
compileInputsSettings(dependencyPicklePath)
def compileInputsSettings(classpathTask: TaskKey[VirtualClasspath]): Seq[Setting[_]] = {
Seq(
compileOptions := {
val c = fileConverter.value
val cp0 = classDirectory.value +: data(dependencyClasspath.value)
val cp = cp0 map { x =>
PlainVirtualFile(x.toPath)
}
val cp0 = classpathTask.value
val cp = backendOutput.value +: data(cp0)
val vs = sources.value.toVector map { x =>
c.toVirtualFile(x.toPath)
}
CompileOptions.of(
cp.toArray: Array[VirtualFile],
cp.toArray,
vs.toArray,
classDirectory.value.toPath,
c.toPath(backendOutput.value),
scalacOptions.value.toArray,
javacOptions.value.toArray,
maxErrors.value,
@ -1939,7 +2057,7 @@ object Defaults extends BuildCommon {
None.toOptional: Optional[NioPath],
Some(fileConverter.value).toOptional,
Some(reusableStamper.value).toOptional,
None.toOptional: Optional[xsbti.compile.Output],
Some(CompileOutput(c.toPath(earlyOutput.value))).toOptional,
)
},
compilerReporter := {
@ -2167,8 +2285,10 @@ object Classpaths {
def concatSettings[T](a: SettingKey[Seq[T]], b: SettingKey[Seq[T]]): Initialize[Seq[T]] =
concatSettings(a: Initialize[Seq[T]], b) // forward to widened variant
// Included as part of JvmPlugin#projectSettings.
lazy val configSettings: Seq[Setting[_]] = classpaths ++ Seq(
products := makeProducts.value,
pickleProducts := makePickleProducts.value,
productDirectories := classDirectory.value :: Nil,
classpathConfiguration := findClasspathConfig(
internalConfigurationMap.value,
@ -2182,7 +2302,7 @@ object Classpaths {
externalDependencyClasspath := concat(unmanagedClasspath, managedClasspath).value,
dependencyClasspath := concat(internalDependencyClasspath, externalDependencyClasspath).value,
fullClasspath := concatDistinct(exportedProducts, dependencyClasspath).value,
internalDependencyClasspath := internalDependencies.value,
internalDependencyClasspath := ClasspathImpl.internalDependencyClasspathTask.value,
unmanagedClasspath := unmanagedDependencies.value,
managedClasspath := {
val isMeta = isMetaBuild.value
@ -2199,12 +2319,20 @@ object Classpaths {
if (isMeta && !force && !csr) mjars ++ sbtCp
else mjars
},
exportedProducts := trackedExportedProducts(TrackLevel.TrackAlways).value,
exportedProductsIfMissing := trackedExportedProducts(TrackLevel.TrackIfMissing).value,
exportedProductsNoTracking := trackedExportedProducts(TrackLevel.NoTracking).value,
exportedProductJars := trackedExportedJarProducts(TrackLevel.TrackAlways).value,
exportedProductJarsIfMissing := trackedExportedJarProducts(TrackLevel.TrackIfMissing).value,
exportedProductJarsNoTracking := trackedExportedJarProducts(TrackLevel.NoTracking).value,
exportedProducts := ClasspathImpl.trackedExportedProducts(TrackLevel.TrackAlways).value,
exportedProductsIfMissing := ClasspathImpl
.trackedExportedProducts(TrackLevel.TrackIfMissing)
.value,
exportedProductsNoTracking := ClasspathImpl
.trackedExportedProducts(TrackLevel.NoTracking)
.value,
exportedProductJars := ClasspathImpl.trackedExportedJarProducts(TrackLevel.TrackAlways).value,
exportedProductJarsIfMissing := ClasspathImpl
.trackedExportedJarProducts(TrackLevel.TrackIfMissing)
.value,
exportedProductJarsNoTracking := ClasspathImpl
.trackedExportedJarProducts(TrackLevel.NoTracking)
.value,
internalDependencyAsJars := internalDependencyJarsTask.value,
dependencyClasspathAsJars := concat(internalDependencyAsJars, externalDependencyClasspath).value,
fullClasspathAsJars := concatDistinct(exportedProductJars, dependencyClasspathAsJars).value,
@ -2222,7 +2350,38 @@ object Classpaths {
dependencyClasspathFiles.value.flatMap(
p => FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _)
)
}
},
dependencyVirtualClasspath := {
// TODO: Use converter
val cp0 = dependencyClasspath.value
cp0 map {
_ map { file =>
PlainVirtualFile(file.toPath): VirtualFile
}
}
},
// Note: invoking this task from the shell would block indefinitely because it will
// wait for the upstream compilation to start.
dependencyPicklePath := {
// This is a conditional task. Do not refactor.
if (incOptions.value.pipelining) {
concat(
internalDependencyPicklePath,
Def.task {
// TODO: Use converter
externalDependencyClasspath.value map {
_ map { file =>
PlainVirtualFile(file.toPath): VirtualFile
}
}
}
).value
} else {
dependencyVirtualClasspath.value
}
},
internalDependencyPicklePath := ClasspathImpl.internalDependencyPicklePathTask.value,
exportedPickles := ClasspathImpl.exportedPicklesTask.value,
)
private[this] def exportClasspath(s: Setting[Task[Classpath]]): Setting[Task[Classpath]] =
@ -3190,15 +3349,15 @@ object Classpaths {
}
/*
// can't cache deliver/publish easily since files involved are hidden behind patterns. publish will be difficult to verify target-side anyway
def cachedPublish(cacheFile: File)(g: (IvySbt#Module, PublishConfiguration) => Unit, module: IvySbt#Module, config: PublishConfiguration) => Unit =
{ case module :+: config :+: HNil =>
/* implicit val publishCache = publishIC
val f = cached(cacheFile) { (conf: IvyConfiguration, settings: ModuleSettings, config: PublishConfiguration) =>*/
g(module, config)
/*}
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
}*/
// can't cache deliver/publish easily since files involved are hidden behind patterns. publish will be difficult to verify target-side anyway
def cachedPublish(cacheFile: File)(g: (IvySbt#Module, PublishConfiguration) => Unit, module: IvySbt#Module, config: PublishConfiguration) => Unit =
{ case module :+: config :+: HNil =>
/* implicit val publishCache = publishIC
val f = cached(cacheFile) { (conf: IvyConfiguration, settings: ModuleSettings, config: PublishConfiguration) =>*/
g(module, config)
/*}
f(module.owner.configuration :+: module.moduleSettings :+: config :+: HNil)*/
}*/
def defaultRepositoryFilter: MavenRepository => Boolean = repo => !repo.root.startsWith("file:")
@ -3291,140 +3450,37 @@ object Classpaths {
new RawRepository(resolver, resolver.getName)
}
def analyzed[T](data: T, analysis: CompileAnalysis) =
Attributed.blank(data).put(Keys.analysis, analysis)
def analyzed[T](data: T, analysis: CompileAnalysis) = ClasspathImpl.analyzed[T](data, analysis)
def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
val c = fileConverter.value
compile.value
copyResources.value
classDirectory.value :: Nil
c.toPath(backendOutput.value).toFile :: Nil
}
private[sbt] def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =
Def.task {
val _ = (packageBin / dynamicDependency).value
val art = (artifact in packageBin).value
val module = projectID.value
val config = configuration.value
for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings
.store(analyzed(f, analysis), apiURL.value)
.put(artifact.key, art)
.put(moduleID.key, module)
.put(configuration.key, config)
}
private[sbt] def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =
Def.task {
val _ = (packageBin / dynamicDependency).value
val art = (artifact in packageBin).value
val module = projectID.value
val config = configuration.value
for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings
.store(analyzed(f, analysis), apiURL.value)
.put(artifact.key, art)
.put(moduleID.key, module)
.put(configuration.key, config)
}
private[this] def trackedExportedProductsImplTask(
track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val _ = (packageBin / dynamicDependency).value
val useJars = exportJars.value
if (useJars) trackedJarProductsImplTask(track)
else trackedNonJarProductsImplTask(track)
}
private[this] def trackedNonJarProductsImplTask(
track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val dirs = productDirectories.value
val view = fileTreeView.value
def containsClassFile(): Boolean =
view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).nonEmpty
TrackLevel.intersection(track, exportToInternal.value) match {
case TrackLevel.TrackAlways =>
Def.task {
products.value map { (_, compile.value) }
}
case TrackLevel.TrackIfMissing if !containsClassFile() =>
Def.task {
products.value map { (_, compile.value) }
}
case _ =>
Def.task {
val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
dirs.map(_ -> analysis)
}
}
}
private[this] def trackedJarProductsImplTask(
track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val jar = (artifactPath in packageBin).value
TrackLevel.intersection(track, exportToInternal.value) match {
case TrackLevel.TrackAlways =>
Def.task {
Seq((packageBin.value, compile.value))
}
case TrackLevel.TrackIfMissing if !jar.exists =>
Def.task {
Seq((packageBin.value, compile.value))
}
case _ =>
Def.task {
val analysisOpt = previousCompile.value.analysis.toOption
Seq(jar) map { x =>
(
x,
if (analysisOpt.isDefined) analysisOpt.get
else Analysis.empty
)
}
}
private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task {
// This is a conditional task.
if (earlyOutputPing.await.value) {
// TODO: copyResources.value
earlyOutput.value :: Nil
} else {
val c = fileConverter.value
products.value map { x: File =>
c.toVirtualFile(x.toPath)
}
}
}
def constructBuildDependencies: Initialize[BuildDependencies] =
loadedBuild(lb => BuildUtil.dependencies(lb.units))
@deprecated("not used", "1.4.0")
def internalDependencies: Initialize[Task[Classpath]] =
Def.taskDyn {
val _ = (
(exportedProductsNoTracking / transitiveClasspathDependency).value,
(exportedProductsIfMissing / transitiveClasspathDependency).value,
(exportedProducts / transitiveClasspathDependency).value,
(exportedProductJarsNoTracking / transitiveClasspathDependency).value,
(exportedProductJarsIfMissing / transitiveClasspathDependency).value,
(exportedProductJars / transitiveClasspathDependency).value
)
internalDependenciesImplTask(
thisProjectRef.value,
classpathConfiguration.value,
configuration.value,
settingsData.value,
buildDependencies.value,
trackInternalDependencies.value
)
}
ClasspathImpl.internalDependencyClasspathTask
def internalDependencyJarsTask: Initialize[Task[Classpath]] =
Def.taskDyn {
internalDependencyJarsImplTask(
thisProjectRef.value,
classpathConfiguration.value,
configuration.value,
settingsData.value,
buildDependencies.value,
trackInternalDependencies.value
)
}
def unmanagedDependencies: Initialize[Task[Classpath]] =
Def.taskDyn {
unmanagedDependencies0(
thisProjectRef.value,
configuration.value,
settingsData.value,
buildDependencies.value
)
}
ClasspathImpl.internalDependencyJarsTask
def unmanagedDependencies: Initialize[Task[Classpath]] = ClasspathImpl.unmanagedDependenciesTask
def mkIvyConfiguration: Initialize[Task[InlineIvyConfiguration]] =
Def.task {
val (rs, other) = (fullResolvers.value.toVector, otherResolvers.value.toVector)
@ -3443,37 +3499,12 @@ object Classpaths {
.withLog(s.log)
}
import java.util.LinkedHashSet
import collection.JavaConverters._
def interSort(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
deps: BuildDependencies
): Seq[(ProjectRef, String)] = {
val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
def visit(p: ProjectRef, c: Configuration): Unit = {
val applicableConfigs = allConfigs(c)
for (ac <- applicableConfigs) // add all configurations in this project
visited add (p -> ac.name)
val masterConfs = names(getConfigurations(projectRef, data).toVector)
for (ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)) {
val configurations = getConfigurations(dep, data)
val mapping =
mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile")
// map master configuration 'c' and all extended configurations to the appropriate dependency configuration
for (ac <- applicableConfigs; depConfName <- mapping(ac.name)) {
for (depConf <- confOpt(configurations, depConfName))
if (!visited((dep, depConfName)))
visit(dep, depConf)
}
}
}
visit(projectRef, conf)
visited.toSeq
}
): Seq[(ProjectRef, String)] = ClasspathImpl.interSort(projectRef, conf, data, deps)
def interSortConfigurations(
projectRef: ProjectRef,
@ -3485,143 +3516,50 @@ object Classpaths {
case (projectRef, configName) => (projectRef, ConfigRef(configName))
}
private[sbt] def unmanagedDependencies0(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
deps: BuildDependencies
): Initialize[Task[Classpath]] =
Def.value {
interDependencies(
projectRef,
deps,
conf,
conf,
data,
TrackLevel.TrackAlways,
true,
(dep, conf, data, _) => unmanagedLibs(dep, conf, data),
)
}
private[sbt] def internalDependenciesImplTask(
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
track: TrackLevel
): Initialize[Task[Classpath]] =
Def.value { interDependencies(projectRef, deps, conf, self, data, track, false, productsTask) }
private[sbt] def internalDependencyJarsImplTask(
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
track: TrackLevel
): Initialize[Task[Classpath]] =
Def.value {
interDependencies(projectRef, deps, conf, self, data, track, false, jarProductsTask)
}
private[sbt] def interDependencies(
projectRef: ProjectRef,
deps: BuildDependencies,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
track: TrackLevel,
includeSelf: Boolean,
f: (ProjectRef, String, Settings[Scope], TrackLevel) => Task[Classpath]
): Task[Classpath] = {
val visited = interSort(projectRef, conf, data, deps)
val tasks = (new LinkedHashSet[Task[Classpath]]).asScala
for ((dep, c) <- visited)
if (includeSelf || (dep != projectRef) || (conf.name != c && self.name != c))
tasks += f(dep, c, data, track)
(tasks.toSeq.join).map(_.flatten.distinct)
}
def mapped(
confString: Option[String],
masterConfs: Seq[String],
depConfs: Seq[String],
default: String,
defaultMapping: String
): String => Seq[String] = {
lazy val defaultMap = parseMapping(defaultMapping, masterConfs, depConfs, _ :: Nil)
parseMapping(confString getOrElse default, masterConfs, depConfs, defaultMap)
}
): String => Seq[String] =
ClasspathImpl.mapped(confString, masterConfs, depConfs, default, defaultMapping)
def parseMapping(
confString: String,
masterConfs: Seq[String],
depConfs: Seq[String],
default: String => Seq[String]
): String => Seq[String] =
union(confString.split(";") map parseSingleMapping(masterConfs, depConfs, default))
ClasspathImpl.parseMapping(confString, masterConfs, depConfs, default)
def parseSingleMapping(
masterConfs: Seq[String],
depConfs: Seq[String],
default: String => Seq[String]
)(confString: String): String => Seq[String] = {
val ms: Seq[(String, Seq[String])] =
trim(confString.split("->", 2)) match {
case x :: Nil => for (a <- parseList(x, masterConfs)) yield (a, default(a))
case x :: y :: Nil =>
val target = parseList(y, depConfs);
for (a <- parseList(x, masterConfs)) yield (a, target)
case _ => sys.error("Invalid configuration '" + confString + "'") // shouldn't get here
}
val m = ms.toMap
s => m.getOrElse(s, Nil)
}
)(confString: String): String => Seq[String] =
ClasspathImpl.parseSingleMapping(masterConfs, depConfs, default)(confString)
def union[A, B](maps: Seq[A => Seq[B]]): A => Seq[B] =
a => maps.foldLeft(Seq[B]()) { _ ++ _(a) } distinct;
ClasspathImpl.union[A, B](maps)
def parseList(s: String, allConfs: Seq[String]): Seq[String] =
(trim(s split ",") flatMap replaceWildcard(allConfs)).distinct
def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] = conf match {
case "" => Nil
case "*" => allConfs
case _ => conf :: Nil
}
ClasspathImpl.parseList(s, allConfs)
def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] =
ClasspathImpl.replaceWildcard(allConfs)(conf)
private def trim(a: Array[String]): List[String] = a.toList.map(_.trim)
def missingConfiguration(in: String, conf: String) =
sys.error("Configuration '" + conf + "' not defined in '" + in + "'")
def allConfigs(conf: Configuration): Seq[Configuration] =
Dag.topologicalSort(conf)(_.extendsConfigs)
def allConfigs(conf: Configuration): Seq[Configuration] = ClasspathImpl.allConfigs(conf)
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
ivyConfigurations in p get data getOrElse Nil
ClasspathImpl.getConfigurations(p, data)
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
configurations.find(_.name == conf)
private[sbt] def productsTask(
dep: ResolvedReference,
conf: String,
data: Settings[Scope],
track: TrackLevel
): Task[Classpath] =
track match {
case TrackLevel.NoTracking => getClasspath(exportedProductsNoTracking, dep, conf, data)
case TrackLevel.TrackIfMissing => getClasspath(exportedProductsIfMissing, dep, conf, data)
case TrackLevel.TrackAlways => getClasspath(exportedProducts, dep, conf, data)
}
private[sbt] def jarProductsTask(
dep: ResolvedReference,
conf: String,
data: Settings[Scope],
track: TrackLevel
): Task[Classpath] =
track match {
case TrackLevel.NoTracking => getClasspath(exportedProductJarsNoTracking, dep, conf, data)
case TrackLevel.TrackIfMissing => getClasspath(exportedProductJarsIfMissing, dep, conf, data)
case TrackLevel.TrackAlways => getClasspath(exportedProductJars, dep, conf, data)
}
ClasspathImpl.confOpt(configurations, conf)
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] =
getClasspath(unmanagedJars, dep, conf, data)
ClasspathImpl.unmanagedLibs(dep, conf, data)
def getClasspath(
key: TaskKey[Classpath],
@ -3629,7 +3567,7 @@ object Classpaths {
conf: String,
data: Settings[Scope]
): Task[Classpath] =
(key in (dep, ConfigKey(conf))) get data getOrElse constant(Nil)
ClasspathImpl.getClasspath(key, dep, conf, data)
def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
flatten(defaultConfiguration in p get data) getOrElse Configurations.Default
@ -3712,13 +3650,14 @@ object Classpaths {
val ref = thisProjectRef.value
val data = settingsData.value
val deps = buildDependencies.value
internalDependenciesImplTask(
ClasspathImpl.internalDependenciesImplTask(
ref,
CompilerPlugin,
CompilerPlugin,
data,
deps,
TrackLevel.TrackAlways
TrackLevel.TrackAlways,
streams.value.log
)
}

View File

@ -36,7 +36,7 @@ import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, Upda
import sbt.nio.file.Glob
import sbt.testing.Framework
import sbt.util.{ Level, Logger }
import xsbti.FileConverter
import xsbti.{ FileConverter, VirtualFile }
import xsbti.compile._
import xsbti.compile.analysis.ReadStamps
@ -151,6 +151,8 @@ object Keys {
// Output paths
val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining")
val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources")
val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
@ -167,6 +169,7 @@ object Keys {
val scalacOptions = taskKey[Seq[String]]("Options for the Scala compiler.").withRank(BPlusTask)
val javacOptions = taskKey[Seq[String]]("Options for the Java compiler.").withRank(BPlusTask)
val incOptions = taskKey[IncOptions]("Options for the incremental compiler.").withRank(BTask)
val extraIncOptions = taskKey[Seq[(String, String)]]("Extra options for the incremental compiler").withRank(CTask)
val compileOrder = settingKey[CompileOrder]("Configures the order in which Java and sources within a single compilation are compiled. Valid values are: JavaThenScala, ScalaThenJava, or Mixed.").withRank(BPlusSetting)
val initialCommands = settingKey[String]("Initial commands to execute when starting up the Scala interpreter.").withRank(AMinusSetting)
val cleanupCommands = settingKey[String]("Commands to execute before the Scala interpreter exits.").withRank(BMinusSetting)
@ -211,14 +214,24 @@ object Keys {
val manipulateBytecode = taskKey[CompileResult]("Manipulates generated bytecode").withRank(BTask)
val compileIncremental = taskKey[CompileResult]("Actually runs the incremental compilation").withRank(DTask)
val previousCompile = taskKey[PreviousResult]("Read the incremental compiler analysis from disk").withRank(DTask)
private[sbt] val compileScalaBackend = taskKey[CompileResult]("Compiles only Scala sources if pipelining is enabled. Compiles both Scala and Java sources otherwise").withRank(Invisible)
private[sbt] val compileEarly = taskKey[CompileAnalysis]("Compiles only Scala sources if pipelining is enabled, and produce an early output (pickle JAR)").withRank(Invisible)
private[sbt] val earlyOutputPing = taskKey[PromiseWrap[Boolean]]("When pipelining is enabled, this returns true when early output (pickle JAR) is created; false otherwise").withRank(Invisible)
private[sbt] val compileJava = taskKey[CompileResult]("Compiles only Java sources (called only for pipelining)").withRank(Invisible)
private[sbt] val compileSplit = taskKey[CompileResult]("When pipelining is enabled, compile Scala then Java; otherwise compile both").withRank(Invisible)
val compileProgress = taskKey[CompileProgress]("Callback used by the compiler to report phase progress")
val compilers = taskKey[Compilers]("Defines the Scala and Java compilers to use for compilation.").withRank(DTask)
val compileAnalysisFilename = taskKey[String]("Defines the filename used for compileAnalysisFile.").withRank(DTask)
val compileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting)
val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting)
val compileIncSetup = taskKey[Setup]("Configures aspects of incremental compilation.").withRank(DTask)
val compilerCache = taskKey[GlobalsCache]("Cache of scala.tools.nsc.Global instances. This should typically be cached so that it isn't recreated every task run.").withRank(DTask)
val stateCompilerCache = AttributeKey[GlobalsCache]("stateCompilerCache", "Internal use: Global cache.")
val classpathEntryDefinesClass = taskKey[File => DefinesClass]("Internal use: provides a function that determines whether the provided file contains a given class.").withRank(Invisible)
val classpathEntryDefinesClassVF = taskKey[VirtualFile => DefinesClass]("Internal use: provides a function that determines whether the provided file contains a given class.").withRank(Invisible)
val doc = taskKey[File]("Generates API documentation.").withRank(AMinusTask)
val copyResources = taskKey[Seq[(File, File)]]("Copies resources to the output directory.").withRank(AMinusTask)
val aggregate = settingKey[Boolean]("Configures task aggregation.").withRank(BMinusSetting)
@ -302,6 +315,7 @@ object Keys {
// Classpath/Dependency Management Keys
type Classpath = Def.Classpath
type VirtualClasspath = Def.VirtualClasspath
val name = settingKey[String]("Project name.").withRank(APlusSetting)
val normalizedName = settingKey[String]("Project name transformed from mixed case and spaces to lowercase and dash-separated.").withRank(BSetting)
@ -333,12 +347,17 @@ object Keys {
val internalDependencyClasspath = taskKey[Classpath]("The internal (inter-project) classpath.").withRank(CTask)
val externalDependencyClasspath = taskKey[Classpath]("The classpath consisting of library dependencies, both managed and unmanaged.").withRank(BMinusTask)
val dependencyClasspath = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(BPlusTask)
val dependencyVirtualClasspath = taskKey[VirtualClasspath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(CTask)
val dependencyPicklePath = taskKey[VirtualClasspath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.")
val internalDependencyPicklePath = taskKey[VirtualClasspath]("The internal (inter-project) pickles. This task is promise-blocked.")
val fullClasspath = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies.").withRank(BPlusTask)
val trackInternalDependencies = settingKey[TrackLevel]("The level of tracking for the internal (inter-project) dependency.").withRank(BSetting)
val exportToInternal = settingKey[TrackLevel]("The level of tracking for this project by the internal callers.").withRank(BSetting)
val exportedProductJars = taskKey[Classpath]("Build products that go on the exported classpath as JARs.")
val exportedProductJarsIfMissing = taskKey[Classpath]("Build products that go on the exported classpath as JARs if missing.")
val exportedProductJarsNoTracking = taskKey[Classpath]("Just the exported classpath as JARs without triggering the compilation.")
val exportedPickles = taskKey[VirtualClasspath]("Build products that go on the exported compilation classpath as JARs. Note this is promise-blocked.").withRank(DTask)
val pickleProducts = taskKey[Seq[VirtualFile]]("Pickle JARs").withRank(DTask)
val internalDependencyAsJars = taskKey[Classpath]("The internal (inter-project) classpath as JARs.")
val dependencyClasspathAsJars = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies, all as JARs.")
val fullClasspathAsJars = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies, all as JARs.")
@ -357,6 +376,7 @@ object Keys {
val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("")
val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.")
val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.")
val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting)
val bspTargetIdentifier = settingKey[BuildTargetIdentifier]("Id for BSP build target.").withRank(DSetting)
val bspWorkspace = settingKey[Map[BuildTargetIdentifier, Scope]]("Mapping of BSP build targets to sbt scopes").withRank(DSetting)
@ -387,7 +407,7 @@ object Keys {
val csrExtraCredentials = taskKey[Seq[lmcoursier.credentials.Credentials]]("")
val csrPublications = taskKey[Seq[(lmcoursier.definitions.Configuration, lmcoursier.definitions.Publication)]]("")
val csrReconciliations = settingKey[Seq[(ModuleMatchers, Reconciliation)]]("Strategy to reconcile version conflicts.")
val internalConfigurationMap = settingKey[Configuration => Configuration]("Maps configurations to the actual configuration used to define the classpath.").withRank(CSetting)
val classpathConfiguration = taskKey[Configuration]("The configuration used to define the classpath.").withRank(CTask)
val ivyConfiguration = taskKey[IvyConfiguration]("General dependency management (Ivy) settings, such as the resolvers and paths to use.").withRank(DTask)
@ -457,7 +477,7 @@ object Keys {
val fullResolvers = taskKey[Seq[Resolver]]("Combines the project resolver, default resolvers, and user-defined resolvers.").withRank(CTask)
val otherResolvers = taskKey[Seq[Resolver]]("Resolvers not included in the main resolver chain, such as those in module configurations.").withRank(CSetting)
val scalaCompilerBridgeResolvers = taskKey[Seq[Resolver]]("Resolvers used to resolve compiler bridges.").withRank(CSetting)
val includePluginResolvers = settingKey[Boolean]("Include the resolvers from the metabuild.").withRank(CSetting)
val includePluginResolvers = settingKey[Boolean]("Include the resolvers from the metabuild.").withRank(CSetting)
val useJCenter = settingKey[Boolean]("Use JCenter as the default repository.").withRank(CSetting)
val moduleConfigurations = settingKey[Seq[ModuleConfiguration]]("Defines module configurations, which override resolvers on a per-module basis.").withRank(BMinusSetting)
val retrievePattern = settingKey[String]("Pattern used to retrieve managed dependencies to the current build.").withRank(DSetting)

View File

@ -971,22 +971,23 @@ object BuiltinCommands {
}
private[sbt] def waitCmd: Command =
Command.arb(_ => (ContinuousCommands.waitWatch: Parser[String]).examples()) { (s0, _) =>
Command.arb(
_ => ContinuousCommands.waitWatch.examples() ~> " ".examples() ~> matched(any.*).examples()
) { (s0, channel) =>
val exchange = StandardMain.exchange
if (exchange.channels.exists(ContinuousCommands.isInWatch)) {
val s1 = exchange.run(s0)
exchange.channels.foreach {
case c if ContinuousCommands.isPending(c) =>
case c => c.prompt(ConsolePromptEvent(s1))
}
val exec: Exec = getExec(s1, Duration.Inf)
val remaining: List[Exec] =
Exec(ContinuousCommands.waitWatch, None) ::
Exec(FailureWall, None) :: s1.remainingCommands
val newState = s1.copy(remainingCommands = exec +: remaining)
if (exec.commandLine.trim.isEmpty) newState
else newState.clearGlobalLog
} else s0
exchange.channelForName(channel) match {
case Some(c) if ContinuousCommands.isInWatch(s0, c) =>
c.prompt(ConsolePromptEvent(s0))
val s1 = exchange.run(s0)
val exec: Exec = getExec(s1, Duration.Inf)
val remaining: List[Exec] =
Exec(s"${ContinuousCommands.waitWatch} $channel", None) ::
Exec(FailureWall, None) :: s1.remainingCommands
val newState = s1.copy(remainingCommands = exec +: remaining)
if (exec.commandLine.trim.isEmpty) newState
else newState.clearGlobalLog
case _ => s0
}
}
private[sbt] def promptChannel = Command.arb(_ => reportParser(PromptChannel)) {

View File

@ -23,6 +23,7 @@ import sbt.util.Logger
import scala.annotation.tailrec
import scala.util.control.NonFatal
import sbt.internal.FastTrackCommands
object MainLoop {
@ -201,7 +202,18 @@ object MainLoop {
StandardMain.exchange.setState(progressState)
StandardMain.exchange.setExec(Some(exec))
StandardMain.exchange.unprompt(ConsoleUnpromptEvent(exec.source))
val newState = Command.process(exec.commandLine, progressState)
/*
* FastTrackCommands.evaluate can be significantly faster than Command.process because
* it avoids an expensive parsing step for internal commands that are easy to parse.
* Dropping (FastTrackCommands.evaluate ... getOrElse) should be functionally identical
* but slower.
*/
val newState = FastTrackCommands.evaluate(progressState, exec.commandLine) getOrElse
Command.process(exec.commandLine, progressState)
// Flush the terminal output after command evaluation to ensure that all output
// is displayed in the thin client before we report the command status.
val terminal = channelName.flatMap(exchange.channelForName(_).map(_.terminal))
terminal.foreach(_.flush())
if (exec.execId.fold(true)(!_.startsWith(networkExecPrefix)) &&
!exec.commandLine.startsWith(networkExecPrefix)) {
val doneEvent = ExecStatusEvent(

View File

@ -131,7 +131,7 @@ object Aggregation {
if (get(showSuccess)) {
if (get(showTiming)) {
val msg = timingString(start, stop, structure.data, currentRef)
if (success) log.success(msg) else log.error(msg)
if (success) log.success(msg) else if (Terminal.get.isSuccessEnabled) log.error(msg)
} else if (success)
log.success("")
}

View File

@ -0,0 +1,427 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.io.File
import java.util.LinkedHashSet
import sbt.SlashSyntax0._
import sbt.Keys._
import sbt.nio.Keys._
import sbt.nio.file.{ Glob, RecursiveGlob }
import sbt.Def.Initialize
import sbt.internal.inc.Analysis
import sbt.internal.inc.JavaInterfaceUtil._
import sbt.internal.util.{ Attributed, Dag, Settings }
import sbt.librarymanagement.{ Configuration, TrackLevel }
import sbt.librarymanagement.Configurations.names
import sbt.std.TaskExtra._
import sbt.util._
import scala.collection.JavaConverters._
import xsbti.compile.CompileAnalysis
private[sbt] object ClasspathImpl {
// Since we can't predict the path for pickleProduct,
// we can't reduce the track level.
// Builds the exported pickle classpath used for build pipelining: every pickle JAR
// from `pickleProducts` is attributed with the early-compile analysis plus the
// module ID, configuration and (optional) API URL, mirroring what
// trackedExportedProducts does for regular class products.
def exportedPicklesTask: Initialize[Task[VirtualClasspath]] =
Def.task {
val module = projectID.value
val config = configuration.value
val products = pickleProducts.value
val analysis = compileEarly.value
// Every pickle product shares the same early analysis; the `analysis` bound in the
// for-comprehension deliberately shadows the outer val (same value).
val xs = products map { _ -> analysis }
for { (f, analysis) <- xs } yield APIMappings
.store(analyzed(f, analysis), apiURL.value)
.put(moduleID.key, module)
.put(configuration.key, config)
}
// Exported (non-JAR unless exportJars is set) build products for the given track
// level, each attributed with analysis, artifact, module ID, configuration and
// API URL so downstream projects can consume them from the classpath.
def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =
Def.task {
// Declare a dynamic dependency on packageBin for correct task scheduling.
val _ = (packageBin / dynamicDependency).value
val art = (artifact in packageBin).value
val module = projectID.value
val config = configuration.value
for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings
.store(analyzed(f, analysis), apiURL.value)
.put(artifact.key, art)
.put(moduleID.key, module)
.put(configuration.key, config)
}
// Same as trackedExportedProducts but always exports the packaged JAR
// (via trackedJarProductsImplTask) instead of class directories.
def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =
Def.task {
// Declare a dynamic dependency on packageBin for correct task scheduling.
val _ = (packageBin / dynamicDependency).value
val art = (artifact in packageBin).value
val module = projectID.value
val config = configuration.value
for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings
.store(analyzed(f, analysis), apiURL.value)
.put(artifact.key, art)
.put(moduleID.key, module)
.put(configuration.key, config)
}
// Dispatches between JAR and directory products based on the `exportJars` setting;
// returns each product file paired with its compile analysis.
private[this] def trackedExportedProductsImplTask(
track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val _ = (packageBin / dynamicDependency).value
val useJars = exportJars.value
if (useJars) trackedJarProductsImplTask(track)
else trackedNonJarProductsImplTask(track)
}
// Directory products with their analysis, honoring the track level intersected with
// exportToInternal:
//  - TrackAlways: always compile.
//  - TrackIfMissing: compile only when no .class file exists under the product dirs.
//  - otherwise: return the directories with the previously stored analysis (or empty).
private[this] def trackedNonJarProductsImplTask(
track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
Def.taskDyn {
val dirs = productDirectories.value
val view = fileTreeView.value
// True when any compiled class file already exists under the product directories.
def containsClassFile(): Boolean =
view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).nonEmpty
TrackLevel.intersection(track, exportToInternal.value) match {
case TrackLevel.TrackAlways =>
Def.task {
products.value map { (_, compile.value) }
}
case TrackLevel.TrackIfMissing if !containsClassFile() =>
Def.task {
products.value map { (_, compile.value) }
}
case _ =>
Def.task {
// No compilation triggered: reuse the last persisted analysis if any.
val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
dirs.map(_ -> analysis)
}
}
}
/**
 * Packaged-JAR product with its compile analysis, honoring the requested track
 * level intersected with `exportToInternal`:
 *  - TrackAlways: always run packageBin (and compile).
 *  - TrackIfMissing: run packageBin only when the jar is absent on disk.
 *  - otherwise: return the existing jar path with the previously stored analysis.
 */
private[this] def trackedJarProductsImplTask(
    track: TrackLevel
): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
  Def.taskDyn {
    val jar = (artifactPath in packageBin).value
    TrackLevel.intersection(track, exportToInternal.value) match {
      case TrackLevel.TrackAlways =>
        Def.task {
          Seq((packageBin.value, compile.value))
        }
      case TrackLevel.TrackIfMissing if !jar.exists =>
        Def.task {
          Seq((packageBin.value, compile.value))
        }
      case _ =>
        Def.task {
          // Avoid Option#get: fall back to an empty analysis when none was persisted.
          val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
          Seq(jar -> analysis)
        }
    }
  }
// The inter-project classpath: class products of all (transitive) internal
// dependencies of this project/configuration, selected by trackInternalDependencies.
def internalDependencyClasspathTask: Initialize[Task[Classpath]] = {
Def.taskDyn {
// Declare dynamic dependencies on every exported-products variant so the task
// graph is complete regardless of which track level is chosen at runtime.
val _ = (
(exportedProductsNoTracking / transitiveClasspathDependency).value,
(exportedProductsIfMissing / transitiveClasspathDependency).value,
(exportedProducts / transitiveClasspathDependency).value,
(exportedProductJarsNoTracking / transitiveClasspathDependency).value,
(exportedProductJarsIfMissing / transitiveClasspathDependency).value,
(exportedProductJars / transitiveClasspathDependency).value
)
internalDependenciesImplTask(
thisProjectRef.value,
classpathConfiguration.value,
configuration.value,
settingsData.value,
buildDependencies.value,
trackInternalDependencies.value,
streams.value.log,
)
}
}
// Joins the exported class products of all internal dependencies, choosing the
// exported-products key variant that matches the given track level.
// `log` is threaded through to interDependencies.
def internalDependenciesImplTask(
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
track: TrackLevel,
log: Logger
): Initialize[Task[Classpath]] =
Def.value {
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
exportedProductsNoTracking,
exportedProductsIfMissing,
exportedProducts
)
}
// Pipelining variant of the internal classpath: collects exported pickle JARs of
// internal dependencies. Always TrackAlways — pickle paths cannot be predicted,
// so the track level is never reduced (see exportedPicklesTask).
def internalDependencyPicklePathTask: Initialize[Task[VirtualClasspath]] = {
// Same wiring as internalDependenciesImplTask, but every track level resolves
// to the single exportedPickles key.
def implTask(
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
track: TrackLevel,
log: Logger
): Initialize[Task[VirtualClasspath]] =
Def.value {
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
exportedPickles,
exportedPickles,
exportedPickles
)
}
Def.taskDyn {
implTask(
thisProjectRef.value,
classpathConfiguration.value,
configuration.value,
settingsData.value,
buildDependencies.value,
TrackLevel.TrackAlways,
streams.value.log,
)
}
}
// The inter-project classpath as packaged JARs (rather than class directories).
def internalDependencyJarsTask: Initialize[Task[Classpath]] =
Def.taskDyn {
internalDependencyJarsImplTask(
thisProjectRef.value,
classpathConfiguration.value,
configuration.value,
settingsData.value,
buildDependencies.value,
trackInternalDependencies.value,
streams.value.log,
)
}
// JAR counterpart of internalDependenciesImplTask: joins exported product JARs of
// all internal dependencies, keyed by the requested track level.
private def internalDependencyJarsImplTask(
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
track: TrackLevel,
log: Logger
): Initialize[Task[Classpath]] =
Def.value {
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
exportedProductJarsNoTracking,
exportedProductJarsIfMissing,
exportedProductJars
)
}
// Unmanaged JARs of this project and all of its internal dependencies.
def unmanagedDependenciesTask: Initialize[Task[Classpath]] =
Def.taskDyn {
unmanagedDependencies0(
thisProjectRef.value,
configuration.value,
settingsData.value,
buildDependencies.value,
streams.value.log
)
}
// Collects unmanagedJars across the inter-project dependency graph. `includeSelf`
// is true (unlike the products tasks) so this project's own unmanaged jars are
// included; all three track levels resolve to the same unmanagedJars key.
def unmanagedDependencies0(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
deps: BuildDependencies,
log: Logger
): Initialize[Task[Classpath]] =
Def.value {
interDependencies(
projectRef,
deps,
conf,
conf,
data,
TrackLevel.TrackAlways,
true,
log
)(
unmanagedJars,
unmanagedJars,
unmanagedJars
)
}
// Unmanaged jars of a single project/configuration, or an empty classpath when the
// key is not defined in that scope.
def unmanagedLibs(
dep: ResolvedReference,
conf: String,
data: Settings[Scope]
): Task[Classpath] =
getClasspath(unmanagedJars, dep, conf, data)
/**
 * Joins the classpath contributions of every inter-project dependency of
 * `projectRef` in `conf`, as computed by [[interSort]].
 *
 * One of the three task keys is selected according to `track`, looked up in each
 * dependency's scope, and the resulting tasks are joined, flattened and
 * deduplicated. When `includeSelf` is false, this project's own entries for
 * `conf`/`self` are excluded. `log` is currently unused but kept for interface
 * stability.
 */
def interDependencies[A](
    projectRef: ProjectRef,
    deps: BuildDependencies,
    conf: Configuration,
    self: Configuration,
    data: Settings[Scope],
    track: TrackLevel,
    includeSelf: Boolean,
    log: Logger
)(
    noTracking: TaskKey[Seq[A]],
    trackIfMissing: TaskKey[Seq[A]],
    trackAlways: TaskKey[Seq[A]]
): Task[Seq[A]] = {
  // The track level is fixed for the whole traversal, so resolve the key once.
  val key = track match {
    case TrackLevel.NoTracking     => noTracking
    case TrackLevel.TrackIfMissing => trackIfMissing
    case TrackLevel.TrackAlways    => trackAlways
  }
  // Build one classpath task per applicable (project, configuration) pair,
  // keeping first-occurrence order and dropping duplicates (as the original
  // LinkedHashSet did).
  val classpathTasks =
    interSort(projectRef, conf, data, deps).collect {
      case (dep, c) if includeSelf || dep != projectRef || (conf.name != c && self.name != c) =>
        getClasspath(key, dep, c, data)
    }.distinct
  classpathTasks.join.map(_.flatten.distinct)
}
// Wraps a value in a blank Attributed and attaches its compile analysis under
// Keys.analysis so classpath consumers can retrieve it.
def analyzed[A](data: A, analysis: CompileAnalysis) =
Attributed.blank(data).put(Keys.analysis, analysis)
// Depth-first traversal of the inter-project dependency graph starting from
// (projectRef, conf). Returns every reachable (project, configurationName) pair in
// first-visit order; the LinkedHashSet both deduplicates and preserves that order.
def interSort(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
deps: BuildDependencies
): Seq[(ProjectRef, String)] = {
val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
def visit(p: ProjectRef, c: Configuration): Unit = {
// `c` plus everything it extends, in topological order.
val applicableConfigs = allConfigs(c)
for {
ac <- applicableConfigs
} // add all configurations in this project
visited add (p -> ac.name)
// NOTE(review): master configurations are read from the root `projectRef`, not
// the currently visited `p` — this matches the historical Classpaths.interSort
// behavior; confirm it is intentional before changing.
val masterConfs = names(getConfigurations(projectRef, data).toVector)
for {
ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
} {
val configurations = getConfigurations(dep, data)
// Parse the dependency's configuration mapping (e.g. "compile->test"),
// defaulting to "compile" / "*->compile".
val mapping =
mapped(confMapping, masterConfs, names(configurations.toVector), "compile", "*->compile")
// map master configuration 'c' and all extended configurations to the appropriate dependency configuration
for {
ac <- applicableConfigs
depConfName <- mapping(ac.name)
} {
// Recurse only into configurations that exist on the dependency and have not
// already been visited (guards against cycles and repeated work).
for {
depConf <- confOpt(configurations, depConfName)
} if (!visited((dep, depConfName))) {
visit(dep, depConf)
}
}
}
}
visit(projectRef, conf)
visited.toSeq
}
/**
 * Builds a configuration-mapping function from an optional user-supplied mapping
 * string (e.g. "compile->runtime"), falling back to `default` when absent.
 * Master configurations with no explicit mapping fall back to `defaultMapping`,
 * which is only parsed if actually needed.
 */
def mapped(
    confString: Option[String],
    masterConfs: Seq[String],
    depConfs: Seq[String],
    default: String,
    defaultMapping: String
): String => Seq[String] = {
  // Identity-style fallback: an unmapped configuration maps to itself.
  lazy val fallback = parseMapping(defaultMapping, masterConfs, depConfs, conf => List(conf))
  parseMapping(confString.getOrElse(default), masterConfs, depConfs, fallback)
}
/**
 * Parses a full configuration-mapping string. Clauses are separated by ';'
 * (e.g. "compile->runtime;test->test"); the resulting function is the union of
 * the per-clause mappings.
 */
def parseMapping(
    confString: String,
    masterConfs: Seq[String],
    depConfs: Seq[String],
    default: String => Seq[String]
): String => Seq[String] = {
  val clauses =
    confString.split(";").toSeq.map(parseSingleMapping(masterConfs, depConfs, default))
  union(clauses)
}
/**
 * Parses one mapping clause. "a,b" maps each listed master configuration to its
 * `default`; "a,b->x,y" maps each to the listed dependency configurations.
 * Unknown configurations map to Nil.
 */
def parseSingleMapping(
    masterConfs: Seq[String],
    depConfs: Seq[String],
    default: String => Seq[String]
)(confString: String): String => Seq[String] = {
  val pairs: Seq[(String, Seq[String])] =
    trim(confString.split("->", 2)) match {
      case master :: Nil =>
        parseList(master, masterConfs).map(a => a -> default(a))
      case master :: depSide :: Nil =>
        val targets = parseList(depSide, depConfs)
        parseList(master, masterConfs).map(_ -> targets)
      case _ => sys.error("Invalid configuration '" + confString + "'") // shouldn't get here
    }
  val lookup = pairs.toMap
  conf => lookup.getOrElse(conf, Nil)
}
/** Combines several one-to-many mappings into one, concatenating their results
  * in order and removing duplicates. */
def union[A, B](maps: Seq[A => Seq[B]]): A => Seq[B] =
  a => maps.flatMap(f => f(a)).distinct
/** Splits a comma-separated configuration list, trims each entry, expands "*" to
  * all known configurations, drops empty entries, and deduplicates. */
def parseList(s: String, allConfs: Seq[String]): Seq[String] =
  trim(s.split(",")).flatMap(replaceWildcard(allConfs)).distinct
/** Expands a single configuration token: "" yields nothing, "*" yields every known
  * configuration, anything else yields itself. */
def replaceWildcard(allConfs: Seq[String])(conf: String): Seq[String] =
  if (conf.isEmpty) Nil
  else if (conf == "*") allConfs
  else List(conf)
private def trim(a: Array[String]): List[String] = a.toList.map(_.trim)
// Returns `conf` together with every configuration it (transitively) extends,
// in topological order.
def allConfigs(conf: Configuration): Seq[Configuration] =
Dag.topologicalSort(conf)(_.extendsConfigs)
// The ivy configurations declared on the given project, or Nil when the setting
// is undefined in that scope.
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
(p / ivyConfigurations).get(data).getOrElse(Nil)
/** Looks up a configuration by name among the given configurations, if present. */
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
  configurations.collectFirst { case c if c.name == conf => c }
// Convenience overload: resolves the classpath key in the scope named by the
// Configuration, delegating to the String-based overload.
def getClasspath[A](
key: TaskKey[Seq[A]],
dep: ResolvedReference,
conf: Configuration,
data: Settings[Scope]
): Task[Seq[A]] = getClasspath(key, dep, conf.name, data)
/**
 * Looks up `key` in the scope (dep / conf) of the settings map; yields a constant
 * empty task when the key is not defined there, so callers never have to handle
 * a missing scope themselves.
 */
def getClasspath[A](
    key: TaskKey[Seq[A]],
    dep: ResolvedReference,
    conf: String,
    data: Settings[Scope]
): Task[Seq[A]] =
  (dep / ConfigKey(conf) / key).get(data).getOrElse(constant(Nil))
}

View File

@ -170,17 +170,15 @@ private[sbt] final class CommandExchange {
currentExec.filter(_.source.map(_.channelName) == Some(c.name)).foreach { e =>
Util.ignoreResult(NetworkChannel.cancel(e.execId, e.execId.getOrElse("0")))
}
if (ContinuousCommands.isInWatch(c)) {
try commandQueue.put(Exec(s"${ContinuousCommands.stopWatch} ${c.name}", None))
catch { case _: InterruptedException => }
}
try commandQueue.put(Exec(s"${ContinuousCommands.stopWatch} ${c.name}", None))
catch { case _: InterruptedException => }
}
private[this] def mkAskUser(
name: String,
): (State, CommandChannel) => UITask = { (state, channel) =>
ContinuousCommands
.watchUITaskFor(channel)
.watchUITaskFor(state, channel)
.getOrElse(new UITask.AskUserTask(state, channel))
}
@ -353,8 +351,8 @@ private[sbt] final class CommandExchange {
def prompt(event: ConsolePromptEvent): Unit = {
currentExecRef.set(null)
channels.foreach {
case c if ContinuousCommands.isInWatch(c) =>
case c => c.prompt(event)
case c if ContinuousCommands.isInWatch(lastState.get, c) =>
case c => c.prompt(event)
}
}
def unprompt(event: ConsoleUnpromptEvent): Unit = channels.foreach(_.unprompt(event))
@ -459,10 +457,9 @@ private[sbt] final class CommandExchange {
Option(currentExecRef.get).foreach(cancel)
mt.channel.prompt(ConsolePromptEvent(lastState.get))
case t if t.startsWith(ContinuousCommands.stopWatch) =>
ContinuousCommands.stopWatchImpl(mt.channel.name)
mt.channel match {
case c: NetworkChannel if !c.isInteractive => exit(mt)
case _ => mt.channel.prompt(ConsolePromptEvent(lastState.get))
case _ =>
}
commandQueue.add(Exec(t, None, None))
case `TerminateAction` => exit(mt)

View File

@ -108,8 +108,8 @@ private[sbt] object Continuous extends DeprecatedContinuous {
case Some(c) => s -> c
case None => StandardMain.exchange.run(s) -> ConsoleChannel.defaultName
}
ContinuousCommands.setupWatchState(channel, initialCount, commands, s1)
s"${ContinuousCommands.runWatch} $channel" :: s1
val ws = ContinuousCommands.setupWatchState(channel, initialCount, commands, s1)
s"${ContinuousCommands.runWatch} $channel" :: ws
}
@deprecated("The input task version of watch is no longer available", "1.4.0")
@ -1056,7 +1056,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
val commands: Seq[String],
beforeCommandImpl: (State, mutable.Set[DynamicInput]) => State,
val afterCommand: State => State,
val afterWatch: () => Unit,
val afterWatch: State => State,
val callbacks: Callbacks,
val dynamicInputs: mutable.Set[DynamicInput],
val pending: Boolean,
@ -1102,7 +1102,8 @@ private[sbt] object ContinuousCommands {
"",
Int.MaxValue
)
private[this] val watchStates = new ConcurrentHashMap[String, ContinuousState]
private[this] val watchStates =
AttributeKey[Map[String, ContinuousState]]("sbt-watch-states", Int.MaxValue)
private[sbt] val runWatch = networkExecPrefix + "runWatch"
private[sbt] val preWatch = networkExecPrefix + "preWatch"
private[sbt] val postWatch = networkExecPrefix + "postWatch"
@ -1120,10 +1121,10 @@ private[sbt] object ContinuousCommands {
"",
Int.MaxValue
)
private[sbt] val setupWatchState: (String, Int, Seq[String], State) => Unit =
private[sbt] val setupWatchState: (String, Int, Seq[String], State) => State =
(channelName, count, commands, state) => {
watchStates.get(channelName) match {
case null =>
state.get(watchStates).flatMap(_.get(channelName)) match {
case None =>
val extracted = Project.extract(state)
val repo = state.get(globalFileTreeRepository) match {
case Some(r) => localRepo(r)
@ -1161,27 +1162,37 @@ private[sbt] object ContinuousCommands {
stateWithCache.put(Continuous.DynamicInputs, dynamicInputs)
},
afterCommand = state => {
watchStates.get(channelName) match {
case null =>
case ws => watchStates.put(channelName, ws.incremented)
val newWatchState = state.get(watchStates) match {
case None => state
case Some(ws) =>
ws.get(channelName) match {
case None => state
case Some(cs) => state.put(watchStates, ws + (channelName -> cs.incremented))
}
}
val restoredState = state.get(stashedRepo) match {
val restoredState = newWatchState.get(stashedRepo) match {
case None => throw new IllegalStateException(s"No stashed repository for $state")
case Some(r) => state.put(globalFileTreeRepository, r)
case Some(r) => newWatchState.put(globalFileTreeRepository, r)
}
restoredState.remove(persistentFileStampCache).remove(Continuous.DynamicInputs)
},
afterWatch = () => {
watchStates.remove(channelName)
afterWatch = state => {
LogExchange.unbindLoggerAppenders(channelName + "-watch")
repo.close()
state.get(watchStates) match {
case None => state
case Some(ws) => state.put(watchStates, ws - channelName)
}
},
callbacks = cb,
dynamicInputs = dynamicInputs,
pending = false,
)
Util.ignoreResult(watchStates.put(channelName, s))
case cs =>
state.get(watchStates) match {
case None => state.put(watchStates, Map(channelName -> s))
case Some(ws) => state.put(watchStates, ws + (channelName -> s))
}
case Some(cs) =>
val cmd = cs.commands.mkString("; ")
val msg =
s"Tried to start new watch while channel, '$channelName', was already watching '$cmd'"
@ -1194,28 +1205,26 @@ private[sbt] object ContinuousCommands {
Command.arb { state =>
(cmdParser(name) ~> channelParser).map(channel => () => updateState(channel, state))
} { case (_, newState) => newState() }
private[this] val runWatchCommand = watchCommand(runWatch) { (channel, state) =>
watchStates.get(channel) match {
case null => state
case cs =>
private[sbt] val runWatchCommand = watchCommand(runWatch) { (channel, state) =>
state.get(watchStates).flatMap(_.get(channel)) match {
case None => state
case Some(cs) =>
val pre = StashOnFailure :: s"$SetTerminal $channel" :: s"$preWatch $channel" :: Nil
val post = FailureWall :: PopOnFailure :: s"$SetTerminal ${ConsoleChannel.defaultName}" ::
s"$postWatch $channel" :: waitWatch :: Nil
s"$postWatch $channel" :: s"$waitWatch $channel" :: Nil
pre ::: cs.commands.toList ::: post ::: state
}
}
private[sbt] def watchUITaskFor(channel: CommandChannel): Option[UITask] =
watchStates.get(channel.name) match {
case null => None
case cs => Some(new WatchUITask(channel, cs))
}
private[sbt] def isInWatch(channel: CommandChannel): Boolean =
watchStates.get(channel.name) != null
private[sbt] def isPending(channel: CommandChannel): Boolean =
Option(watchStates.get(channel.name)).fold(false)(_.pending)
private[sbt] def watchUITaskFor(state: State, channel: CommandChannel): Option[UITask] =
state.get(watchStates).flatMap(_.get(channel.name)).map(new WatchUITask(channel, _, state))
private[sbt] def isInWatch(state: State, channel: CommandChannel): Boolean =
state.get(watchStates).exists(_.contains(channel.name))
private[sbt] def isPending(state: State, channel: CommandChannel): Boolean =
state.get(watchStates).exists(_.get(channel.name).exists(_.pending))
private[this] class WatchUITask(
override private[sbt] val channel: CommandChannel,
cs: ContinuousState,
state: State
) extends Thread(s"sbt-${channel.name}-watch-ui-thread")
with UITask {
override private[sbt] def reader: UITask.Reader = () => {
@ -1229,8 +1238,12 @@ private[sbt] object ContinuousCommands {
recursive = false
)
}
val ws = watchState(channel.name)
watchStates.put(channel.name, ws.withPending(true))
val ws = state.get(watchStates) match {
case None => throw new IllegalStateException("no watch states")
case Some(ws) =>
ws.get(channel.name)
.getOrElse(throw new IllegalStateException(s"no watch state for ${channel.name}"))
}
exitAction match {
// Use a Left so that the client can immediately exit watch via <enter>
case Watch.CancelWatch => Left(s"$stopWatch ${channel.name}")
@ -1248,33 +1261,43 @@ private[sbt] object ContinuousCommands {
}
}
@inline
private[this] def watchState(channel: String): ContinuousState = watchStates.get(channel) match {
case null => throw new IllegalStateException(s"No watch state for $channel")
case s => s
}
private[this] def watchState(state: State, channel: String): ContinuousState =
state.get(watchStates).flatMap(_.get(channel)) match {
case None => throw new IllegalStateException(s"no watch state for $channel")
case Some(s) => s
}
private[this] val preWatchCommand = watchCommand(preWatch) { (channel, state) =>
StandardMain.exchange.channelForName(channel).foreach(_.terminal.setPrompt(Prompt.Watch))
watchState(channel).beforeCommand(state)
private[sbt] val preWatchCommand = watchCommand(preWatch) { (channel, state) =>
watchState(state, channel).beforeCommand(state)
}
private[this] val postWatchCommand = watchCommand(postWatch) { (channel, state) =>
StandardMain.exchange.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel))))
val ws = watchState(channel)
watchStates.put(channel, ws.withPending(false))
ws.afterCommand(state)
private[sbt] val postWatchCommand = watchCommand(postWatch) { (channel, state) =>
val cs = watchState(state, channel)
StandardMain.exchange.channelForName(channel).foreach { c =>
c.terminal.setPrompt(Prompt.Watch)
c.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel))))
}
val postState = state.get(watchStates) match {
case None => state
case Some(ws) => state.put(watchStates, ws + (channel -> cs.withPending(false)))
}
cs.afterCommand(postState)
}
private[this] val stopWatchCommand = watchCommand(stopWatch) { (channel, state) =>
stopWatchImpl(channel)
state
}
private[sbt] def stopWatchImpl(channelName: String): Unit = {
StandardMain.exchange.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channelName))))
Option(watchStates.get(channelName)).foreach { ws =>
ws.afterWatch()
ws.callbacks.onExit()
private[sbt] val stopWatchCommand = watchCommand(stopWatch) { (channel, state) =>
state.get(watchStates).flatMap(_.get(channel)) match {
case Some(cs) =>
val afterWatchState = cs.afterWatch(state)
cs.callbacks.onExit()
StandardMain.exchange
.channelForName(channel)
.foreach(_.unprompt(ConsoleUnpromptEvent(Some(CommandSource(channel)))))
afterWatchState.get(watchStates) match {
case None => afterWatchState
case Some(w) => afterWatchState.put(watchStates, w - channel)
}
case _ => state
}
}
private[this] val failWatchCommand = watchCommand(failWatch) { (channel, state) =>
private[sbt] val failWatchCommand = watchCommand(failWatch) { (channel, state) =>
state.fail
}
/*

View File

@ -0,0 +1,53 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import BasicCommandStrings._
import BasicCommands._
import BuiltinCommands.{ setTerminalCommand, shell, waitCmd }
import ContinuousCommands._
import sbt.internal.util.complete.Parser
/** This is used to speed up command parsing. */
private[sbt] object FastTrackCommands {

  /**
   * Builds a fast-path handler that evaluates `command` directly with its own
   * parser, skipping full command resolution. When `arguments` is false the
   * parser is fed an empty string instead of the raw command line.
   */
  private def fromCommand(
      cmd: String,
      command: Command,
      arguments: Boolean = true,
  ): (State, String) => Option[State] = { (state, line) =>
    val input = if (arguments) line else ""
    Parser.parse(input, command.parser(state)).toOption.map(next => next())
  }

  // Maps a command's leading token to a handler that can evaluate the full
  // command line without scanning every registered command.
  private val commands = Map[String, (State, String) => Option[State]](
    FailureWall -> { (s, c) => if (c == FailureWall) Some(s) else None },
    StashOnFailure -> fromCommand(StashOnFailure, stashOnFailure, arguments = false),
    PopOnFailure -> fromCommand(PopOnFailure, popOnFailure, arguments = false),
    Shell -> fromCommand(Shell, shell),
    SetTerminal -> fromCommand(SetTerminal, setTerminalCommand),
    failWatch -> fromCommand(failWatch, failWatchCommand),
    preWatch -> fromCommand(preWatch, preWatchCommand),
    postWatch -> fromCommand(postWatch, postWatchCommand),
    runWatch -> fromCommand(runWatch, runWatchCommand),
    stopWatch -> fromCommand(stopWatch, stopWatchCommand),
    waitWatch -> fromCommand(waitWatch, waitCmd),
  )

  /**
   * Attempts to evaluate `cmd` via the fast-track table. Returns the resulting
   * state when the leading token matches a fast-track command and parsing
   * succeeds; returns None to defer to regular command processing.
   */
  private[sbt] def evaluate(state: State, cmd: String): Option[State] =
    cmd.trim.split(" ").headOption.flatMap(commands.get) match {
      case Some(handler) => handler(state, cmd)
      case None          => None
    }
}

View File

@ -116,6 +116,7 @@ object SysProp {
def banner: Boolean = getOrTrue("sbt.banner")
def turbo: Boolean = getOrFalse("sbt.turbo")
def pipelining: Boolean = getOrFalse("sbt.pipelining")
def taskTimings: Boolean = getOrFalse("sbt.task.timings")
def taskTimingsOnShutdown: Boolean = getOrFalse("sbt.task.timings.on.shutdown")

View File

@ -0,0 +1,69 @@
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package internal
import java.util.concurrent.ConcurrentHashMap
import sbt.internal.inc.Stamper
import xsbti.{ FileConverter, VirtualFile, VirtualFileRef }
import xsbti.compile.analysis.{ Stamp => XStamp }
/**
* Cache based on path and its stamp.
*/
sealed trait VirtualFileValueCache[A] {

  /** Discards all cached values. */
  def clear(): Unit

  /**
   * Returns the cached value for a file, recomputing it when the file's
   * stamp no longer matches the one the value was computed against.
   */
  def get: VirtualFile => A
}
object VirtualFileValueCache {

  /**
   * Creates a cache whose entries are invalidated by content-hash stamps.
   * A mutable stamp cache is threaded through `Stamper.timeWrap` —
   * presumably so unchanged files (by timestamp) are not re-hashed; confirm
   * against the Stamper implementation.
   */
  def apply[A](converter: FileConverter)(f: VirtualFile => A): VirtualFileValueCache[A] = {
    import collection.mutable.{ HashMap, Map }
    // Per-cache memo of previously computed stamps, keyed by file reference.
    val stampCache: Map[VirtualFileRef, (Long, XStamp)] = new HashMap
    make(
      Stamper.timeWrap(stampCache, converter, {
        case (vf: VirtualFile) => Stamper.forContentHash(vf)
      })
    )(f)
  }

  /**
   * Creates a cache that recomputes a file's value whenever `stamp` yields a
   * stamp that is not equivalent to the one stored with the cached value.
   */
  def make[A](stamp: VirtualFile => XStamp)(f: VirtualFile => A): VirtualFileValueCache[A] =
    new VirtualFileValueCache0[A](stamp, f)
}
/**
 * Default [[VirtualFileValueCache]] implementation: one cache cell per file,
 * each holding a single value together with the stamp it was computed against.
 */
private[this] final class VirtualFileValueCache0[A](
    getStamp: VirtualFile => XStamp,
    make: VirtualFile => A
)(
    implicit equiv: Equiv[XStamp]
) extends VirtualFileValueCache[A] {
  // One cell per file; cells are installed at most once via putIfAbsent.
  private[this] val backing = new ConcurrentHashMap[VirtualFile, VirtualFileCache]
  def clear(): Unit = backing.clear()
  def get = file => {
    // putIfAbsent returns null when our candidate cell won the race,
    // otherwise the previously installed cell for this file.
    val ifAbsent = new VirtualFileCache(file)
    val cache = backing.putIfAbsent(file, ifAbsent)
    (if (cache eq null) ifAbsent else cache).get()
  }

  /** Caches a single value along with the stamp it was computed against. */
  private[this] final class VirtualFileCache(file: VirtualFile) {
    // Guarded by this cell's monitor (see synchronized in get()).
    private[this] var stampedValue: Option[(XStamp, A)] = None
    // Recomputes the value only when the current stamp differs from the
    // stored one; synchronized so concurrent readers of the same file see a
    // consistent (stamp, value) pair.
    def get(): A = synchronized {
      val latest = getStamp(file)
      stampedValue match {
        case Some((stamp, value)) if (equiv.equiv(latest, stamp)) => value
        case _ => update(latest)
      }
    }
    private[this] def update(stamp: XStamp): A = {
      val value = make(file)
      stampedValue = Some((stamp, value))
      value
    }
  }
}

View File

@ -149,7 +149,7 @@ final class NetworkChannel(
protected def authOptions: Set[ServerAuthentication] = auth
override def mkUIThread: (State, CommandChannel) => UITask = (state, command) => {
if (interactive.get || ContinuousCommands.isInWatch(this)) mkUIThreadImpl(state, command)
if (interactive.get || ContinuousCommands.isInWatch(state, this)) mkUIThreadImpl(state, command)
else
new UITask {
override private[sbt] def channel = NetworkChannel.this
@ -653,6 +653,13 @@ final class NetworkChannel(
import sjsonnew.BasicJsonProtocol._
import scala.collection.JavaConverters._
private[this] val outputBuffer = new LinkedBlockingQueue[Byte]
private[this] val flushFuture = new AtomicReference[java.util.concurrent.Future[_]]
private[this] def doFlush()() = {
val list = new java.util.ArrayList[Byte]
outputBuffer.synchronized(outputBuffer.drainTo(list))
if (!list.isEmpty) jsonRpcNotify(Serialization.systemOut, list.asScala.toSeq)
}
private[this] lazy val outputStream: OutputStream with AutoCloseable = new OutputStream
with AutoCloseable {
/*
@ -670,28 +677,21 @@ final class NetworkChannel(
Executors.newSingleThreadScheduledExecutor(
r => new Thread(r, s"$name-output-buffer-timer-thread")
)
private[this] val buffer = new LinkedBlockingQueue[Byte]
private[this] val future = new AtomicReference[java.util.concurrent.Future[_]]
private[this] def doFlush()() = {
val list = new java.util.ArrayList[Byte]
buffer.synchronized(buffer.drainTo(list))
if (!list.isEmpty) jsonRpcNotify(Serialization.systemOut, list.asScala.toSeq)
}
override def close(): Unit = {
Util.ignoreResult(executor.shutdownNow())
doFlush()
}
override def write(b: Int): Unit = buffer.synchronized {
buffer.put(b.toByte)
override def write(b: Int): Unit = outputBuffer.synchronized {
outputBuffer.put(b.toByte)
}
override def flush(): Unit = {
future.get match {
flushFuture.get match {
case null =>
try {
future.set(
flushFuture.set(
executor.schedule(
(() => {
future.set(null)
flushFuture.set(null)
doFlush()
}): Runnable,
20,
@ -702,8 +702,8 @@ final class NetworkChannel(
case f =>
}
}
override def write(b: Array[Byte]): Unit = buffer.synchronized {
b.foreach(buffer.put)
override def write(b: Array[Byte]): Unit = outputBuffer.synchronized {
b.foreach(outputBuffer.put)
}
override def write(b: Array[Byte], off: Int, len: Int): Unit = {
write(java.util.Arrays.copyOfRange(b, off, off + len))
@ -789,7 +789,8 @@ final class NetworkChannel(
override def isAnsiSupported: Boolean = getProperty(_.isAnsiSupported, false).getOrElse(false)
override def isEchoEnabled: Boolean = waitForPending(_.isEchoEnabled)
override def isSuccessEnabled: Boolean =
prompt != Prompt.Batch || ContinuousCommands.isInWatch(NetworkChannel.this)
prompt != Prompt.Batch ||
StandardMain.exchange.withState(ContinuousCommands.isInWatch(_, NetworkChannel.this))
override lazy val isColorEnabled: Boolean = waitForPending(_.isColorEnabled)
override lazy val isSupershellEnabled: Boolean = waitForPending(_.isSupershellEnabled)
getProperties(false)
@ -880,6 +881,7 @@ final class NetworkChannel(
catch { case _: InterruptedException => }
}
override def flush(): Unit = doFlush()
override def toString: String = s"NetworkTerminal($name)"
override def close(): Unit = if (closed.compareAndSet(false, true)) {
val threads = blockedThreads.synchronized {

View File

@ -14,7 +14,7 @@ object Dependencies {
private val ioVersion = nightlyVersion.getOrElse("1.4.0-M6")
private val lmVersion =
sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("1.4.0-M2")
val zincVersion = nightlyVersion.getOrElse("1.4.0-M7")
val zincVersion = nightlyVersion.getOrElse("1.4.0-M8")
private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

View File

@ -0,0 +1,13 @@
// Scripted test build: exercises build pipelining (usePipelining) with
// Java-only subprojects, where `use` depends on `dep`.
ThisBuild / scalaVersion := "2.13.3"
ThisBuild / usePipelining := true

lazy val root = (project in file("."))
  .aggregate(dep, use)
  .settings(
    name := "pipelining Java",
  )

lazy val dep = project
// `use` compiles against dep's output, so breaking dep must fail use/compile.
lazy val use = project
  .dependsOn(dep)

View File

@ -0,0 +1,2 @@
// Scripted test fixture: an empty class copied into `dep` after A.java is
// deleted, so the downstream `use` project (which references A.x) fails to compile.
public class Break {
}

View File

@ -0,0 +1,3 @@
// Scripted test fixture: provides the static field A.x that class B in the
// downstream project reads.
public class A {
  public static int x = 3;
}

View File

@ -0,0 +1,5 @@
> use/compile
$ delete dep/A.java
$ copy-file changes/Break.java dep/Break.java
-> use/compile

View File

@ -0,0 +1,3 @@
// Scripted test fixture: references A.x, so deleting A.java from `dep` is
// expected to break this compile.
public class B {
  public static int y = A.x;
}

View File

@ -0,0 +1,22 @@
// Scripted test build: exercises basic build pipelining between two
// subprojects, where `use` depends on `dep`.
ThisBuild / scalaVersion := "2.13.3"
ThisBuild / usePipelining := true

lazy val root = (project in file("."))
  .aggregate(dep, use)
  .settings(
    name := "pipelining basics",
  )

lazy val dep = project
lazy val use = project
  .dependsOn(dep)
  .settings(
    // Asserts that use's internal dependency pickle path consists of exactly
    // dep's jar (dep_2.13-0.1.0-SNAPSHOT.jar) after dep has been compiled.
    TaskKey[Unit]("checkPickle") := {
      val s = streams.value
      val x = (dep / Compile / compile).value
      val picklePath = (Compile / internalDependencyPicklePath).value
      assert(picklePath.size == 1 &&
        picklePath.head.data.name == "dep_2.13-0.1.0-SNAPSHOT.jar", s"picklePath = ${picklePath}")
    },
  )

View File

@ -0,0 +1,3 @@
package example

// Scripted test fixture: copied over dep's A.scala to remove example.A (and
// its field x), so the aggregated compile that includes B is expected to fail.
object Break

View File

@ -0,0 +1,5 @@
package example

// Scripted test fixture: provides A.x, which example.B in the downstream
// project reads.
object A {
  val x = 3
}

View File

@ -0,0 +1,9 @@
> dep/compile
> use/checkPickle
> compile
# breaking subproject dep should trigger a compile failure
$ copy-file changes/Break.scala dep/A.scala
-> compile

View File

@ -0,0 +1,5 @@
package example

// Scripted test fixture: depends on A.x, so replacing A.scala with Break is
// expected to break this compile.
object B {
  val y = A.x
}

View File

@ -1,7 +1,17 @@
import sbt.legacy.sources.Build._
Global / watchSources += new sbt.internal.io.Source(baseDirectory.value, "global.txt", NothingFilter, false)
val setStringValue = inputKey[Unit]("set a global string to a value")
val checkStringValue = inputKey[Unit]("check the value of a global")
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
IO.write(file(stringFile), string)
}
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed
assert(IO.read(file(stringFile)) == string)
}
watchSources in setStringValue += new sbt.internal.io.Source(baseDirectory.value, "foo.txt", NothingFilter, false)
setStringValue := setStringValueImpl.evaluated

View File

@ -1,17 +0,0 @@
package sbt.legacy.sources
import sbt._
import Keys._
object Build {
val setStringValue = inputKey[Unit]("set a global string to a value")
val checkStringValue = inputKey[Unit]("check the value of a global")
def setStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed.map(_.trim)
IO.write(file(stringFile), string)
}
def checkStringValueImpl: Def.Initialize[InputTask[Unit]] = Def.inputTask {
val Seq(stringFile, string) = Def.spaceDelimited().parsed
assert(IO.read(file(stringFile)) == string)
}
}

View File

@ -93,11 +93,17 @@ object ClientTest extends AbstractServerTest {
"compileAnalysisFile",
"compileAnalysisFilename",
"compileAnalysisTargetRoot",
"compileEarly",
"compileIncSetup",
"compileIncremental",
"compileJava",
"compileOutputs",
"compileProgress",
"compileScalaBackend",
"compileSplit",
"compilers",
)
assert(complete("compi") == expected)
}
test("testOnly completions") { _ =>

View File

@ -66,7 +66,7 @@ object EventsTest extends AbstractServerTest {
})
}
/* This test is timing out.
/* This test is timing out.
test("cancel on-going task with string id") { _ =>
import sbt.Exec
val id = Exec.newExecId
@ -84,5 +84,5 @@ object EventsTest extends AbstractServerTest {
s contains """"result":{"status":"Task cancelled""""
})
}
*/
*/
}