mirror of https://github.com/sbt/sbt.git
Merge branch 'develop' into parser-fix
commit 556a9384f3
@@ -5,6 +5,7 @@ import com.typesafe.tools.mima.core._, ProblemFilters._
 import local.Scripted
 import scala.xml.{ Node => XmlNode, NodeSeq => XmlNodeSeq, _ }
 import scala.xml.transform.{ RewriteRule, RuleTransformer }
+import scala.util.Try

 ThisBuild / version := {
   val v = "1.3.0-SNAPSHOT"
@@ -402,7 +403,9 @@ lazy val scriptedSbtReduxProj = (project in file("scripted-sbt-redux"))
       val extDepsCp = (externalDependencyClasspath in Compile in LocalProject("sbtProj")).value
       val cpStrings = (mainClassDir +: testClassDir +: classDirs) ++ extDepsCp.files map (_.toString)
       val file = (resourceManaged in Compile).value / "RunFromSource.classpath"
-      IO.writeLines(file, cpStrings)
+      if (!file.exists || Try(IO.readLines(file)).getOrElse(Nil).toSet != cpStrings.toSet) {
+        IO.writeLines(file, cpStrings)
+      }
       List(file)
     },
     mimaSettings,
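Note: the RunFromSource.classpath change above rewrites the generated file only when its contents actually differ, so the resource (and anything keyed on its content or timestamp) is not churned on every task invocation. Below is a minimal standalone sketch of that write-only-when-changed pattern using plain java.nio instead of sbt's IO helpers; the object name and target path are illustrative only, not part of this build.

import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths }
import scala.util.Try

object ClasspathFileWriter {
  // Rewrite `file` only when the desired content differs from what is on disk,
  // so consumers keyed on the file's content or timestamp stay warm.
  def writeIfChanged(file: Path, lines: Seq[String]): Unit = {
    val wanted = lines.mkString("\n")
    val existing = Try(new String(Files.readAllBytes(file), StandardCharsets.UTF_8)).getOrElse("")
    if (existing != wanted) {
      Option(file.getParent).foreach(Files.createDirectories(_))
      Files.write(file, wanted.getBytes(StandardCharsets.UTF_8))
      ()
    }
  }

  def main(args: Array[String]): Unit =
    writeIfChanged(Paths.get("target", "RunFromSource.classpath"), Seq("/tmp/a.jar", "/tmp/b.jar"))
}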
@@ -564,7 +567,7 @@ lazy val commandProj = (project in file("main-command"))
   )

 // The core macro project defines the main logic of the DSL, abstracted
-// away from several sbt implementators (tasks, settings, et cetera).
+// away from several sbt implementors (tasks, settings, et cetera).
 lazy val coreMacrosProj = (project in file("core-macros"))
   .dependsOn(collectionProj)
   .settings(
@@ -9,8 +9,10 @@ package sbt.internal.classpath;

 import java.net.URL;
 import java.net.URLClassLoader;
+import java.util.concurrent.atomic.AtomicBoolean;

 public class WrappedLoader extends URLClassLoader {
+  private final AtomicBoolean invalidated = new AtomicBoolean(false);
   static {
     ClassLoader.registerAsParallelCapable();
   }
@@ -19,6 +21,14 @@ public class WrappedLoader extends URLClassLoader {
     super(new URL[] {}, parent);
   }

+  void invalidate() {
+    invalidated.set(true);
+  }
+
+  boolean invalidated() {
+    return invalidated.get();
+  }
+
   @Override
   public URL[] getURLs() {
     final ClassLoader parent = getParent();
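Note: the two WrappedLoader hunks add an invalidation flag so the classloader cache can mark a loader stale without closing it while callers may still hold a reference to it. A rough Scala sketch of the same idea follows; the class name is illustrative, not sbt API.

import java.net.{ URL, URLClassLoader }
import java.util.concurrent.atomic.AtomicBoolean

// A URLClassLoader wrapper that a cache sweep can flag as stale instead of
// closing it out from under code that is still using it.
class InvalidatableLoader(parent: ClassLoader)
    extends URLClassLoader(Array.empty[URL], parent) {
  private[this] val invalidated = new AtomicBoolean(false)
  def invalidate(): Unit = invalidated.set(true)
  def isInvalidated: Boolean = invalidated.get()
}

object InvalidatableLoader {
  def main(args: Array[String]): Unit = {
    val loader = new InvalidatableLoader(getClass.getClassLoader)
    loader.invalidate()           // a cache sweep would do this
    println(loader.isInvalidated) // true
  }
}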
@@ -21,8 +21,6 @@ final case class Reboot(
   def arguments = argsList.toArray
 }

-private[sbt] case object Reload extends Exception(null, null, false, false)
-
 final case class ApplicationID(
     groupID: String,
     name: String,
@@ -71,18 +71,26 @@ private[sbt] class ClassLoaderCache(
     new java.util.concurrent.ConcurrentHashMap[Key, Reference[ClassLoader]]()
   private[this] val referenceQueue = new ReferenceQueue[ClassLoader]

-  private[this] def closeExpiredLoaders(): Unit = {
-    val toClose = lock.synchronized(delegate.asScala.groupBy(_._1.files.toSet).flatMap {
+  private[this] def clearExpiredLoaders(): Unit = lock.synchronized {
+    val clear = (k: Key, ref: Reference[ClassLoader]) => {
+      ref.get() match {
+        case w: WrappedLoader => w.invalidate()
+        case _ =>
+      }
+      delegate.remove(k)
+      ()
+    }
+    def isInvalidated(classLoader: ClassLoader): Boolean = classLoader match {
+      case w: WrappedLoader => w.invalidated()
+      case _ => false
+    }
+    delegate.asScala.groupBy { case (k, _) => k.parent -> k.files.toSet }.foreach {
       case (_, pairs) if pairs.size > 1 =>
-        val max = pairs.maxBy(_._1.maxStamp)._1
-        pairs.filterNot(_._1 == max).flatMap {
-          case (k, v) =>
-            delegate.remove(k)
-            Option(v.get)
-        }
-      case _ => Nil
-    })
-    toClose.foreach(close)
+        val max = pairs.map(_._1.maxStamp).max
+        pairs.foreach { case (k, v) => if (k.maxStamp != max) clear(k, v) }
+      case _ =>
+    }
+    delegate.forEach((k, v) => if (isInvalidated(k.parent)) clear(k, v))
   }
   private[this] class CleanupThread(private[this] val id: Int)
       extends Thread(s"classloader-cache-cleanup-$id") {
@@ -97,7 +105,7 @@ private[sbt] class ClassLoaderCache(
             delegate.remove(key)
           case _ =>
         }
-        closeExpiredLoaders()
+        clearExpiredLoaders()
         false
       } catch {
         case _: InterruptedException => true
@@ -178,7 +186,7 @@ private[sbt] class ClassLoaderCache(
       val ref = mkReference(key, f())
       val loader = ref.get
       delegate.put(key, ref)
-      closeExpiredLoaders()
+      clearExpiredLoaders()
       loader
     }
     lock.synchronized {
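Note: clearExpiredLoaders above changes the cache's expiry policy from eagerly closing stale loaders to flagging them invalidated and removing them from the map, and it additionally evicts entries whose parent loader has itself been invalidated. A much-simplified, self-contained model of that sweep is sketched below; all names are illustrative, not sbt's real types.

import java.io.File
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable

object TinyLoaderCacheDemo {
  // Stand-in for a loader that can be flagged stale rather than closed.
  final class Invalidatable {
    private val flag = new AtomicBoolean(false)
    def invalidate(): Unit = flag.set(true)
    def isInvalidated: Boolean = flag.get()
  }
  final case class LoaderKey(parent: Invalidatable, files: Seq[File], maxStamp: Long)

  final class TinyLoaderCache {
    private val delegate = mutable.Map.empty[LoaderKey, Invalidatable]
    def put(key: LoaderKey, loader: Invalidatable): Unit = delegate.update(key, loader)
    def contains(key: LoaderKey): Boolean = delegate.contains(key)

    def clearExpired(): Unit = {
      def clear(k: LoaderKey): Unit = delegate.remove(k).foreach(_.invalidate())
      // Among entries sharing a parent and classpath, keep only the newest stamp.
      delegate.keys.toList.groupBy(k => (k.parent, k.files)).foreach {
        case (_, keys) if keys.size > 1 =>
          val max = keys.map(_.maxStamp).max
          keys.filterNot(_.maxStamp == max).foreach(clear)
        case _ => ()
      }
      // Drop entries whose parent loader has itself been invalidated.
      delegate.keys.toList.foreach(k => if (k.parent.isInvalidated) clear(k))
    }
  }

  def main(args: Array[String]): Unit = {
    val parent = new Invalidatable
    val cache = new TinyLoaderCache
    val older = LoaderKey(parent, Seq(new File("a.jar")), maxStamp = 1L)
    val newer = LoaderKey(parent, Seq(new File("a.jar")), maxStamp = 2L)
    cache.put(older, new Invalidatable)
    cache.put(newer, new Invalidatable)
    cache.clearExpired()
    println(cache.contains(newer)) // true: only the newest stamp survives
    println(cache.contains(older)) // false
  }
}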
@@ -1,23 +0,0 @@
-/*
- * sbt
- * Copyright 2011 - 2018, Lightbend, Inc.
- * Copyright 2008 - 2010, Mark Harrah
- * Licensed under Apache License 2.0 (see LICENSE)
- */
-
-package sbt.internal;
-
-import java.io.File;
-import scala.collection.immutable.Map;
-import scala.collection.Seq;
-
-final class ResourceLoader extends ResourceLoaderImpl {
-  ResourceLoader(
-      final Seq<File> classpath, final ClassLoader parent, final Map<String, String> resources) {
-    super(classpath, parent, resources);
-  }
-
-  static {
-    ClassLoader.registerAsParallelCapable();
-  }
-}
@@ -346,7 +346,7 @@ object EvaluateTask {
       ExceptionCategory(ex) match {
         case AlreadyHandled => ()
         case m: MessageOnly => if (msg.isEmpty) log.error(m.message)
-        case f: Full => if (f.exception != Reload) log.trace(f.exception)
+        case f: Full => log.trace(f.exception)
       }
     }

@@ -354,7 +354,7 @@ object EvaluateTask {
     val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t")
     val log = getStreams(key, streams).log
     val display = contextDisplay(state, ConsoleAppender.formatEnabledInEnv)
-    if (!ex.contains(Reload)) log.error("(" + display.show(key) + ") " + msgString)
+    log.error("(" + display.show(key) + ") " + msgString)
   }
 }

@@ -144,10 +144,7 @@ object MainLoop {
       case Right(s) => s
       case Left(t: xsbti.FullReload) => throw t
       case Left(t: RebootCurrent) => throw t
-      case Left(Reload) =>
-        val remaining = state.currentCommand.toList ::: state.remainingCommands
-        state.copy(remainingCommands = Exec("reload", None, None) :: remaining)
-      case Left(t) => state.handleError(t)
+      case Left(t) => state.handleError(t)
     }
   } catch {
     case oom: OutOfMemoryError if oom.getMessage.contains("Metaspace") =>
@@ -9,17 +9,14 @@ package sbt
 package internal

 import java.text.DateFormat
-import java.util.{ Collections, IdentityHashMap }

-import Def.ScopedKey
-import Keys.{ showSuccess, showTiming, timingFormat }
+import sbt.Def.ScopedKey
+import sbt.Keys.{ showSuccess, showTiming, timingFormat }
+import sbt.internal.util.complete.Parser
+import sbt.internal.util.{ AttributeKey, Dag, HList, Settings, Util }
 import sbt.internal.util.complete.Parser.{ failure, seq, success }
-import sbt.internal.util._
+import sbt.std.Transform.DummyTaskMap
 import sbt.util.{ Logger, Show }
-import Parser.{ failure, seq, success }
-import std.Transform.DummyTaskMap
-
-import scala.annotation.tailrec

 sealed trait Aggregation
 object Aggregation {
@@ -115,25 +112,7 @@ object Aggregation {
   )(implicit display: Show[ScopedKey[_]]): State = {
     val complete = timedRun[T](s, ts, extra)
     showRun(complete, show)
-    /*
-     * In the first implementation, we tried to use Set[Incomplete] for visited. It had very poor
-     * performance because hashCode can be expensive on Incomplete -- especially when the
-     * Incomplete has many instances in the causes field.
-     */
-    lazy val visited = Collections
-      .newSetFromMap[Incomplete](new IdentityHashMap[Incomplete, java.lang.Boolean])
-    @tailrec def findReload(incomplete: Incomplete, remaining: List[Incomplete]): Boolean = {
-      visited.add(incomplete)
-      incomplete.directCause.contains(Reload) || ((remaining ::: incomplete.causes.toList)
-        .filterNot(visited.contains) match {
-        case Nil => false
-        case h :: tail => findReload(h, tail.filterNot(visited.contains))
-      })
-    }
     complete.results match {
-      case Inc(i) if findReload(i, i.causes.toList) =>
-        val remaining = s.currentCommand.toList ::: s.remainingCommands
-        complete.state.copy(remainingCommands = Exec("reload", None, None) :: remaining)
       case Inc(i) => complete.state.handleError(i)
       case Value(_) => complete.state
     }
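Note: the block removed above carried a useful observation in its comment: a hash-based Set[Incomplete] made the traversal slow because hashCode on deeply nested Incomplete values is expensive, so the visited set was identity-based. A small self-contained sketch of that identity-set traversal pattern follows; Node stands in for sbt's Incomplete and the "reload" label is only for the demo.

import java.util.{ Collections, IdentityHashMap }
import scala.annotation.tailrec

object IdentityVisitDemo {
  final case class Node(label: String, causes: List[Node])

  // Reference-equality visited set: cheap membership checks even when the
  // values themselves have expensive hashCode/equals.
  def contains(root: Node, label: String): Boolean = {
    val visited = Collections.newSetFromMap[Node](new IdentityHashMap[Node, java.lang.Boolean])
    @tailrec def loop(current: Node, remaining: List[Node]): Boolean = {
      visited.add(current)
      if (current.label == label) true
      else
        (remaining ::: current.causes).filterNot(visited.contains) match {
          case Nil       => false
          case h :: tail => loop(h, tail.filterNot(visited.contains))
        }
    }
    loop(root, Nil)
  }

  def main(args: Array[String]): Unit = {
    val leaf = Node("reload", Nil)
    val root = Node("top", List(Node("mid", List(leaf)), leaf))
    println(contains(root, "reload")) // true, and the shared leaf is visited once
  }
}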
@@ -35,13 +35,11 @@ private[sbt] object ClassLoaders {
       if (si.isManagedVersion) rawCP
       else si.libraryJars.map(j => j -> IO.getModifiedTimeOrZero(j)).toSeq ++ rawCP
     val exclude = dependencyJars(exportedProducts).value.toSet ++ si.libraryJars
-    val resourceCP = modifiedTimes((outputFileStamps in resources).value)
     buildLayers(
       strategy = classLoaderLayeringStrategy.value,
       si = si,
       fullCP = fullCP,
-      resourceCP = resourceCP,
-      allDependencies = dependencyJars(dependencyClasspath).value.filterNot(exclude),
+      allDependenciesSet = dependencyJars(dependencyClasspath).value.filterNot(exclude).toSet,
       cache = extendedClassLoaderCache.value,
       resources = ClasspathUtilities.createClasspathResources(fullCP.map(_._1), si),
       tmp = IO.createUniqueDirectory(taskTemporaryDirectory.value),
@@ -55,7 +53,6 @@ private[sbt] object ClassLoaders {
     val s = streams.value
     val opts = forkOptions.value
     val options = javaOptions.value
-    val resourceCP = modifiedTimes((outputFileStamps in resources).value)
     if (fork.value) {
       s.log.debug(s"javaOptions: $options")
       Def.task(new ForkRun(opts))
@@ -85,8 +82,7 @@ private[sbt] object ClassLoaders {
         strategy = classLoaderLayeringStrategy.value: @sbtUnchecked,
         si = instance,
         fullCP = classpath.map(f => f -> IO.getModifiedTimeOrZero(f)),
-        resourceCP = resourceCP,
-        allDependencies = transformedDependencies,
+        allDependenciesSet = transformedDependencies.toSet,
         cache = extendedClassLoaderCache.value: @sbtUnchecked,
         resources = ClasspathUtilities.createClasspathResources(classpath, instance),
         tmp = taskTemporaryDirectory.value: @sbtUnchecked,
@@ -118,8 +114,7 @@ private[sbt] object ClassLoaders {
       strategy: ClassLoaderLayeringStrategy,
       si: ScalaInstance,
       fullCP: Seq[(File, Long)],
-      resourceCP: Seq[(File, Long)],
-      allDependencies: Seq[File],
+      allDependenciesSet: Set[File],
       cache: ClassLoaderCache,
       resources: Map[String, String],
       tmp: File,
@@ -142,6 +137,7 @@ private[sbt] object ClassLoaders {
     }
     val cpFiles = fullCP.map(_._1)

+    val allDependencies = cpFiles.filter(allDependenciesSet)
     val scalaReflectJar = allDependencies.collectFirst {
       case f if f.getName == "scala-reflect.jar" =>
         si.allJars.find(_.getName == "scala-reflect.jar")
@@ -156,34 +152,28 @@ private[sbt] object ClassLoaders {
         }
         .getOrElse(scalaLibraryLayer)

-    // layer 2 (resources)
-    val resourceLayer =
-      if (layerDependencies)
-        getResourceLayer(cpFiles, resourceCP, scalaReflectLayer, cache, resources)
-      else scalaReflectLayer
-
-    // layer 3 (optional if in the test config and the runtime layer is not shared)
-    val dependencyLayer =
+    // layer 2 (optional if in the test config and the runtime layer is not shared)
+    val dependencyLayer: ClassLoader =
       if (layerDependencies && allDependencies.nonEmpty) {
         cache(
           allDependencies.toList.map(f => f -> IO.getModifiedTimeOrZero(f)),
-          resourceLayer,
-          () => new ReverseLookupClassLoaderHolder(allDependencies, resourceLayer)
+          scalaReflectLayer,
+          () => new ReverseLookupClassLoaderHolder(allDependencies, scalaReflectLayer)
         )
-      } else resourceLayer
+      } else scalaReflectLayer

     val scalaJarNames = (si.libraryJars ++ scalaReflectJar).map(_.getName).toSet
-    // layer 4
+    // layer 3
     val filteredSet =
       if (layerDependencies) allDependencies.toSet ++ si.libraryJars ++ scalaReflectJar
       else Set(si.libraryJars ++ scalaReflectJar: _*)
     val dynamicClasspath = cpFiles.filterNot(f => filteredSet(f) || scalaJarNames(f.getName))
     dependencyLayer match {
       case dl: ReverseLookupClassLoaderHolder =>
-        dl.checkout(dynamicClasspath, tmp)
+        dl.checkout(cpFiles, tmp)
       case cl =>
         cl.getParent match {
-          case dl: ReverseLookupClassLoaderHolder => dl.checkout(dynamicClasspath, tmp)
+          case dl: ReverseLookupClassLoaderHolder => dl.checkout(cpFiles, tmp)
           case _ => new LayeredClassLoader(dynamicClasspath, cl, tmp)
         }
     }
@@ -194,24 +184,6 @@ private[sbt] object ClassLoaders {
       key: sbt.TaskKey[Seq[Attributed[File]]]
   ): Def.Initialize[Task[Seq[File]]] = Def.task(data(key.value).filter(_.getName.endsWith(".jar")))

-  // Creates a one or two layered classloader for the provided classpaths depending on whether
-  // or not the classpath contains any snapshots. If it does, the snapshots are placed in a layer
-  // above the regular jar layer. This allows the snapshot layer to be invalidated without
-  // invalidating the regular jar layer. If the classpath is empty, it just returns the parent
-  // loader.
-  private def getResourceLayer(
-      classpath: Seq[File],
-      resources: Seq[(File, Long)],
-      parent: ClassLoader,
-      cache: ClassLoaderCache,
-      resourceMap: Map[String, String]
-  ): ClassLoader = {
-    if (resources.nonEmpty) {
-      val mkLoader = () => new ResourceLoader(classpath, parent, resourceMap)
-      cache(resources.toList, parent, mkLoader)
-    } else parent
-  }
-
   // helper methods
   private def flatLoader(classpath: Seq[File], parent: ClassLoader): ClassLoader =
     new FlatLoader(classpath.map(_.toURI.toURL).toArray, parent)
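Note: the deleted getResourceLayer helper described a simple rule that also shows up elsewhere in this file's layering code: only introduce an extra classloader layer when there is something to put in it, otherwise hand back the parent unchanged. A tiny sketch of that conditional-layer pattern follows; the names are illustrative, not sbt API.

import java.io.File
import java.net.URLClassLoader

object ConditionalLayer {
  // Create a new layer only for a non-empty classpath; an empty classpath
  // contributes nothing, so the parent is returned as-is.
  def layerIfNonEmpty(classpath: Seq[File], parent: ClassLoader): ClassLoader =
    if (classpath.isEmpty) parent
    else new URLClassLoader(classpath.map(_.toURI.toURL).toArray, parent)

  def main(args: Array[String]): Unit = {
    val parent = getClass.getClassLoader
    println(layerIfNonEmpty(Nil, parent) eq parent)                        // true: no extra layer
    println(layerIfNonEmpty(Seq(new File("extra.jar")), parent) eq parent) // false: new layer
  }
}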
@@ -8,7 +8,7 @@
 package sbt.internal

 import java.io.File
-import java.net.URLClassLoader
+import java.net.{ URL, URLClassLoader }
 import java.util.concurrent.ConcurrentHashMap
 import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }

@@ -69,7 +69,7 @@ private[internal] final class ReverseLookupClassLoaderHolder(
    *
    * @return a ClassLoader
    */
-  def checkout(dependencyClasspath: Seq[File], tempDir: File): ClassLoader = {
+  def checkout(fullClasspath: Seq[File], tempDir: File): ClassLoader = {
     if (closed.get()) {
       val msg = "Tried to extract class loader from closed ReverseLookupClassLoaderHolder. " +
         "Try running the `clearCaches` command and re-trying."
@@ -79,8 +79,8 @@ private[internal] final class ReverseLookupClassLoaderHolder(
       case null => new ReverseLookupClassLoader
       case c => c
     }
-    reverseLookupClassLoader.setTempDir(tempDir)
-    new BottomClassLoader(dependencyClasspath, reverseLookupClassLoader, tempDir)
+    reverseLookupClassLoader.setup(tempDir, fullClasspath)
+    new BottomClassLoader(fullClasspath, reverseLookupClassLoader, tempDir)
   }

   private def checkin(reverseLookupClassLoader: ReverseLookupClassLoader): Unit = {
@@ -149,6 +149,19 @@ private[internal] final class ReverseLookupClassLoaderHolder(
     private[this] val classLoadingLock = new ClassLoadingLock
     def isDirty: Boolean = dirty.get()
     def setDescendant(classLoader: BottomClassLoader): Unit = directDescendant.set(classLoader)
+    private[this] val resourceLoader = new AtomicReference[ResourceLoader](null)
+    private class ResourceLoader(cp: Seq[File])
+        extends URLClassLoader(cp.map(_.toURI.toURL).toArray, parent) {
+      def lookup(name: String): URL = findResource(name)
+    }
+    private[ReverseLookupClassLoaderHolder] def setup(tmpDir: File, cp: Seq[File]): Unit = {
+      setTempDir(tmpDir)
+      resourceLoader.set(new ResourceLoader(cp))
+    }
+    override def findResource(name: String): URL = resourceLoader.get() match {
+      case null => null
+      case l => l.lookup(name)
+    }
     def loadClass(name: String, resolve: Boolean, reverseLookup: Boolean): Class[_] = {
       classLoadingLock.withLock(name) {
         try super.loadClass(name, resolve)
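Note: the ReverseLookupClassLoader additions route findResource through a swappable URLClassLoader that setup installs at checkout time, apparently so resources can be looked up against the checked-out classpath while class lookup stays layered. A rough, self-contained Scala sketch of that delegation shape follows; SwappableResourceLoader and Helper are illustrative names, not sbt classes.

import java.io.File
import java.net.{ URL, URLClassLoader }
import java.util.concurrent.atomic.AtomicReference

// findResource is answered by a helper loader that can be swapped in later;
// until setup runs, resource lookups simply return null.
class SwappableResourceLoader(parent: ClassLoader) extends ClassLoader(parent) {
  private final class Helper(cp: Seq[File])
      extends URLClassLoader(cp.map(_.toURI.toURL).toArray, parent) {
    def lookup(name: String): URL = findResource(name)
  }
  private val helper = new AtomicReference[Helper](null)

  def setup(classpath: Seq[File]): Unit = helper.set(new Helper(classpath))

  override def findResource(name: String): URL = helper.get() match {
    case null => null
    case h    => h.lookup(name)
  }
}

object SwappableResourceLoader {
  def main(args: Array[String]): Unit = {
    val loader = new SwappableResourceLoader(getClass.getClassLoader)
    loader.setup(Seq(new File("target/classes")))     // illustrative directory
    println(loader.findResource("application.conf"))  // null unless present there
  }
}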
@@ -15,12 +15,12 @@ import sbt.nio.Keys._
 import sbt.nio.file.{ ChangedFiles, Glob, RecursiveGlob }

 private[sbt] object CheckBuildSources {
-  private[sbt] def needReloadImpl: Def.Initialize[Task[Unit]] = Def.task {
+  private[sbt] def needReloadImpl: Def.Initialize[Task[StateTransform]] = Def.task {
     val logger = streams.value.log
-    val checkMetaBuildParam = state.value.get(hasCheckedMetaBuild)
-    val firstTime = checkMetaBuildParam.fold(true)(_.get == false)
+    val st: State = state.value
+    val firstTime = st.get(hasCheckedMetaBuild).fold(true)(_.compareAndSet(false, true))
     (onChangedBuildSource in Scope.Global).value match {
-      case IgnoreSourceChanges => ()
+      case IgnoreSourceChanges => new StateTransform(st)
       case o =>
         logger.debug("Checking for meta build source updates")
         (changedInputFiles in checkBuildSources).value match {
@@ -37,18 +37,20 @@ private[sbt] object CheckBuildSources {
           val prefix = rawPrefix.linesIterator.filterNot(_.trim.isEmpty).mkString("\n")
           if (o == ReloadOnSourceChanges) {
             logger.info(s"$prefix\nReloading sbt...")
-            throw Reload
+            val remaining =
+              Exec("reload", None, None) :: st.currentCommand.toList ::: st.remainingCommands
+            new StateTransform(st.copy(currentCommand = None, remainingCommands = remaining))
           } else {
             val tail = "Apply these changes by running `reload`.\nAutomatically reload the " +
               "build when source changes are detected by setting " +
               "`Global / onChangedBuildSource := ReloadOnSourceChanges`.\nDisable this " +
               "warning by setting `Global / onChangedBuildSource := IgnoreSourceChanges`."
             logger.warn(s"$prefix\n$tail")
+            new StateTransform(st)
           }
-        case _ => ()
+        case _ => new StateTransform(st)
       }
     }
-    checkMetaBuildParam.foreach(_.set(true))
   }
   private[sbt] def buildSourceFileInputs: Def.Initialize[Seq[Glob]] = Def.setting {
     if (onChangedBuildSource.value != IgnoreSourceChanges) {
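Note: with the StateTransform approach above, scheduling a reload becomes a plain state edit: an Exec("reload", ...) is pushed in front of the interrupted command and the rest of the queue, instead of unwinding the stack with the Reload exception. A simplified sketch of that queue manipulation follows; MiniState and MiniExec are stand-ins for sbt's State and Exec, not the real API.

object ReloadScheduling {
  final case class MiniExec(commandLine: String)
  final case class MiniState(currentCommand: Option[MiniExec], remainingCommands: List[MiniExec])

  // Prepend "reload", then replay the command that was running and the queue.
  def scheduleReload(st: MiniState): MiniState = {
    val remaining = MiniExec("reload") :: st.currentCommand.toList ::: st.remainingCommands
    st.copy(currentCommand = None, remainingCommands = remaining)
  }

  def main(args: Array[String]): Unit = {
    val st = MiniState(Some(MiniExec("compile")), List(MiniExec("test")))
    println(scheduleReload(st).remainingCommands.map(_.commandLine)) // List(reload, compile, test)
  }
}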
@@ -50,7 +50,7 @@ object Keys {
     taskKey[FileTreeView.Nio[FileAttributes]]("A view of the local file system tree")

   val checkBuildSources =
-    taskKey[Unit]("Check if any meta build sources have changed").withRank(DSetting)
+    taskKey[StateTransform]("Check if any meta build sources have changed").withRank(DSetting)

   // watch related settings
   val watchAntiEntropyRetentionPeriod = settingKey[FiniteDuration](