mirror of https://github.com/sbt/sbt.git
Fix data race to remote cache on classpath analyses
The attributed classpath should point to the existing analysis file in the target folder, to avoid a data race with the remote cache.
This commit is contained in:
parent
1c03463e19
commit
85fcb014f0
|
|
@ -229,7 +229,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
|
|||
|
||||
import language.experimental.macros
|
||||
|
||||
// These are here, as opposed to RemoteCahe, since we need them from TaskMacro etc
|
||||
// These are here, as opposed to RemoteCache, since we need them from TaskMacro etc
|
||||
private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore()
|
||||
def cacheStore: ActionCacheStore = _cacheStore
|
||||
private[sbt] var _outputDirectory: Option[Path] = None
|
||||
|
|
|
|||
|
|
@ -102,6 +102,7 @@ import sbt.SlashSyntax0._
|
|||
import sbt.internal.inc.{
|
||||
Analysis,
|
||||
AnalyzingCompiler,
|
||||
FileAnalysisStore,
|
||||
ManagedLoggedReporter,
|
||||
MixedAnalyzingCompiler,
|
||||
ScalaInstance
|
||||
|
|
@ -143,22 +144,17 @@ object Defaults extends BuildCommon {
|
|||
|
||||
def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app)
|
||||
|
||||
def extractAnalysis[A1](a: Attributed[A1]): (A1, CompileAnalysis) =
|
||||
(
|
||||
a.data,
|
||||
a.metadata.get(Keys.analysis) match
|
||||
case Some(ref) => RemoteCache.getCachedAnalysis(ref)
|
||||
case None => Analysis.Empty
|
||||
)
|
||||
|
||||
def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[CompileAnalysis] = {
|
||||
val m = (for {
|
||||
a <- cp
|
||||
ref <- a.metadata.get(Keys.analysis)
|
||||
an = RemoteCache.getCachedAnalysis(ref)
|
||||
} yield (a.data, an)).toMap
|
||||
m.get(_)
|
||||
}
|
||||
private[sbt] def extractAnalysis(
|
||||
metadata: StringAttributeMap,
|
||||
converter: FileConverter
|
||||
): Option[CompileAnalysis] =
|
||||
def asBinary(file: File) = FileAnalysisStore.binary(file).get.asScala
|
||||
def asText(file: File) = FileAnalysisStore.text(file).get.asScala
|
||||
for
|
||||
ref <- metadata.get(Keys.analysis)
|
||||
file = converter.toPath(VirtualFileRef.of(ref)).toFile
|
||||
content <- asBinary(file).orElse(asText(file))
|
||||
yield content.getAnalysis
|
||||
|
||||
private[sbt] def globalDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] =
|
||||
Def.defaultSettings(inScope(GlobalScope)(ss))
|
||||
|
|
@ -1427,10 +1423,10 @@ object Defaults extends BuildCommon {
|
|||
Def.task {
|
||||
val cp = (test / fullClasspath).value
|
||||
val s = (test / streams).value
|
||||
val analyses: Seq[Analysis] = cp
|
||||
.flatMap(_.metadata.get(Keys.analysis))
|
||||
.map: str =>
|
||||
RemoteCache.getCachedAnalysis(str).asInstanceOf[Analysis]
|
||||
val converter = fileConverter.value
|
||||
val analyses = cp
|
||||
.flatMap(a => extractAnalysis(a.metadata, converter))
|
||||
.collect { case analysis: Analysis => analysis }
|
||||
val succeeded = TestStatus.read(succeededFile(s.cacheDirectory))
|
||||
val stamps = collection.mutable.Map.empty[String, Long]
|
||||
def stamp(dep: String): Option[Long] =
|
||||
|
|
@ -2396,7 +2392,7 @@ object Defaults extends BuildCommon {
|
|||
val _ = compileIncremental.value
|
||||
val exportP = exportPipelining.value
|
||||
// Save analysis midway if pipelining is enabled
|
||||
val store = analysisStore
|
||||
val store = analysisStore(compileAnalysisFile)
|
||||
val contents = store.unsafeGet()
|
||||
if (exportP) {
|
||||
// this stores the eary analysis (again) in case the subproject contains a macro
|
||||
|
|
@ -2421,7 +2417,7 @@ object Defaults extends BuildCommon {
|
|||
.debug(s"${name.value}: compileEarly: blocking on earlyOutputPing")
|
||||
earlyOutputPing.await.value
|
||||
}) {
|
||||
val store = earlyAnalysisStore
|
||||
val store = analysisStore(earlyCompileAnalysisFile)
|
||||
store.get.toOption match {
|
||||
case Some(contents) => contents.getAnalysis
|
||||
case _ => Analysis.empty
|
||||
|
|
@ -2433,7 +2429,7 @@ object Defaults extends BuildCommon {
|
|||
|
||||
def compileTask: Initialize[Task[CompileAnalysis]] = Def.task {
|
||||
val setup: Setup = compileIncSetup.value
|
||||
val store = analysisStore
|
||||
val store = analysisStore(compileAnalysisFile)
|
||||
val c = fileConverter.value
|
||||
// TODO - expose bytecode manipulation phase.
|
||||
val analysisResult: CompileResult = manipulateBytecode.value
|
||||
|
|
@ -2456,7 +2452,7 @@ object Defaults extends BuildCommon {
|
|||
val bspTask = (compile / bspCompileTask).value
|
||||
val result = cachedCompileIncrementalTask.result.value
|
||||
val reporter = (compile / bspReporter).value
|
||||
val store = analysisStore
|
||||
val store = analysisStore(compileAnalysisFile)
|
||||
val ci = (compile / compileInputs).value
|
||||
result match
|
||||
case Result.Value(res) =>
|
||||
|
|
@ -2489,7 +2485,7 @@ object Defaults extends BuildCommon {
|
|||
val ci2 = (compile / compileInputs2).value
|
||||
val ping = (TaskZero / earlyOutputPing).value
|
||||
val setup: Setup = (TaskZero / compileIncSetup).value
|
||||
val store = analysisStore
|
||||
val store = analysisStore(compileAnalysisFile)
|
||||
val c = fileConverter.value
|
||||
// TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too?
|
||||
val analysisResult = Retry(compileIncrementalTaskImpl(bspTask, s, ci, ping))
|
||||
|
|
@ -2570,17 +2566,22 @@ object Defaults extends BuildCommon {
|
|||
|
||||
def compileIncSetupTask = Def.task {
|
||||
val cp = dependencyPicklePath.value
|
||||
val converter = fileConverter.value
|
||||
val cachedAnalysisMap: Map[VirtualFile, CompileAnalysis] = (
|
||||
for
|
||||
attributed <- cp
|
||||
analysis <- extractAnalysis(attributed.metadata, converter)
|
||||
yield (converter.toVirtualFile(attributed.data), analysis)
|
||||
).toMap
|
||||
val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass =
|
||||
Keys.classpathEntryDefinesClassVF.value
|
||||
val lookup = new PerClasspathEntryLookup:
|
||||
private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] =
|
||||
analysisMap(cp)
|
||||
private val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass =
|
||||
Keys.classpathEntryDefinesClassVF.value
|
||||
override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] =
|
||||
cachedAnalysisMap(classpathEntry).toOptional
|
||||
cachedAnalysisMap.get(classpathEntry).toOptional
|
||||
override def definesClass(classpathEntry: VirtualFile): DefinesClass =
|
||||
cachedPerEntryDefinesClassLookup(classpathEntry)
|
||||
val extra = extraIncOptions.value.map(t2)
|
||||
val store = earlyAnalysisStore
|
||||
val store = analysisStore(earlyCompileAnalysisFile)
|
||||
val eaOpt = if exportPipelining.value then Some(store) else None
|
||||
Setup.of(
|
||||
lookup,
|
||||
|
|
@ -2685,7 +2686,7 @@ object Defaults extends BuildCommon {
|
|||
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
|
||||
previousCompile := {
|
||||
val setup = compileIncSetup.value
|
||||
val store = analysisStore
|
||||
val store = analysisStore(compileAnalysisFile)
|
||||
val prev = store.get().toOption match {
|
||||
case Some(contents) =>
|
||||
val analysis = Option(contents.getAnalysis).toOptional
|
||||
|
|
@ -2697,17 +2698,11 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
)
|
||||
|
||||
private inline def analysisStore: AnalysisStore = {
|
||||
val setup = compileIncSetup.value
|
||||
val useBinary = enableBinaryCompileAnalysis.value
|
||||
MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary)
|
||||
}
|
||||
|
||||
private inline def earlyAnalysisStore: AnalysisStore = {
|
||||
val earlyAnalysisPath = earlyCompileAnalysisFile.value.toPath
|
||||
val useBinary = enableBinaryCompileAnalysis.value
|
||||
MixedAnalyzingCompiler.staticCachedStore(earlyAnalysisPath, !useBinary)
|
||||
}
|
||||
private inline def analysisStore(inline analysisFile: TaskKey[File]): AnalysisStore =
|
||||
MixedAnalyzingCompiler.staticCachedStore(
|
||||
analysisFile.value.toPath,
|
||||
!enableBinaryCompileAnalysis.value
|
||||
)
|
||||
|
||||
def printWarningsTask: Initialize[Task[Unit]] =
|
||||
Def.task {
|
||||
|
|
@ -4232,7 +4227,6 @@ object Classpaths {
|
|||
new RawRepository(resolver, resolver.getName)
|
||||
}
|
||||
|
||||
def analyzed[T](data: T, analysis: CompileAnalysis) = ClasspathImpl.analyzed[T](data, analysis)
|
||||
def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
|
||||
val c = fileConverter.value
|
||||
Def.unit(copyResources.value)
|
||||
|
|
|
|||
|
|
@ -22,10 +22,9 @@ import sbt.ProjectExtra.*
|
|||
import sbt.ScopeFilter.Make._
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.coursierint.LMCoursier
|
||||
import sbt.internal.inc.{ CompileOutput, HashUtil, JarUtils, MappedFileConverter }
|
||||
import sbt.internal.inc.{ HashUtil, JarUtils }
|
||||
import sbt.internal.librarymanagement._
|
||||
import sbt.internal.remotecache._
|
||||
import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
import sbt.librarymanagement._
|
||||
|
|
@ -35,16 +34,10 @@ import sbt.nio.FileStamp
|
|||
import sbt.nio.Keys.{ inputFileStamps, outputFileStamps }
|
||||
import sbt.std.TaskExtra._
|
||||
import sbt.util.InterfaceUtil.toOption
|
||||
import sbt.util.{
|
||||
ActionCacheStore,
|
||||
AggregateActionCacheStore,
|
||||
CacheImplicits,
|
||||
DiskActionCacheStore,
|
||||
Logger
|
||||
}
|
||||
import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, DiskActionCacheStore, Logger }
|
||||
import sjsonnew.JsonFormat
|
||||
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
|
||||
import xsbti.compile.{ AnalysisContents, AnalysisStore, CompileAnalysis, MiniSetup, MiniOptions }
|
||||
import xsbti.compile.CompileAnalysis
|
||||
|
||||
import scala.collection.mutable
|
||||
|
||||
|
|
@ -72,43 +65,6 @@ object RemoteCache {
|
|||
val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath()
|
||||
Def._cacheStore = DiskActionCacheStore(tempDiskCache)
|
||||
|
||||
private[sbt] def getCachedAnalysis(ref: String): CompileAnalysis =
|
||||
getCachedAnalysis(CacheImplicits.strToHashedVirtualFileRef(ref))
|
||||
private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis =
|
||||
analysisStore.getOrElseUpdate(
|
||||
ref, {
|
||||
val outputDirectory = Def.cacheConfiguration.outputDirectory
|
||||
cacheStore.syncBlobs(ref :: Nil, outputDirectory).headOption match
|
||||
case Some(file) => analysisStore(file).get.get.getAnalysis
|
||||
case None => Analysis.empty
|
||||
}
|
||||
)
|
||||
|
||||
private[sbt] val tempConverter: MappedFileConverter = MappedFileConverter.empty
|
||||
private[sbt] def postAnalysis(analysis: CompileAnalysis): Option[HashedVirtualFileRef] =
|
||||
IO.withTemporaryFile("analysis", ".tmp", true): file =>
|
||||
val output = CompileOutput.empty
|
||||
val option = MiniOptions.of(Array(), Array(), Array())
|
||||
val setup = MiniSetup.of(
|
||||
output,
|
||||
option,
|
||||
"",
|
||||
xsbti.compile.CompileOrder.Mixed,
|
||||
false,
|
||||
Array()
|
||||
)
|
||||
analysisStore(file.toPath).set(AnalysisContents.create(analysis, setup))
|
||||
val vf = tempConverter.toVirtualFile(file.toPath)
|
||||
val refs = cacheStore.putBlobs(vf :: Nil)
|
||||
refs.headOption match
|
||||
case Some(ref) =>
|
||||
analysisStore(ref) = analysis
|
||||
Some(ref)
|
||||
case None => None
|
||||
|
||||
private def analysisStore(file: Path): AnalysisStore =
|
||||
MixedAnalyzingCompiler.staticCachedStore(file, true)
|
||||
|
||||
private[sbt] def artifactToStr(art: Artifact): String = {
|
||||
import LibraryManagementCodec._
|
||||
import sjsonnew.support.scalajson.unsafe._
|
||||
|
|
@ -556,10 +512,10 @@ object RemoteCache {
|
|||
key: SettingKey[A],
|
||||
pkgTasks: Seq[TaskKey[HashedVirtualFileRef]]
|
||||
): Def.Initialize[Seq[A]] =
|
||||
(Classpaths.forallIn(key, pkgTasks) zipWith
|
||||
Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) =>
|
||||
a
|
||||
})
|
||||
Classpaths
|
||||
.forallIn(key, pkgTasks)
|
||||
.zipWith(Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))
|
||||
.apply(_.zip(_).collect { case (a, true) => a })
|
||||
|
||||
private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] =
|
||||
inputs.toVector map { case (_, stamp0) =>
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ import Def.Setting
|
|||
import sbt.io.Hash
|
||||
import sbt.internal.util.Attributed
|
||||
import sbt.internal.inc.ReflectUtilities
|
||||
import xsbti.FileConverter
|
||||
|
||||
trait BuildDef {
|
||||
def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects
|
||||
|
|
@ -72,10 +73,9 @@ private[sbt] object BuildDef {
|
|||
autoGeneratedProject := true
|
||||
)
|
||||
|
||||
def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] =
|
||||
in.flatMap: a =>
|
||||
a.metadata
|
||||
.get(Keys.analysis)
|
||||
.map: str =>
|
||||
RemoteCache.getCachedAnalysis(str)
|
||||
def analyzed(
|
||||
in: Seq[Attributed[_]],
|
||||
converter: FileConverter
|
||||
): Seq[xsbti.compile.CompileAnalysis] =
|
||||
in.flatMap(a => Defaults.extractAnalysis(a.metadata, converter))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,15 +15,13 @@ import sbt.Keys._
|
|||
import sbt.nio.Keys._
|
||||
import sbt.nio.file.{ Glob, RecursiveGlob }
|
||||
import sbt.Def.Initialize
|
||||
import sbt.internal.inc.Analysis
|
||||
import sbt.internal.inc.JavaInterfaceUtil._
|
||||
import sbt.internal.util.{ Attributed, Dag, Settings }
|
||||
import sbt.librarymanagement.{ Configuration, TrackLevel }
|
||||
import sbt.librarymanagement.Configurations.names
|
||||
import sbt.std.TaskExtra._
|
||||
import sbt.util._
|
||||
import scala.jdk.CollectionConverters.*
|
||||
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
|
||||
import xsbti.{ HashedVirtualFileRef, VirtualFile, VirtualFileRef }
|
||||
import xsbti.compile.CompileAnalysis
|
||||
|
||||
private[sbt] object ClasspathImpl {
|
||||
|
|
@ -38,10 +36,13 @@ private[sbt] object ClasspathImpl {
|
|||
val config = configuration.value
|
||||
val products = pickleProducts.value
|
||||
val analysis = compileEarly.value
|
||||
val xs = products map { _ -> analysis }
|
||||
val converter = fileConverter.value
|
||||
val analysisFile = converter.toVirtualFile(earlyCompileAnalysisFile.value.toPath)
|
||||
|
||||
val xs = products.map(_ -> analysis)
|
||||
for (f, analysis) <- xs
|
||||
yield APIMappings
|
||||
.store(analyzed(f, analysis), apiURL.value)
|
||||
.store(analyzed(f, analysisFile), apiURL.value)
|
||||
.put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
|
||||
.put(Keys.configurationStr, config.name)
|
||||
else exportedProducts.value
|
||||
|
|
@ -55,7 +56,7 @@ private[sbt] object ClasspathImpl {
|
|||
val config = configuration.value
|
||||
for (f, analysis) <- trackedExportedProductsImplTask(track).value
|
||||
yield APIMappings
|
||||
.store(analyzed[HashedVirtualFileRef](f, analysis), apiURL.value)
|
||||
.store(analyzed(f, analysis), apiURL.value)
|
||||
.put(Keys.artifactStr, RemoteCache.artifactToStr(art))
|
||||
.put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
|
||||
.put(Keys.configurationStr, config.name)
|
||||
|
|
@ -67,7 +68,6 @@ private[sbt] object ClasspathImpl {
|
|||
val art = (packageBin / artifact).value
|
||||
val module = projectID.value
|
||||
val config = configuration.value
|
||||
val converter = fileConverter.value
|
||||
for (f, analysis) <- trackedJarProductsImplTask(track).value
|
||||
yield APIMappings
|
||||
.store(analyzed(f, analysis), apiURL.value)
|
||||
|
|
@ -78,7 +78,7 @@ private[sbt] object ClasspathImpl {
|
|||
|
||||
private[this] def trackedExportedProductsImplTask(
|
||||
track: TrackLevel
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
|
||||
Def.taskIf {
|
||||
if {
|
||||
val _ = (packageBin / dynamicDependency).value
|
||||
|
|
@ -89,44 +89,38 @@ private[sbt] object ClasspathImpl {
|
|||
|
||||
private[this] def trackedNonJarProductsImplTask(
|
||||
track: TrackLevel
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
|
||||
(Def
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
|
||||
Def
|
||||
.task {
|
||||
val dirs = productDirectories.value
|
||||
val view = fileTreeView.value
|
||||
(TrackLevel.intersection(track, exportToInternal.value), dirs, view)
|
||||
})
|
||||
}
|
||||
.flatMapTask {
|
||||
case (TrackLevel.TrackAlways, _, _) =>
|
||||
Def.task {
|
||||
val converter = fileConverter.value
|
||||
val a = compile.value
|
||||
products.value
|
||||
.map { x => converter.toVirtualFile(x.toPath()) }
|
||||
.map { (_, a) }
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile))
|
||||
}
|
||||
case (TrackLevel.TrackIfMissing, dirs, view)
|
||||
if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty =>
|
||||
Def.task {
|
||||
val converter = fileConverter.value
|
||||
val a = compile.value
|
||||
products.value
|
||||
.map { x => converter.toVirtualFile(x.toPath()) }
|
||||
.map { (_, a) }
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile))
|
||||
}
|
||||
case (_, dirs, _) =>
|
||||
Def.task {
|
||||
val converter = fileConverter.value
|
||||
val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
|
||||
dirs
|
||||
.map { x => converter.toVirtualFile(x.toPath()) }
|
||||
.map(_ -> analysis)
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
dirs.map { x => (converter.toVirtualFile(x.toPath()), analysisFile) }
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def trackedJarProductsImplTask(
|
||||
track: TrackLevel
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
|
||||
): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] =
|
||||
(Def
|
||||
.task {
|
||||
val converter = fileConverter.value
|
||||
|
|
@ -137,23 +131,21 @@ private[sbt] object ClasspathImpl {
|
|||
.flatMapTask {
|
||||
case (TrackLevel.TrackAlways, _, _) =>
|
||||
Def.task {
|
||||
Seq((packageBin.value, compile.value))
|
||||
val converter = fileConverter.value
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
Seq((packageBin.value, analysisFile))
|
||||
}
|
||||
case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists =>
|
||||
Def.task {
|
||||
Seq((packageBin.value, compile.value))
|
||||
val converter = fileConverter.value
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
Seq((packageBin.value, analysisFile))
|
||||
}
|
||||
case (_, vf, _) =>
|
||||
Def.task {
|
||||
val converter = fileConverter.value
|
||||
val analysisOpt = previousCompile.value.analysis.toOption
|
||||
Seq(vf).map(converter.toPath).map(converter.toVirtualFile).map { x =>
|
||||
(
|
||||
x,
|
||||
if (analysisOpt.isDefined) analysisOpt.get
|
||||
else Analysis.empty
|
||||
)
|
||||
}
|
||||
val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath)
|
||||
Seq(vf).map { x => (converter.toVirtualFile(x), analysisFile) }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -351,13 +343,8 @@ private[sbt] object ClasspathImpl {
|
|||
(tasks.toSeq.join).map(_.flatten.distinct)
|
||||
}
|
||||
|
||||
def analyzed[A](data: A, analysis: CompileAnalysis) =
|
||||
RemoteCache.postAnalysis(analysis) match
|
||||
case Some(ref) =>
|
||||
Attributed
|
||||
.blank(data)
|
||||
.put(Keys.analysis, CacheImplicits.hashedVirtualFileRefToStr(ref))
|
||||
case None => Attributed.blank(data)
|
||||
def analyzed[A](data: A, analysisFile: VirtualFile): Attributed[A] =
|
||||
Attributed.blank(data).put(Keys.analysis, analysisFile.id)
|
||||
|
||||
def interSort(
|
||||
projectRef: ProjectRef,
|
||||
|
|
|
|||
|
|
@ -105,7 +105,7 @@ object PluginDiscovery:
|
|||
): Seq[String] =
|
||||
(
|
||||
binaryModuleNames(classpath, converter, loader, resourceName) ++
|
||||
(analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*)))
|
||||
analyzed(classpath, converter).flatMap(a => sourceModuleNames(a, subclasses: _*))
|
||||
).distinct
|
||||
|
||||
/** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. */
|
||||
|
|
|
|||
|
|
@ -199,7 +199,7 @@ private[sbt] object Definition {
|
|||
}
|
||||
|
||||
def collectAnalysesTask = Def.task {
|
||||
val cacheFile: String = compileIncSetup.value.cacheFile.getAbsolutePath
|
||||
val cacheFile: String = compileAnalysisFile.value.getAbsolutePath
|
||||
val useBinary = enableBinaryCompileAnalysis.value
|
||||
val s = state.value
|
||||
s.log.debug(s"analysis location ${cacheFile -> useBinary}")
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ import sbt.io.syntax.*
|
|||
import xsbti.{ HashedVirtualFileRef, PathBasedFile, VirtualFile }
|
||||
|
||||
/**
|
||||
* An abstration of a remote or local cache store.
|
||||
* An abstraction of a remote or local cache store.
|
||||
*/
|
||||
trait ActionCacheStore:
|
||||
/**
|
||||
|
|
|
|||
Loading…
Reference in New Issue