mirror of https://github.com/sbt/sbt.git
Merge 8ab636e10a into 67b4434507
commit b6396a5ffa
@@ -9,14 +9,16 @@
package sbt

import java.io.{ File, IOException }
import java.nio.file.Path
import java.util.zip.ZipException

import sbt.internal.inc.MappedFileConverter
import sbt.internal.util.Relation
import sbt.internal.io.TranslatedException
import sbt.util.CacheImplicits.*
import sbt.util.CacheImplicits.given
import sbt.util.{ CacheStore, FileInfo }
import sbt.io.IO
import sbt.io.Path.{ flat, rebase }
import sjsonnew.{
  Builder,
  IsoString,
@@ -27,6 +29,7 @@ import sjsonnew.{
  deserializationError,
}
import xsbti.{ FileConverter, VirtualFileRef }
import xsbti.compile.CompileAnalysis

/**
 * Maintains a set of mappings so that they are up-to-date.
@@ -94,6 +97,59 @@ object Sync {
    relation
  }

  private[sbt] def syncClasses(
      store: CacheStore,
      fileConverter: FileConverter
  ): (Option[CompileAnalysis], Path, Path) => Unit =
    (analysisOpt, backendDir, classesDir) => {
      val currentStamps = analysisOpt match
        case Some(a) =>
          import scala.jdk.CollectionConverters.*
          a.readStamps
            .getAllProductStamps()
            .asScala
            .map: (k, v) =>
              (k, v.toString())
            .toMap
        case None => Map.empty
      val currentStampsSeq = currentStamps.toVector.sortBy(_._1.id())
      val previousStampsSeq = store.read[Vector[(VirtualFileRef, String)]](Vector.empty)
      val previousStamps = Map(previousStampsSeq*)
      if currentStampsSeq == previousStampsSeq then ()
      else
        val t = classesDir.toFile()
        val productsVf = currentStamps.map(_._1)
        val flt: File => Option[File] = flat(t)
        val transform: VirtualFileRef => Option[File] =
          (vf: VirtualFileRef) =>
            val f = fileConverter.toPath(vf).toFile()
            rebase(backendDir.toFile(), t)(f).orElse(flt(f))
        val mappings = productsVf.flatMap: x =>
          transform(x).map(x -> _)
        val relation = Relation.empty ++ mappings
        def outofdate(source: VirtualFileRef, target: File): Boolean =
          !previousStamps.contains(source) ||
            previousStamps.get(source) != currentStamps.get(source) ||
            !target.exists
        val updates = relation.filter(outofdate)
        val removeTargets = (previousStampsSeq.map(_._1) diff currentStampsSeq.map(_._1)).flatMap:
          x => transform(x).map(x -> _)
        val (cleanDirs, cleanFiles) =
          (updates._2s ++ removeTargets.map(_._2)).partition(_.isDirectory)
        IO.delete(cleanFiles)
        IO.deleteIfEmpty(cleanDirs)
        updates.all.foreach: (k, v) =>
          val classFile = fileConverter.toPath(k).toFile()
          copy(classFile, v)
          if !classFile.getName().contains("$") then
            val (name, ext) = IO.split(classFile.getName)
            val tasty = File(classFile.getParentFile(), name + ".tasty")
            if tasty.exists() then
              val tastyTarget = File(v.getParentFile(), name + ".tasty")
              copy(tasty, tastyTarget)
        store.write(currentStampsSeq)
    }

  def copy(source: File, target: File): Unit =
    if (source.isFile) IO.copyFile(source, target, true)
    else if (!target.exists) { // we don't want to update the last modified time of an existing directory
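The new Sync.syncClasses above replaces an unconditional wipe-and-recopy of the class directory with a stamp diff: product stamps from the current analysis are compared against the stamps persisted in the CacheStore, only new or changed products (plus any sibling .tasty file) are re-copied, products whose targets went missing are restored, and products that disappeared since the previous run are deleted. Note also that transform falls back from rebase(backendDir.toFile(), t) to flat(t), so a product under the backend directory keeps its relative path while anything else lands flat in the target directory. A minimal, self-contained sketch of the planning step, with hypothetical names and plain Strings standing in for VirtualFileRef:

// Sketch of the stamp-diff planning inside syncClasses (not sbt API).
object StampDiffSketch:
  def plan(
      previous: Map[String, String], // product -> stamp persisted by the last run
      current: Map[String, String] // product -> stamp from this run's analysis
  ): (Set[String], Set[String]) =
    // out of date: new product or changed stamp (the real code also
    // re-copies when the target file no longer exists)
    val toCopy = current.collect {
      case (ref, stamp) if !previous.get(ref).contains(stamp) => ref
    }.toSet
    // stale: products that vanished since the previous run
    val toDelete = previous.keySet -- current.keySet
    (toCopy, toDelete)

@main def stampDiffDemo(): Unit =
  val previous = Map("A.class" -> "hash1", "B.class" -> "hash2")
  val current = Map("A.class" -> "hash1", "C.class" -> "hash3")
  val (toCopy, toDelete) = StampDiffSketch.plan(previous, current)
  println(s"copy: $toCopy") // copy: Set(C.class)
  println(s"delete: $toDelete") // delete: Set(B.class)
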
@@ -4068,20 +4068,18 @@ object Classpaths {

  def makeProducts: Initialize[Task[Seq[File]]] = Def.task {
    val c = fileConverter.value
    val resources = copyResources.value.map(_._2).toSet
    val classDir = classDirectory.value
    val syncDir = target.value / (prefix(configuration.value.name) + "sync")
    val factory = CacheStoreFactory(syncDir)
    val cacheStore = factory.make("make-product")
    val t = classDirectory.value
    val vfBackendDir = compileIncremental.value._2
    val setup: Setup = compileIncSetup.value
    val analysisOut = c.toVirtualFile(setup.cachePath())
    val analysisOpt = BuildDef.extractAnalysis(analysisOut, c)
    val backendDir = c.toPath(vfBackendDir)
    // delete outdated files
    Path
      .allSubpaths(classDir)
      .collect { case (f, _) if f.isFile() && !resources.contains(f) => f }
      .foreach(IO.delete)
    IO.copyDirectory(
      source = backendDir.toFile(),
      target = classDir,
    )
    classDir :: Nil
    val resources = copyResources.value.map(_._2).toSet
    Sync.syncClasses(cacheStore, fileConverter = c)(analysisOpt, backendDir, t.toPath())
    t :: Nil
  }

  private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task {
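Without the removed/added column, the makeProducts hunk above reads interleaved: the "// delete outdated files" walk plus the IO.copyDirectory call, ending in classDir :: Nil, belong to the old body, and the CacheStoreFactory-backed Sync.syncClasses call ending in t :: Nil is its replacement. The cache round trip that pairing relies on is the one visible in syncClasses itself; roughly (a sketch assuming the File-based CacheStoreFactory constructor used in the hunk and the sjsonnew codecs from CacheImplicits; the directory name is made up):

import java.io.File
import sbt.util.{ CacheStore, CacheStoreFactory }
import sbt.util.CacheImplicits.*

@main def cacheRoundTrip(): Unit =
  // One named store per task under a per-configuration cache directory,
  // mirroring factory.make("make-product") in the hunk above.
  val factory = CacheStoreFactory(new File("target/compile-sync"))
  val store: CacheStore = factory.make("make-product")
  // Persist this run's stamps; the next run reads them back with a default.
  store.write(Vector("A.class" -> "hash1"))
  val previous = store.read[Vector[(String, String)]](Vector.empty)
  println(previous)
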
@@ -113,6 +113,15 @@ private[sbt] object BuildDef:
  private[sbt] def extractAnalysis(
      metadata: StringAttributeMap,
      converter: FileConverter
  ): Option[CompileAnalysis] =
    for
      ref <- metadata.get(Keys.analysis)
      analysis <- extractAnalysis(VirtualFileRef.of(ref), converter)
    yield analysis

  private[sbt] def extractAnalysis(
      ref: VirtualFileRef,
      converter: FileConverter
  ): Option[CompileAnalysis] =
    import sbt.OptionSyntax.*
    def asBinary(file: File) = FileAnalysisStore.binary(file).get.asScala
@@ -129,9 +138,6 @@ private[sbt] object BuildDef:
        val sizeBytes = attrs.size()
        getOrElseUpdate(ref, lastModified, sizeBytes)(fallback(file))
      catch case _: NoSuchFileException => fallback(file)
    for
      ref <- metadata.get(Keys.analysis)
      content <- getContents(VirtualFileRef.of(ref))
    yield content.getAnalysis
    getContents(ref).map(_.getAnalysis)

end BuildDef
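The two BuildDef hunks split extractAnalysis into overloads: one resolves a reference out of the StringAttributeMap metadata, the other loads the analysis for a VirtualFileRef, so the second hunk can drop the now-redundant for-comprehension in favor of getContents(ref).map(_.getAnalysis). The control flow is ordinary Option chaining, where each step may fail and the first None short-circuits; a sketch with hypothetical stand-ins for the metadata lookup and the analysis store:

// Option chaining as in extractAnalysis (hypothetical types, not sbt API).
final case class Analysis(products: Int)

def lookup(metadata: Map[String, String], key: String): Option[String] =
  metadata.get(key)

def loadContents(ref: String): Option[Analysis] =
  if ref.nonEmpty then Some(Analysis(products = 42)) else None

def extract(metadata: Map[String, String]): Option[Analysis] =
  for
    ref <- lookup(metadata, "analysis")
    analysis <- loadContents(ref)
  yield analysis

@main def extractDemo(): Unit =
  println(extract(Map("analysis" -> "out/inc_compile.zip"))) // Some(Analysis(42))
  println(extract(Map.empty)) // None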