Merge pull request #6611 from Nirvikalpa108/remote-cache-again

Virtualize Sync.sync so it can be cached remotely for resources dir syncing
This commit is contained in:
eugene yokota 2021-07-31 14:56:29 -04:00 committed by GitHub
commit 8586e19f62
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 126 additions and 47 deletions

View File

@@ -50,6 +50,19 @@ object Relation {
private[sbt] def get[X, Y](map: M[X, Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y])
private[sbt] type M[X, Y] = Map[X, Set[Y]]
/** when both parameters taken by relation are the same type, switch calls a function on them. */
private[sbt] def switch[X, Y](relation: Relation[X, X], f: X => Y): Relation[Y, Y] = {
val forward = relation.forwardMap.map {
case (first, second) =>
f(first) -> second.map(f)
}
val reverse = relation.reverseMap.map {
case (first, second) =>
f(first) -> second.map(f)
}
make(forward, reverse)
}
}
/** Binary relation between A and B. It is a set of pairs (_1, _2) for _1 in A, _2 in B. */
@@ -133,7 +146,6 @@ trait Relation[A, B] {
* The value associated with a given `_2` is the set of all `_1`s such that `(_1, _2)` is in this relation.
*/
def reverseMap: Map[B, Set[A]]
}
// Note that we assume without checking that fwd and rev are consistent.

View File

@@ -9,13 +9,16 @@ package sbt
import java.io.{ File, IOException }
import java.util.zip.ZipException
import sbt.internal.inc.MappedFileConverter
import sbt.internal.util.Relation
import sbt.internal.io.TranslatedException
import sbt.util.CacheImplicits._
import sbt.util.{ FileInfo, CacheStore }
import sbt.util.{ CacheStore, FileInfo }
import sbt.io.IO
import sbt.librarymanagement.LibraryManagementCodec
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
import xsbti.{ FileConverter, VirtualFileRef }
/**
* Maintains a set of mappings so that they are uptodate.
@@ -34,20 +37,34 @@ object Sync {
def apply(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists,
outStyle: FileInfo.Style = FileInfo.exists
): Traversable[(File, File)] => Relation[File, File] =
sync(store, inStyle)
def sync(
store: CacheStore,
fileConverter: FileConverter
): Traversable[(File, File)] => Relation[File, File] =
sync(store, FileInfo.lastModified, fileConverter)
def sync(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
): Traversable[(File, File)] => Relation[File, File] =
sync(store, inStyle, MappedFileConverter.empty)
/** this function ensures that the latest files in /src are also in /target, so that they are synchronised */
def sync(
store: CacheStore,
inStyle: FileInfo.Style,
fileConverter: FileConverter
): Traversable[(File, File)] => Relation[File, File] =
mappings => {
val relation = Relation.empty ++ mappings
noDuplicateTargets(relation)
val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap
val (previousRelation, previousInfo) = readInfo(store)(inStyle.format)
val (previousRelation, previousInfo) = readInfoWrapped(store, fileConverter)(inStyle.format)
val removeTargets = previousRelation._2s -- relation._2s
def outofdate(source: File, target: File): Boolean =
@@ -64,7 +81,7 @@ object Sync {
IO.deleteIfEmpty(cleanDirs)
updates.all.foreach((copy _).tupled)
writeInfo(store, relation, currentInfo)(inStyle.format)
writeInfoVirtual(store, relation, currentInfo, fileConverter)(inStyle.format)
relation
}
@@ -108,7 +125,6 @@ object Sync {
bf.write(obj.reverseMap, builder)
builder.endArray()
}
}
def writeInfo[F <: FileInfo](
@@ -118,7 +134,51 @@ object Sync {
)(implicit infoFormat: JsonFormat[F]): Unit =
store.write((relation, info))
def writeInfoVirtual[F <: FileInfo](
store: CacheStore,
relation: Relation[File, File],
info: Map[File, F],
fileConverter: FileConverter
)(implicit infoFormat: JsonFormat[F]): Unit = {
val virtualRelation: Relation[VirtualFileRef, VirtualFileRef] =
Relation.switch(relation, (f: File) => fileConverter.toVirtualFile(f.toPath))
val virtualInfo: Map[VirtualFileRef, F] = info.map {
case (file, fileInfo) =>
fileConverter.toVirtualFile(file.toPath) -> fileInfo
}
import LibraryManagementCodec._
import sjsonnew.IsoString
implicit def virtualFileRefStringIso: IsoString[VirtualFileRef] =
IsoString.iso[VirtualFileRef](_.toString, VirtualFileRef.of(_))
store.write(
(
virtualRelation,
virtualInfo
)
)
}
type RelationInfo[F] = (Relation[File, File], Map[File, F])
type RelationInfoVirtual[F] = (Relation[VirtualFileRef, VirtualFileRef], Map[VirtualFileRef, F])
def readInfoWrapped[F <: FileInfo](store: CacheStore, fileConverter: FileConverter)(
implicit infoFormat: JsonFormat[F]
): RelationInfo[F] = {
convertFromVirtual(readInfoVirtual(store)(infoFormat), fileConverter)
}
def convertFromVirtual[F <: FileInfo](
info: RelationInfoVirtual[F],
fileConverter: FileConverter
): RelationInfo[F] = {
val firstPart = Relation.switch(info._1, (r: VirtualFileRef) => fileConverter.toPath(r).toFile)
val secondPart = info._2.map {
case (file, fileInfo) =>
fileConverter.toPath(file).toFile -> fileInfo
}
firstPart -> secondPart
}
def readInfo[F <: FileInfo](
store: CacheStore
@@ -135,8 +195,37 @@ object Sync {
}
}
def readInfoVirtual[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfoVirtual[F] =
try {
readUncaughtVirtual[F](store)(infoFormat)
} catch {
case _: IOException =>
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])
case _: ZipException =>
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])
case e: TranslatedException =>
e.getCause match {
case _: ZipException =>
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])
case _ => throw e
}
}
private def readUncaught[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
private def readUncaughtVirtual[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfoVirtual[F] = {
import sjsonnew.IsoString
implicit def virtualFileRefStringIso: IsoString[VirtualFileRef] =
IsoString.iso[VirtualFileRef](_.toString, VirtualFileRef.of(_))
store.read(default =
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])
)
}
}

View File

@@ -2523,7 +2523,10 @@ object Defaults extends BuildCommon {
val t = classDirectory.value
val dirs = resourceDirectories.value.toSet
val s = streams.value
val cacheStore = s.cacheStoreFactory make "copy-resources"
val syncDir = crossTarget.value / (prefix(configuration.value.name) + "sync")
val factory = CacheStoreFactory(syncDir)
val cacheStore = factory.make("copy-resource")
val converter = fileConverter.value
val flt: File => Option[File] = flat(t)
val transform: File => Option[File] = (f: File) => rebase(dirs, t)(f).orElse(flt(f))
val mappings: Seq[(File, File)] = resources.value.flatMap {
@@ -2531,7 +2534,7 @@ object Defaults extends BuildCommon {
case _ => None
}
s.log.debug("Copy resource mappings: " + mappings.mkString("\n\t", "\n\t", ""))
Sync.sync(cacheStore)(mappings)
Sync.sync(cacheStore, fileConverter = converter)(mappings)
mappings
}

View File

@@ -25,7 +25,6 @@ import sbt.internal.inc.{ HashUtil, JarUtils }
import sbt.internal.librarymanagement._
import sbt.internal.remotecache._
import sbt.io.IO
import sbt.io.Path.{ flat, rebase }
import sbt.io.syntax._
import sbt.librarymanagement._
import sbt.librarymanagement.ivy.{ Credentials, IvyPaths, UpdateOptions }
@@ -35,8 +34,6 @@ import sbt.nio.Keys.{ inputFileStamps, outputFileStamps }
import sbt.std.TaskExtra._
import sbt.util.InterfaceUtil.toOption
import sbt.util.Logger
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
import xsbti.FileConverter
import scala.annotation.nowarn
@@ -158,21 +155,8 @@ object RemoteCache {
++ inConfig(Test)(configCacheSettings(testArtifact(Test, cachedTestClassifier))))
def getResourceFilePaths() = Def.task {
import sbt.librarymanagement.LibraryManagementCodec._
val t = classDirectory.value
val dirs = resourceDirectories.value.toSet
val flt: File => Option[File] = flat(t)
val cacheDirectory = crossTarget.value / (prefix(configuration.value.name) + "caches")
val converter = fileConverter.value
val transform: File => Option[File] = (f: File) => rebase(dirs, t)(f).orElse(flt(f))
val resourcesInClassesDir = resources.value
.flatMap(x => transform(x).toList)
.map(f => converter.toVirtualFile(f.toPath).toString)
val json = Converter.toJson[Seq[String]](resourcesInClassesDir).get
val tmp = CompactPrinter(json)
val file = cacheDirectory / "resources.json"
IO.write(file, tmp)
val syncDir = crossTarget.value / (prefix(configuration.value.name) + "sync")
val file = syncDir / "copy-resource"
file
}
@@ -193,7 +177,7 @@ object RemoteCache {
}
val rf = getResourceFilePaths.value
if (rf.exists) {
JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/resources.json"))
JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/copy-resources.txt"))
}
// val testStream = (test / streams).?.value
// testStream foreach { s =>
@@ -277,7 +261,7 @@ object RemoteCache {
val smi = scalaModuleInfo.value
val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value
val nonPom = artifacts.filterNot(isPomArtifact).toVector
val converter = fileConverter.value
val copyResources = getResourceFilePaths.value
m.withModule(log) {
case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(r.name)
@@ -310,7 +294,7 @@ object RemoteCache {
findJar(classifier, v, jars) match {
case Some(jar) =>
extractJar(art, jar, converter)
extractJar(art, jar, copyResources)
log.info(s"remote cache artifact extracted for $p $classifier")
case None =>
@@ -401,13 +385,13 @@ object RemoteCache {
private def extractJar(
cacheArtifact: RemoteCacheArtifact,
jar: File,
converter: FileConverter
copyResources: File
): Unit =
cacheArtifact match {
case a: CompileRemoteCacheArtifact =>
extractCache(jar, a.extractDirectory, preserveLastModified = true) { output =>
extractAnalysis(output, a.analysisFile)
extractResourceList(output, converter)
extractResourceList(output, copyResources)
}
case a: TestRemoteCacheArtifact =>
@@ -445,20 +429,11 @@ object RemoteCache {
}
}
private def extractResourceList(output: File, converter: FileConverter): Unit = {
import sbt.librarymanagement.LibraryManagementCodec._
import sjsonnew.support.scalajson.unsafe.{ Converter, Parser }
import xsbti.VirtualFileRef
val resourceFilesToDelete = output / "META-INF" / "resources.json"
if (resourceFilesToDelete.exists) {
val readFile = IO.read(resourceFilesToDelete)
val parseFile = Parser.parseUnsafe(readFile)
val resourceFiles = Converter.fromJsonUnsafe[Seq[String]](parseFile)
val paths = resourceFiles.map(f => converter.toPath(VirtualFileRef.of(f)))
val filesToDelete = paths.map(_.toFile)
for (file <- filesToDelete if file.getAbsolutePath.startsWith(output.getAbsolutePath))
IO.delete(file)
private def extractResourceList(output: File, copyResources: File): Unit = {
val metaDir = output / "META-INF"
val extractedCopyResources = metaDir / "copy-resources.txt"
if (extractedCopyResources.exists) {
IO.move(extractedCopyResources, copyResources)
}
}

View File

@@ -1 +1 @@
sbt.version=1.5.4
sbt.version=1.5.4