fixes remote caching not managing resource files

What is the problem?
When using remote caching, the resource files are not tracked, so if one
has changed, pullRemoteCache will deliver both the old version of the
resource file and the changed one.

This is a problem, because it's not the behaviour that our users will
expect and it's not in keeping with the contract of this feature.

Why is this happening?
Zinc, sbt's incremental compiler, keeps track of changes that have
been made. It keeps this in what is called the Analysis file.
However, resource files are not tracked in the Analysis file, so
remote caching does not invalidate the stale resource file and replace
it with the latest version.

What is the solution?
PullRemoteCache deletes all of the resource files. After this,
copyResources is called by PackageBin, which puts the latest
version of the resources back.
This commit is contained in:
Amina Adewusi 2021-06-18 18:01:45 +01:00
parent 3ef1c5508b
commit f82c0c4c5f
1 changed file with 65 additions and 14 deletions

View File

@ -10,29 +10,34 @@ package internal
import java.io.File
import java.nio.file.Path
import Keys._
import SlashSyntax0._
import ScopeFilter.Make._
import Project._ // for tag and inTask()
import org.apache.ivy.core.module.descriptor.{ Artifact => IArtifact, DefaultArtifact }
import org.apache.ivy.core.resolve.DownloadOptions
import org.apache.ivy.core.module.descriptor.{ DefaultArtifact, Artifact => IArtifact }
import org.apache.ivy.core.report.DownloadStatus
import org.apache.ivy.core.resolve.DownloadOptions
import org.apache.ivy.plugins.resolver.DependencyResolver
import std.TaskExtra._ // for join
import sbt.Defaults.prefix
import sbt.Keys._
import sbt.Project._
import sbt.ScopeFilter.Make._
import sbt.SlashSyntax0._
import sbt.coursierint.LMCoursier
import sbt.internal.inc.{ HashUtil, JarUtils }
import sbt.internal.librarymanagement._
import sbt.internal.remotecache._
import sbt.io.IO
import sbt.io.Path.{ flat, rebase }
import sbt.io.syntax._
import sbt.librarymanagement._
import sbt.librarymanagement.ivy.{ Credentials, IvyPaths, UpdateOptions }
import sbt.librarymanagement.syntax._
import sbt.nio.FileStamp
import sbt.nio.Keys.{ inputFileStamps, outputFileStamps }
import sbt.internal.librarymanagement._
import sbt.io.IO
import sbt.io.syntax._
import sbt.internal.remotecache._
import sbt.internal.inc.{ HashUtil, JarUtils }
import sbt.std.TaskExtra._
import sbt.util.InterfaceUtil.toOption
import sbt.util.Logger
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
import xsbti.FileConverter
import scala.annotation.nowarn
object RemoteCache {
@ -152,6 +157,25 @@ object RemoteCache {
) ++ inConfig(Compile)(configCacheSettings(compileArtifact(Compile, cachedCompileClassifier)))
++ inConfig(Test)(configCacheSettings(testArtifact(Test, cachedTestClassifier))))
// Writes the list of resource files, as they will appear under the class
// directory, to <crossTarget>/<config>caches/resources.json and returns that
// file. The manifest is later embedded in the cached jar (as
// META-INF/resources.json) so that pulling the cache can delete stale
// resources — see extractResourceList.
def getResourceFilePaths() = Def.task {
  // Brings the implicit JsonFormat[Seq[String]] needed by Converter.toJson.
  import sbt.librarymanagement.LibraryManagementCodec._
  val t = classDirectory.value
  val dirs = resourceDirectories.value.toSet
  // Fallback mapping: place a file directly under the class directory,
  // discarding its source directory structure.
  val flt: File => Option[File] = flat(t)
  val cacheDirectory = crossTarget.value / (prefix(configuration.value.name) + "caches")
  val converter = fileConverter.value
  // Rebase each resource from its resource directory into the class
  // directory; fall back to the flat mapping when the file is not under any
  // known resource directory.
  val transform: File => Option[File] = (f: File) => rebase(dirs, t)(f).orElse(flt(f))
  // Store virtual-file reference strings rather than absolute local paths, so
  // the manifest can be resolved via the FileConverter on the pulling side.
  val resourcesInClassesDir = resources.value
    .flatMap(x => transform(x).toList)
    .map(f => converter.toVirtualFile(f.toPath).toString)
  val json = Converter.toJson[Seq[String]](resourcesInClassesDir).get
  val tmp = CompactPrinter(json)
  val file = cacheDirectory / "resources.json"
  IO.write(file, tmp)
  file
}
@nowarn
def configCacheSettings[A <: RemoteCacheArtifact](
cacheArtifactTask: Def.Initialize[Task[A]]
@ -166,6 +190,10 @@ object RemoteCache {
if (af.exists) {
JarUtils.includeInJar(artp, Vector(af -> s"META-INF/inc_compile.zip"))
}
val rf = getResourceFilePaths.value
if (rf.exists) {
JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/resources.json"))
}
// val testStream = (test / streams).?.value
// testStream foreach { s =>
// val sf = Defaults.succeededFile(s.cacheDirectory)
@ -248,6 +276,7 @@ object RemoteCache {
val smi = scalaModuleInfo.value
val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value
val nonPom = artifacts.filterNot(isPomArtifact).toVector
val converter = fileConverter.value
m.withModule(log) {
case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(r.name)
@ -280,7 +309,7 @@ object RemoteCache {
findJar(classifier, v, jars) match {
case Some(jar) =>
extractJar(art, jar)
extractJar(art, jar, converter)
log.info(s"remote cache artifact extracted for $p $classifier")
case None =>
@ -368,11 +397,16 @@ object RemoteCache {
jars.find(_.toString.endsWith(suffix))
}
private def extractJar(cacheArtifact: RemoteCacheArtifact, jar: File): Unit =
private def extractJar(
cacheArtifact: RemoteCacheArtifact,
jar: File,
converter: FileConverter
): Unit =
cacheArtifact match {
case a: CompileRemoteCacheArtifact =>
extractCache(jar, a.extractDirectory, preserveLastModified = true) { output =>
extractAnalysis(output, a.analysisFile)
extractResourceList(output, converter)
}
case a: TestRemoteCacheArtifact =>
@ -410,6 +444,23 @@ object RemoteCache {
}
}
/**
 * Deletes the resource files listed in the `META-INF/resources.json`
 * manifest of an extracted cache artifact, so that `copyResources` can
 * subsequently repopulate the class directory with the current versions.
 *
 * Does nothing when no manifest exists. Only files that resolve to a
 * location inside `output` are deleted, guarding against a manifest that
 * points outside the extraction directory.
 *
 * @param output    directory the cached jar was extracted into
 * @param converter resolves the manifest's virtual-file refs back to paths
 */
private def extractResourceList(output: File, converter: FileConverter): Unit = {
  // Implicit JsonFormat[Seq[String]] for Converter.fromJsonUnsafe.
  import sbt.librarymanagement.LibraryManagementCodec._
  import sjsonnew.support.scalajson.unsafe.{ Converter, Parser }
  import xsbti.VirtualFileRef
  val resourceManifest = output / "META-INF" / "resources.json"
  if (resourceManifest.exists) {
    val json = Parser.parseUnsafe(IO.read(resourceManifest))
    val resourceFiles = Converter.fromJsonUnsafe[Seq[String]](json)
    val outputPath = output.toPath.toAbsolutePath.normalize
    for (ref <- resourceFiles) {
      val path = converter.toPath(VirtualFileRef.of(ref)).toAbsolutePath.normalize
      // Path.startsWith compares whole name components, so a sibling such as
      // "/out-evil/x" is NOT considered inside "/out" — unlike the raw string
      // prefix check (getAbsolutePath.startsWith), which would match it.
      if (path.startsWith(outputPath)) IO.delete(path.toFile)
    }
  }
}
private def extractTestResult(output: File, testResult: File): Unit = {
//val expandedTestResult = output / "META-INF" / "succeeded_tests"
//if (expandedTestResult.exists) {