mirror of https://github.com/sbt/sbt.git
[2.x] perf: Use bytecodeHash for incremental test (#9152)
**Problem** There is a performance issue when building the ClassStamp for the incremental test.
**Solution** Reuse the bytecodeHash created during compilation instead of re-hashing the product class files.
parent 7b7aba5245
commit 5fe73c82fd
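The change replaces per-class SHA-256 stamping of product class files with a stamp derived from the bytecodeHash and transitiveBytecodeHash that the compiler analysis already carries. A rough standalone illustration of that idea (AnalyzedClassInfo below is a simplified stand-in, not the actual Zinc API):

```scala
// Minimal sketch, not the actual Zinc/sbt API: derive a per-class stamp from
// hashes the compiler analysis already carries, instead of re-hashing .class files.
final case class AnalyzedClassInfo( // hypothetical stand-in for Zinc's AnalyzedClass
    bytecodeHash: Long,
    transitiveBytecodeHash: Long
)

object StampSketch:
  // Same mixing as the patch: 37 * (17 + transitive hash) + own bytecode hash.
  def classStamp(info: AnalyzedClassInfo): Long =
    37L * (17L + info.transitiveBytecodeHash) + info.bytecodeHash

  def main(args: Array[String]): Unit =
    val a = AnalyzedClassInfo(bytecodeHash = 0x1234L, transitiveBytecodeHash = 0xabcdL)
    println(classStamp(a)) // any change to either hash changes the stamp
```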
@@ -379,6 +379,7 @@ lazy val utilCache = project
    contrabandSettings,
    mimaSettings,
    mimaBinaryIssueFilters ++= Seq(
      exclude[ReversedMissingMethodProblem]("sbt.util.CacheImplicits.sbt$util*")
    ),
    Test / fork := true,
  )
@@ -742,6 +743,7 @@ lazy val mainProj = (project in file("main"))
    Compile / doc / sources := Nil,
    mimaSettings,
    mimaBinaryIssueFilters ++= Vector(
      exclude[DirectMissingMethodProblem]("sbt.internal.ClassStamper.stampVf")
    ),
  )
  .dependsOn(lmCore, lmIvy, lmCoursierShadedPublishing)
@@ -19,6 +19,7 @@ import sbt.internal.util.Types.const
import sbt.io.{ GlobFilter, IO, NameFilter }
import sbt.protocol.testing.TestResult
import sbt.util.{ ActionCache, BuildWideCacheConfiguration, CacheLevelTag, Digest, Logger }
import sbt.util.CacheImplicits
import sbt.util.CacheImplicits.given
import scala.collection.concurrent
import scala.collection.mutable
@@ -151,10 +152,12 @@ class ClassStamper(
    converter: FileConverter,
):
  private val stamps = mutable.Map.empty[String, SortedSet[Digest]]
  private val vfStamps = mutable.Map.empty[VirtualFileRef, Digest]
  private val internalStamps = mutable.Map.empty[String, SortedSet[Digest]]
  private lazy val analyses = classpath
    .flatMap(a => BuildDef.extractAnalysis(a.metadata, converter))
    .collect { case analysis: Analysis => analysis }
  private val stampVf: VirtualFileRef => Digest =
    CacheImplicits.virtualFileRefToDigest(_)(converter)

  /**
   * Given a classpath and a class name, this tries to create a SHA-256 digest.
@@ -180,7 +183,7 @@ class ClassStamper(
    import analysis.relations
    // log.debug(s"test: internalStamp($javaClassName)")
    def internalStamp0(className: String): SortedSet[Digest] =
      // log.debug(s"  internalStamp: relations = $relations")
      // Zinc doesn't fully track the transitive dependencies
      val internalDeps = relations
        .internalClassDeps(className)
        .flatMap: otherCN =>
@@ -196,17 +199,11 @@ class ClassStamper(
          relations.libraryClassName
            .reverse(libClassName)
            .map(stampVf)
      val classDigests = relations
        .definesClass(className)
        .flatMap: sourceFile =>
          relations
            .products(sourceFile)
            .map(stampVf)
      // TODO: substitute the above with
      // val classDigests = analysis.apis.internal
      //   .get(className)
      //   .map: analyzed =>
      //     0L // analyzed.??? we need a hash here
      val classDigests = analysis.apis.internal
        .get(className)
        .toSet
        .map: analyzed =>
          Digest.dummy(37 * (17 + analyzed.transitiveBytecodeHash) + analyzed.bytecodeHash)
      val xs =
        (internalDeps union internalJarDeps union externalDeps union classDigests)
          .to(SortedSet)
@@ -221,10 +218,4 @@ class ClassStamper(
    // Note: internalClassDeps uses Scala-encoded class name for companion objects
    val classNames = relations.productClassName.reverse(javaClassName)
    SortedSet(classNames.toSeq*).flatMap(internalStamp0)

  def stampVf(vf: VirtualFileRef): Digest =
    vf match
      case h: HashedVirtualFileRef => Digest(h)
      case _ =>
        vfStamps.getOrElseUpdate(vf, Digest.sha256Hash(converter.toPath(vf)))
end ClassStamper
@@ -13,7 +13,7 @@ import java.nio.file.{ Files, NoSuchFileException }
import java.nio.file.attribute.BasicFileAttributes
import java.util.concurrent.atomic.{ AtomicLong, AtomicReference }
import sjsonnew.BasicJsonProtocol
import xsbti.{ HashedVirtualFileRef, PathBasedFile }
import xsbti.{ FileConverter, HashedVirtualFileRef, PathBasedFile, VirtualFileRef }

object CacheImplicits extends CacheImplicits:
  private[sbt] val defaultLocalDigestCacheByteSize = 1024L * 1024L
@@ -24,6 +24,9 @@ trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol:
  private val weigher: Weigher[String, (String, Long, Long)] = { case (k, (v1, _, _)) =>
    k.size + v1.size + 16
  }
  private val digestWeigher: Weigher[String, (Digest, Long, Long)] = { case (k, (v1, _, _)) =>
    k.size + v1.digestSize + 16
  }

  private val stampCache: AtomicReference[CCache[String, (String, Long, Long)]] =
    AtomicReference(
@@ -34,6 +37,15 @@ trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol:
        .build()
    )

  private val digestCache: AtomicReference[CCache[String, (Digest, Long, Long)]] =
    AtomicReference(
      Caffeine
        .newBuilder()
        .maximumWeight(localDigestCacheByteSize.get())
        .weigher(digestWeigher)
        .build()
    )

  private[sbt] def setCacheSize(size: Long): Unit =
    if localDigestCacheByteSize.get() == size then ()
    else
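For reference, the digestCache introduced above relies on Caffeine's weight-bounded caching: a Weigher estimates each entry's byte footprint and maximumWeight caps the total before eviction kicks in. A minimal standalone sketch of that pattern (the names and the 1 MB bound are illustrative, not taken from this commit):

```scala
import com.github.benmanes.caffeine.cache.{ Cache, Caffeine, Weigher }

object DigestCacheSketch:
  // Approximate cost of one entry in bytes: key characters + value bytes + bookkeeping.
  private val byteWeigher: Weigher[String, Array[Byte]] =
    (key: String, value: Array[Byte]) => key.length + value.length + 16

  // Caffeine evicts entries once the summed weights exceed maximumWeight.
  val cache: Cache[String, Array[Byte]] =
    Caffeine
      .newBuilder()
      .maximumWeight(1024L * 1024L) // ~1 MB, in the spirit of defaultLocalDigestCacheByteSize
      .weigher[String, Array[Byte]](byteWeigher)
      .build[String, Array[Byte]]()

  def main(args: Array[String]): Unit =
    cache.put("a/Foo.class", Array.fill(32)(0: Byte))
    println(Option(cache.getIfPresent("a/Foo.class")).map(_.length)) // Some(32)
```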
@@ -46,6 +58,14 @@ trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol:
          .weigher(weigher)
          .build()
      )
      digestCache.get().invalidateAll()
      digestCache.set(
        Caffeine
          .newBuilder()
          .maximumWeight(localDigestCacheByteSize.get())
          .weigher(digestWeigher)
          .build()
      )

  private def getOrElseUpdate(ref: HashedVirtualFileRef, lastModified: Long, sizeBytes: Long)(
      value: => String
@@ -57,6 +77,16 @@ trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol:
        stampCache.get().put(ref.id(), (v, lastModified, sizeBytes))
        v

  private def getOrElseUpdate(ref: VirtualFileRef, lastModified: Long, sizeBytes: Long)(
      value: => Digest
  ) =
    Option(digestCache.get().getIfPresent(ref.id())) match
      case Some((v, mod, i)) if lastModified == mod && sizeBytes == i => v
      case _ =>
        val v = value
        digestCache.get().put(ref.id(), (v, lastModified, sizeBytes))
        v

  /**
   * A string representation of HashedVirtualFileRef, delimited by `>`.
   */
@@ -76,4 +106,21 @@ trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol:
          getOrElseUpdate(ref, lastModified, sizeBytes)(fallback)
        catch case e: NoSuchFileException => throw e
      case _ => fallback

  def virtualFileRefToDigest(vf: VirtualFileRef)(converter: FileConverter): Digest =
    vf match
      case pbf: PathBasedFile =>
        val path = pbf.toPath
        val attrs = Files.readAttributes(path, classOf[BasicFileAttributes])
        def fallback: Digest = Digest.sha256Hash(path)
        if attrs.isDirectory then sys.error(s"$vf is a directory")
        else
          val lastModified = attrs.lastModifiedTime().toMillis()
          val sizeBytes = attrs.size()
          vf match
            case h: HashedVirtualFileRef =>
              getOrElseUpdate(vf, lastModified, sizeBytes)(Digest(h))
            case _ =>
              getOrElseUpdate(vf, lastModified, sizeBytes)(fallback)
      case _ => Digest.sha256Hash(converter.toPath(vf))
end CacheImplicits
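virtualFileRefToDigest above guards the cached digest with the file's last-modified time and size, recomputing the SHA-256 only when either changes. A simplified, self-contained sketch of that guard using plain java.nio and MessageDigest (not the sbt implementation):

```scala
import java.nio.file.{ Files, Path }
import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import scala.collection.concurrent.TrieMap

object GuardedDigestSketch:
  // Cached digest plus the (lastModified, size) metadata it was computed against.
  private val cache = TrieMap.empty[Path, (Array[Byte], Long, Long)]

  private def sha256(path: Path): Array[Byte] =
    MessageDigest.getInstance("SHA-256").digest(Files.readAllBytes(path))

  // Recompute the hash only when (lastModified, size) no longer match the cached entry.
  def digestOf(path: Path): Array[Byte] =
    val attrs = Files.readAttributes(path, classOf[BasicFileAttributes])
    val mod = attrs.lastModifiedTime().toMillis()
    val size = attrs.size()
    cache.get(path) match
      case Some((d, m, s)) if m == mod && s == size => d
      case _ =>
        val d = sha256(path)
        cache.update(path, (d, mod, size))
        d
```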
@@ -26,6 +26,7 @@ object Digest:
    def hashHexString: String = parse(d)._2
    def toBytes: Array[Byte] = parse(d)._4
    def sizeBytes: Long = parse(d)._3
    private[sbt] def digestSize: Int = d.size

  given digestOrd(using ord: Ordering[String]): Ordering[Digest] with
    def compare(x: Digest, y: Digest) =