mirror of https://github.com/sbt/sbt.git
Merge pull request #7525 from eed3si9n/wip/remoteapis
[2.x] Remote caching support
This commit is contained in commit facd7ab64e.

build.sbt (31 changes)
@@ -419,11 +419,26 @@ lazy val utilRelation = (project in file("internal") / "util-relation")

// Persisted caching based on sjson-new
lazy val utilCache = project
  .in(file("util-cache"))
  .enablePlugins(
    ContrabandPlugin,
    // we generate JsonCodec only for actionresult.conta
    // JsonCodecPlugin,
  )
  .dependsOn(utilLogging)
  .settings(
    testedBaseSettings,
    name := "Util Cache",
    libraryDependencies ++=
      Seq(sjsonNewScalaJson.value, sjsonNewMurmurhash.value, scalaReflect.value),
      Seq(
        sjsonNewCore.value,
        sjsonNewScalaJson.value,
        sjsonNewMurmurhash.value,
        scalaReflect.value
      ),
    Compile / managedSourceDirectories +=
      baseDirectory.value / "src" / "main" / "contraband-scala",
    Compile / generateContrabands / sourceManaged := baseDirectory.value / "src" / "main" / "contraband-scala",
    Compile / generateContrabands / contrabandFormatsForType := ContrabandConfig.getFormats,
    utilMimaSettings,
    Test / fork := true,
  )

@@ -645,6 +660,19 @@ lazy val dependencyTreeProj = (project in file("dependency-tree"))
    mimaPreviousArtifacts := Set.empty,
  )

lazy val remoteCacheProj = (project in file("sbt-remote-cache"))
  .dependsOn(sbtProj)
  .settings(
    sbtPlugin := true,
    baseSettings,
    name := "sbt-remote-cache",
    pluginCrossBuild / sbtVersion := version.value,
    publishMavenStyle := true,
    // mimaSettings,
    mimaPreviousArtifacts := Set.empty,
    libraryDependencies += remoteapis,
  )

// Implementation and support code for defining actions.
lazy val actionsProj = (project in file("main-actions"))
  .dependsOn(

@@ -1266,6 +1294,7 @@ def allProjects =
    utilTracking,
    collectionProj,
    coreMacrosProj,
    remoteCacheProj,
  ) ++ lowerUtilProjects

// These need to be cross published to 2.12 and 2.13 for Zinc

@@ -30,6 +30,7 @@ import testing.{
}

import scala.annotation.tailrec
import scala.util.control.NonFatal
import sbt.internal.util.ManagedLogger
import sbt.util.Logger
import sbt.protocol.testing.TestResult

@@ -534,15 +535,19 @@ object Tests {
      case analysis: Analysis =>
        val acs: Seq[xsbti.api.AnalyzedClass] = analysis.apis.internal.values.toVector
        acs.flatMap { ac =>
          val companions = ac.api
          val all =
            Seq(companions.classApi: Definition, companions.objectApi: Definition) ++
              (companions.classApi.structure.declared.toSeq: Seq[Definition]) ++
              (companions.classApi.structure.inherited.toSeq: Seq[Definition]) ++
              (companions.objectApi.structure.declared.toSeq: Seq[Definition]) ++
              (companions.objectApi.structure.inherited.toSeq: Seq[Definition])

          all
          try
            val companions = ac.api
            val all =
              Seq(companions.classApi: Definition, companions.objectApi: Definition) ++
                (companions.classApi.structure.declared.toSeq: Seq[Definition]) ++
                (companions.classApi.structure.inherited.toSeq: Seq[Definition]) ++
                (companions.objectApi.structure.declared.toSeq: Seq[Definition]) ++
                (companions.objectApi.structure.inherited.toSeq: Seq[Definition])
            all
          catch
            case NonFatal(e) =>
              if e.getMessage.startsWith("No companions") then Nil
              else throw e
        }.toSeq
    }
  def discover(

@@ -233,10 +233,12 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
  private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore()
  def cacheStore: ActionCacheStore = _cacheStore
  private[sbt] var _outputDirectory: Option[Path] = None
  private[sbt] val cacheEventLog: CacheEventLog = CacheEventLog()
  def cacheConfiguration: BuildWideCacheConfiguration =
    BuildWideCacheConfiguration(
      _cacheStore,
      _outputDirectory.getOrElse(sys.error("outputDirectory has not been set")),
      cacheEventLog,
    )

  inline def cachedTask[A1: JsonFormat](inline a1: A1): Def.Initialize[Task[A1]] =

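The hunk above wires a build-wide cache configuration into `Def` and exposes an inline `cachedTask` macro. A minimal sketch of how a build might declare a cached task, assuming `Def.cachedTask` composes inside `:=` the same way `Def.task` does (the task key and body are illustrative, not part of this commit):

```scala
// Hypothetical build.sbt fragment: the task body is hashed together with its
// inputs, and the returned value plus any declared outputs go through Def.cacheStore.
val greeting = taskKey[String]("A trivial cached task")

greeting := Def.cachedTask {
  // re-evaluated only when the cache lookup misses
  "hello, remote cache"
}.value
```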
@@ -4654,6 +4654,16 @@ trait BuildExtra extends BuildCommon with DefExtra {
      scalaBinaryVersion.value
    )

  /**
   * Adds remote cache plugin.
   */
  def addRemoteCachePlugin: Setting[Seq[ModuleID]] =
    libraryDependencies += sbtPluginExtra(
      ModuleID("org.scala-sbt", "sbt-remote-cache", sbtVersion.value),
      sbtBinaryVersion.value,
      scalaBinaryVersion.value
    )

  /**
   * Adds `dependency` as an sbt plugin for the specific sbt version `sbtVersion` and Scala version `scalaVersion`.
   * Typically, use the default values for these versions instead of specifying them explicitly.

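Since `addRemoteCachePlugin` is just a `libraryDependencies` setting built with `sbtPluginExtra`, a build would presumably opt in the same way it uses `addSbtPlugin`; a hedged sketch:

```scala
// Hypothetical project/plugins.sbt (or .sbt build file) entry; like addSbtPlugin,
// the returned Setting[Seq[ModuleID]] can stand alone as a bare expression.
addRemoteCachePlugin
```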
@@ -9,7 +9,7 @@ package sbt

import java.nio.file.{ Path => NioPath }
import java.io.File
import java.net.URL
import java.net.{ URL, URI }
import lmcoursier.definitions.{ CacheLogger, ModuleMatchers, Reconciliation }
import lmcoursier.{ CoursierConfiguration, FallbackDependency }
import org.apache.ivy.core.module.descriptor.ModuleDescriptor

@@ -116,6 +116,16 @@ object Keys {
  val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers)
  val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.")
  val cacheStores = settingKey[Seq[ActionCacheStore]]("Cache backends")
  @cacheLevel(include = Array.empty)
  val remoteCache = settingKey[Option[URI]]("URI of the remote cache")
  @cacheLevel(include = Array.empty)
  val remoteCacheTlsCertificate = settingKey[Option[File]]("Path to a TLS certificate (*.crt) that is trusted to sign server certificates")
  @cacheLevel(include = Array.empty)
  val remoteCacheTlsClientCertificate = settingKey[Option[File]]("Path to a TLS client certificate *.crt used with remoteCacheTlsClientKey ")
  @cacheLevel(include = Array.empty)
  val remoteCacheTlsClientKey = settingKey[Option[File]]("Path to a TLS client key *.pem used with remoteCacheTlsClientCertificate")
  @cacheLevel(include = Array.empty)
  val remoteCacheHeaders = settingKey[Seq[String]]("List of key=value headers to be sent to the remote cache.")
  val rootOutputDirectory = SettingKey(BasicKeys.rootOutputDirectory)

  // val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)

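These keys would typically be overridden at the global level in a user's build; a hedged sketch, where the endpoint and token are placeholder values (not part of this commit):

```scala
// Illustrative settings only; host, instance path, and token are placeholders.
Global / remoteCache := Some(uri("grpcs://cache.example.com:443/main"))
Global / remoteCacheHeaders := Seq("authorization=Bearer <token>")
```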
@@ -113,6 +113,11 @@ object RemoteCache {
        DiskActionCacheStore(localCacheDirectory.value.toPath(), fileConverter.value)
      )
    },
    remoteCache := SysProp.remoteCache,
    remoteCacheTlsCertificate := SysProp.remoteCacheTlsCertificate,
    remoteCacheTlsClientCertificate := SysProp.remoteCacheTlsClientCertificate,
    remoteCacheTlsClientKey := SysProp.remoteCacheTlsClientKey,
    remoteCacheHeaders := SysProp.remoteCacheHeaders,
  )

  lazy val projectSettings: Seq[Def.Setting[_]] = (Seq(

@@ -28,10 +28,11 @@ object Aggregation {
      success: Boolean
  )

  final case class Complete[T](
  final case class Complete[A](
      start: Long,
      stop: Long,
      results: sbt.Result[Seq[KeyValue[T]]],
      results: sbt.Result[Seq[KeyValue[A]]],
      cacheSummary: String,
      state: State
  )

@@ -68,44 +69,43 @@ object Aggregation {
  )(implicit display: Show[ScopedKey[_]]): Parser[() => State] =
    Command.applyEffect(seqParser(ps))(ts => runTasks(s, ts, DummyTaskMap(Nil), show))

  private def showRun[T](complete: Complete[T], show: ShowConfig)(implicit
      display: Show[ScopedKey[_]]
  ): Unit = {
    import complete._
  private def showRun[A](complete: Complete[A], show: ShowConfig)(implicit
      display: Show[ScopedKey[?]]
  ): Unit =
    import complete.*
    val log = state.log
    val extracted = Project.extract(state)
    val success = results match
      case Result.Value(_) => true
      case Result.Inc(_)   => false
    results.toEither.foreach { r =>
      if (show.taskValues) printSettings(r, show.print)
      if show.taskValues then printSettings(r, show.print) else ()
    }
    if (show.success && !state.get(suppressShow).getOrElse(false))
      printSuccess(start, stop, extracted, success, log)
  }
    if show.success && !state.get(suppressShow).getOrElse(false) then
      printSuccess(start, stop, extracted, success, cacheSummary, log)
    else ()

  def timedRun[T](
  def timedRun[A](
      s: State,
      ts: Values[Task[T]],
      extra: DummyTaskMap
  ): Complete[T] = {
      ts: Values[Task[A]],
      extra: DummyTaskMap,
  ): Complete[A] =
    import EvaluateTask._
    import std.TaskExtra._

    val extracted = Project extract s
    val extracted = Project.extract(s)
    import extracted.structure
    val toRun = ts.map { case KeyValue(k, t) => t.map(v => KeyValue(k, v)) }.join
    val roots = ts.map { case KeyValue(k, _) => k }
    val config = extractedTaskConfig(extracted, structure, s)

    val start = System.currentTimeMillis
    val (newS, result) = withStreams(structure, s) { str =>
    val cacheEventLog = Def.cacheConfiguration.cacheEventLog
    cacheEventLog.clear()
    val (newS, result) = withStreams(structure, s): str =>
      val transform = nodeView(s, str, roots, extra)
      runTask(toRun, s, str, structure.index.triggers, config)(using transform)
    }
    val stop = System.currentTimeMillis
    Complete(start, stop, result, newS)
  }
    val cacheSummary = cacheEventLog.summary
    Complete(start, stop, result, cacheSummary, newS)

  def runTasks[A1](
      s: State,

@@ -124,20 +124,22 @@ object Aggregation {
      stop: Long,
      extracted: Extracted,
      success: Boolean,
      log: Logger
  ): Unit = {
    import extracted._
      cacheSummary: String,
      log: Logger,
  ): Unit =
    import extracted.*
    def get(key: SettingKey[Boolean]): Boolean =
      (currentRef / key).get(structure.data) getOrElse true

    if (get(showSuccess)) {
      if (get(showTiming)) {
        val msg = timingString(start, stop, structure.data, currentRef)
        if (success) log.success(msg) else if (Terminal.get.isSuccessEnabled) log.error(msg)
      } else if (success)
        log.success("")
    }
  }
    if get(showSuccess) then
      if get(showTiming) then
        val msg = timingString(start, stop, structure.data, currentRef) + (
          if cacheSummary == "" then ""
          else ", " + cacheSummary
        )
        if success then log.success(msg)
        else if Terminal.get.isSuccessEnabled then log.error(msg)
      else if success then log.success("")
      else ()

  private def timingString(
      startTime: Long,

@@ -149,23 +151,19 @@ object Aggregation {
    timing(format, startTime, endTime)
  }

  def timing(format: java.text.DateFormat, startTime: Long, endTime: Long): String = {
    val nowString = format.format(new java.util.Date(endTime))
  def timing(format: java.text.DateFormat, startTime: Long, endTime: Long): String =
    val total = (endTime - startTime + 500) / 1000
    val totalString = s"$total s" +
      (if (total <= 60) ""
      (if total <= 60 then ""
       else {
         val maybeHours = total / 3600 match {
         val maybeHours = total / 3600 match
           case 0 => ""
           case h => f"$h%02d:"
         }
         val mins = f"${total % 3600 / 60}%02d"
         val secs = f"${total % 60}%02d"
         s" ($maybeHours$mins:$secs)"
       })

    s"Total time: $totalString, completed $nowString"
  }
    s"elapsed time: $totalString"

  def defaultFormat: DateFormat = {
    import java.text.DateFormat

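The new format drops the wall-clock completion timestamp and the "Total time" prefix. A worked example of the arithmetic above, matching the assertions in the updated AggregationSpec further down (rounding to the nearest second, then appending an `(hh:)mm:ss` suffix only past 60 s):

```scala
// (6003099 ms + 500) / 1000 = 6003 s; 6003 / 3600 = 1 h, (6003 % 3600) / 60 = 40 min, 6003 % 60 = 3 s
assert(Aggregation.timing(Aggregation.defaultFormat, 0, 6003099) == "elapsed time: 6003 s (01:40:03)")
// (30399 ms + 500) / 1000 = 30 s, which is <= 60, so no (mm:ss) suffix is appended
assert(Aggregation.timing(Aggregation.defaultFormat, 0, 30399) == "elapsed time: 30 s")
```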
@@ -9,6 +9,7 @@ package sbt
package internal

import java.io.File
import java.net.URI
import java.nio.file.{ Path, Paths }
import java.util.Locale

@@ -98,6 +99,21 @@ object SysProp {
  def legacyTestReport: Boolean = getOrFalse("sbt.testing.legacyreport")
  def semanticdb: Boolean = getOrFalse("sbt.semanticdb")
  def forceServerStart: Boolean = getOrFalse("sbt.server.forcestart")
  def remoteCache: Option[URI] = sys.props
    .get("sbt.remote_cache")
    .map(URI(_))
  def remoteCacheTlsCertificate: Option[File] = sys.props
    .get("sbt.remote_cache.tls_certificate")
    .map(File(_))
  def remoteCacheTlsClientCertificate: Option[File] = sys.props
    .get("sbt.remote_cache.tls_client_certificate")
    .map(File(_))
  def remoteCacheTlsClientKey: Option[File] = sys.props
    .get("sbt.remote_cache.tls_client_key")
    .map(File(_))
  def remoteCacheHeaders: List[String] = sys.props
    .get("sbt.remote_cache.header")
    .toList

  def watchMode: String =
    sys.props.get("sbt.watch.mode").getOrElse("auto")

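Because the defaults come from JVM system properties, the same configuration can be supplied at launch time. A hedged sketch showing the property names read above with placeholder values (shown as programmatic assignment, e.g. from a test harness):

```scala
// Placeholder values; the keys mirror the names read by SysProp above.
// Equivalent launch flags would be -Dsbt.remote_cache=..., -Dsbt.remote_cache.tls_certificate=..., etc.
sys.props("sbt.remote_cache") = "grpcs://cache.example.com:443/main"
sys.props("sbt.remote_cache.tls_certificate") = "/tmp/sslcert/ca.crt"
sys.props("sbt.remote_cache.header") = "authorization=Bearer <token>"
```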
@@ -11,14 +11,14 @@ object AggregationSpec extends verify.BasicTestSuite {
  val timing = Aggregation.timing(Aggregation.defaultFormat, 0, _: Long)

  test("timing should format total time properly") {
    assert(timing(101).startsWith("Total time: 0 s,"))
    assert(timing(1000).startsWith("Total time: 1 s,"))
    assert(timing(3000).startsWith("Total time: 3 s,"))
    assert(timing(30399).startsWith("Total time: 30 s,"))
    assert(timing(60399).startsWith("Total time: 60 s,"))
    assert(timing(60699).startsWith("Total time: 61 s (01:01),"))
    assert(timing(303099).startsWith("Total time: 303 s (05:03),"))
    assert(timing(6003099).startsWith("Total time: 6003 s (01:40:03),"))
    assert(timing(96003099).startsWith("Total time: 96003 s (26:40:03),"))
    assert(timing(101).startsWith("elapsed time: 0 s"))
    assert(timing(1000).startsWith("elapsed time: 1 s"))
    assert(timing(3000).startsWith("elapsed time: 3 s"))
    assert(timing(30399).startsWith("elapsed time: 30 s"))
    assert(timing(60399).startsWith("elapsed time: 60 s"))
    assert(timing(60699).startsWith("elapsed time: 61 s (01:01)"))
    assert(timing(303099).startsWith("elapsed time: 303 s (05:03)"))
    assert(timing(6003099).startsWith("elapsed time: 6003 s (01:40:03)"))
    assert(timing(96003099).startsWith("elapsed time: 96003 s (26:40:03)"))
  }
}

@@ -35,6 +35,12 @@ object ContrabandConfig {
    case "scalajson.ast.unsafe.JValue" | "sjsonnew.shaded.scalajson.ast.unsafe.JValue" => { _ =>
      "sbt.internal.util.codec.JValueFormats" :: Nil
    }
    case "xsbti.HashedVirtualFileRef" => { _ =>
      "sbt.internal.util.codec.HashedVirtualFileRefFormats" :: Nil
    }
    case "java.nio.ByteBuffer" => { _ =>
      "sbt.internal.util.codec.ByteBufferFormats" :: Nil
    }
  }

  /** Returns the list of formats required to encode the given `TpeRef`. */

@@ -16,7 +16,7 @@ object Dependencies {
  private val ioVersion = nightlyVersion.getOrElse("1.8.0")
  private val lmVersion =
    sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("2.0.0-alpha13")
  val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha12")
  val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha13")

  private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

@@ -106,6 +106,8 @@ object Dependencies {
  val junit = "junit" % "junit" % "4.13.1"
  val scalaVerify = "com.eed3si9n.verify" %% "verify" % "1.0.0"
  val templateResolverApi = "org.scala-sbt" % "template-resolver" % "0.1"
  val remoteapis =
    "com.eed3si9n.remoteapis.shaded" % "shaded-remoteapis-java" % "2.3.0-M1-52317e00d8d4c37fa778c628485d220fb68a8d08"

  val scalaCompiler = "org.scala-lang" %% "scala3-compiler" % scala3

@@ -0,0 +1,281 @@
package sbt
package internal

import build.bazel.remote.execution.v2.{
  ActionCacheGrpc,
  ActionResult => XActionResult,
  BatchReadBlobsRequest,
  BatchReadBlobsResponse,
  BatchUpdateBlobsRequest,
  BatchUpdateBlobsResponse,
  Compressor,
  ContentAddressableStorageGrpc,
  Digest => XDigest,
  DigestFunction,
  FindMissingBlobsRequest,
  GetActionResultRequest => XGetActionResultRequest,
  OutputFile,
  UpdateActionResultRequest => XUpdateActionResultRequest,
}
import com.eed3si9n.remoteapis.shaded.com.google.protobuf.ByteString
import com.eed3si9n.remoteapis.shaded.io.grpc.{
  CallCredentials,
  Grpc,
  ManagedChannel,
  ManagedChannelBuilder,
  Metadata,
  Status,
  TlsChannelCredentials,
}
import java.net.URI
import java.nio.file.{ Files, Path }
import sbt.util.{
  AbstractActionCacheStore,
  ActionResult,
  Digest,
  DiskActionCacheStore,
  GetActionResultRequest,
  UpdateActionResultRequest,
}
import scala.util.control.NonFatal
import scala.jdk.CollectionConverters.*
import xsbti.{ HashedVirtualFileRef, VirtualFile }

object GrpcActionCacheStore:
  def apply(
      uri: URI,
      rootCerts: Option[Path],
      clientCertChain: Option[Path],
      clientPrivateKey: Option[Path],
      remoteHeaders: List[String],
      disk: DiskActionCacheStore,
  ): GrpcActionCacheStore =
    val b: ManagedChannelBuilder[?] = uri.getScheme() match
      case "grpc" =>
        val port = uri.getPort() match
          case p if p >= 0 => p
          case _ => 80
        val builder = ManagedChannelBuilder.forAddress(uri.getHost(), port)
        builder.usePlaintext()
        builder
      case "grpcs" =>
        val port = uri.getPort() match
          case p if p >= 0 => p
          case _ => 443
        // https://grpc.github.io/grpc-java/javadoc/io/grpc/TlsChannelCredentials.Builder.html
        val tlsBuilder = TlsChannelCredentials.newBuilder()
        rootCerts.foreach: cert =>
          tlsBuilder.trustManager(cert.toFile())
        (clientCertChain, clientPrivateKey) match
          case (Some(cert), Some(key)) =>
            tlsBuilder.keyManager(cert.toFile(), key.toFile())
          case _ => ()
        Grpc.newChannelBuilderForAddress(
          uri.getHost(),
          port,
          tlsBuilder.build(),
        )
      case scheme => sys.error(s"unsupported $uri")
    val channel = b.build()
    val instanceName = Option(uri.getPath()) match
      case Some(x) if x.startsWith("/") => x.drop(1)
      case Some(x) => x
      case None => ""
    new GrpcActionCacheStore(channel, instanceName, remoteHeaders, disk)

  class AuthCallCredentials(remoteHeaders: List[String]) extends CallCredentials:
    val pairs = remoteHeaders.map: h =>
      h.split("=").toList match
        case List(k, v) => Metadata.Key.of(k, Metadata.ASCII_STRING_MARSHALLER) -> v
        case _ => sys.error("remote header must contain one '='")
    override def applyRequestMetadata(
        requestInfo: CallCredentials.RequestInfo,
        executor: java.util.concurrent.Executor,
        applier: CallCredentials.MetadataApplier
    ): Unit =
      executor.execute: () =>
        try
          val headers = Metadata()
          pairs.map { case (k, v) =>
            headers.put(k, v)
          }
          applier.apply(headers)
        catch case NonFatal(e) => applier.fail(Status.UNAUTHENTICATED.withCause(e))
  end AuthCallCredentials
end GrpcActionCacheStore

/*
 * https://github.com/bazelbuild/remote-apis/blob/main/build/bazel/remote/execution/v2/remote_execution.proto
 */
class GrpcActionCacheStore(
    channel: ManagedChannel,
    instanceName: String,
    remoteHeaders: List[String],
    disk: DiskActionCacheStore,
) extends AbstractActionCacheStore:
  lazy val creds = GrpcActionCacheStore.AuthCallCredentials(remoteHeaders)
  lazy val acStub0 = ActionCacheGrpc.newBlockingStub(channel)
  lazy val acStub = remoteHeaders match
    case x :: xs => acStub0.withCallCredentials(creds)
    case _ => acStub0
  lazy val casStub0 = ContentAddressableStorageGrpc.newBlockingStub(channel)
  lazy val casStub = remoteHeaders match
    case x :: xs => casStub0.withCallCredentials(creds)
    case _ => casStub0

  override def storeName: String = "remote"

  /**
   * https://github.com/bazelbuild/remote-apis/blob/9ff14cecffe5287ba337f857731ceadfc2d80de9/build/bazel/remote/execution/v2/remote_execution.proto#L170
   */
  override def get(request: GetActionResultRequest): Either[Throwable, ActionResult] =
    try
      val b = XGetActionResultRequest.newBuilder()
      b.setInstanceName(instanceName)
      b.setActionDigest(toXDigest(request.actionDigest))
      b.setDigestFunction(DigestFunction.Value.SHA256)
      request.inlineOutputFiles.foreach: p =>
        b.addInlineOutputFiles(p)
      val req = b.build()
      val result = acStub.getActionResult(req)
      Right(toActionResult(result))
    catch case NonFatal(e) => Left(e)

  /**
   * https://github.com/bazelbuild/remote-apis/blob/9ff14cecffe5287ba337f857731ceadfc2d80de9/build/bazel/remote/execution/v2/remote_execution.proto#L1596
   */
  override def put(request: UpdateActionResultRequest): Either[Throwable, ActionResult] =
    try
      val refs = putBlobsIfNeeded(request.outputFiles)
      val b = XUpdateActionResultRequest.newBuilder()
      b.setInstanceName(instanceName)
      b.setActionDigest(toXDigest(request.actionDigest))
      b.setDigestFunction(DigestFunction.Value.SHA256)
      b.setActionResult(toXActionResult(refs, request.exitCode))
      val req = b.build()
      val result = acStub.updateActionResult(req)
      Right(toActionResult(result))
    catch case NonFatal(e) => Left(e)

  /**
   * https://github.com/bazelbuild/remote-apis/blob/9ff14cecffe5287ba337f857731ceadfc2d80de9/build/bazel/remote/execution/v2/remote_execution.proto#L379
   */
  override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] =
    val b = BatchUpdateBlobsRequest.newBuilder()
    b.setInstanceName(instanceName)
    b.setDigestFunction(DigestFunction.Value.SHA256)
    blobs.foreach: blob =>
      val bb = BatchUpdateBlobsRequest.Request.newBuilder()
      bb.setDigest(toXDigest(Digest(blob)))
      bb.setData(toByteString(blob))
      bb.setCompressor(Compressor.Value.IDENTITY)
      b.addRequests(bb.build())
    val req = b.build()
    val result = casStub.batchUpdateBlobs(req)
    val responses = result.getResponsesList().asScala.toList
    // do not assume responses to come in order
    val lookupResponse: Map[Digest, BatchUpdateBlobsResponse.Response] =
      Map(responses.map(res => toDigest(res.getDigest()) -> res): _*)
    blobs.flatMap: blob =>
      val d = Digest(blob)
      if lookupResponse.contains(d) then
        Some(HashedVirtualFileRef.of(blob.id(), d.contentHashStr, d.sizeBytes))
      else None

  /**
   * https://github.com/bazelbuild/remote-apis/blob/9ff14cecffe5287ba337f857731ceadfc2d80de9/build/bazel/remote/execution/v2/remote_execution.proto#L403
   */
  override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
    val result = doGetBlobs(refs)
    val blobs = result.getResponsesList().asScala.toList
    val allOk = blobs.forall(_.getStatus().getCode() == 0)
    if allOk then
      // do not assume the responses to come in order
      val lookupResponse: Map[Digest, BatchReadBlobsResponse.Response] =
        Map(blobs.map(res => toDigest(res.getDigest) -> res): _*)
      refs.map: r =>
        val digest = Digest(r)
        val blob = lookupResponse(digest)
        val casFile = disk.putBlob(blob.getData().newInput(), digest)
        val shortPath =
          if r.id.startsWith("${OUT}/") then r.id.drop(7)
          else r.id
        val outPath = outputDirectory.resolve(shortPath)
        Files.createDirectories(outPath.getParent())
        if outPath.toFile().exists() then IO.delete(outPath.toFile())
        Files.createSymbolicLink(outPath, casFile)
        outPath
    else Nil

  /**
   * https://github.com/bazelbuild/remote-apis/blob/96942a2107c702ed3ca4a664f7eeb7c85ba8dc77/build/bazel/remote/execution/v2/remote_execution.proto#L1629
   */
  override def findBlobs(refs: Seq[HashedVirtualFileRef]): Seq[HashedVirtualFileRef] =
    val b = FindMissingBlobsRequest.newBuilder()
    b.setInstanceName(instanceName)
    refs.map: r =>
      b.addBlobDigests(toXDigest(Digest(r)))
    b.setDigestFunction(DigestFunction.Value.SHA256)
    val req = b.build()
    val res = casStub.findMissingBlobs(req)
    val missing = res.getMissingBlobDigestsList().asScala.map(toDigest).toSet
    refs.flatMap: r =>
      if missing(Digest(r)) then None
      else Some(r)

  private def doGetBlobs(refs: Seq[HashedVirtualFileRef]): BatchReadBlobsResponse =
    val b = BatchReadBlobsRequest.newBuilder()
    b.setInstanceName(instanceName)
    refs.map: ref =>
      b.addDigests(toXDigest(Digest(ref)))
    b.setDigestFunction(DigestFunction.Value.SHA256)
    b.addAcceptableCompressors(Compressor.Value.IDENTITY)
    val req = b.build()
    casStub.batchReadBlobs(req)

  private def toXActionResult(
      refs: Seq[HashedVirtualFileRef],
      exitCode: Option[Int]
  ): XActionResult =
    val b = XActionResult.newBuilder()
    exitCode.foreach: e =>
      b.setExitCode(e)
    refs.foreach: ref =>
      val out = toOutputFile(ref)
      b.addOutputFiles(out)
    b.build()

  // per spec, Clients SHOULD NOT populate [contents] when uploading to the cache.
  private def toOutputFile(ref: HashedVirtualFileRef): OutputFile =
    val b = OutputFile.newBuilder()
    val shortPath =
      if ref.id.startsWith("${OUT}/") then ref.id.drop(7)
      else ref.id
    b.setPath(shortPath)
    b.setDigest(toXDigest(Digest(ref)))
    b.build()

  def toActionResult(ar: XActionResult): ActionResult =
    val outs = ar.getOutputFilesList.asScala.toVector.map: out =>
      val d = toDigest(out.getDigest())
      HashedVirtualFileRef.of(out.getPath(), d.contentHashStr, d.sizeBytes)
    ActionResult(outs, storeName, ar.getExitCode())

  def toXDigest(d: Digest): XDigest =
    val str = d.contentHashStr.split("-")(1)
    val sizeBytes = d.sizeBytes
    val b = XDigest.newBuilder()
    b.setHash(str)
    b.setSizeBytes(sizeBytes)
    b.build()

  def toDigest(d: XDigest): Digest =
    val hash = d.getHash()
    val sizeBytes = d.getSizeBytes()
    Digest(s"sha256-$hash/$sizeBytes")

  private def toByteString(blob: VirtualFile): ByteString =
    val out = ByteString.newOutput()
    IO.transfer(blob.input(), out)
    out.toByteString()
end GrpcActionCacheStore

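A hedged sketch of constructing this store by hand (outside the plugin) and issuing a lookup. The endpoint, token, certificate path, and digest below are placeholders, and the `FileConverter` is assumed to come from the caller; this is illustrative only, not the commit's own wiring:

```scala
import java.net.URI
import java.nio.file.Paths
import sbt.internal.GrpcActionCacheStore
import sbt.util.{ Digest, DiskActionCacheStore, GetActionResultRequest }
import xsbti.FileConverter

def demo(converter: FileConverter) =
  // Local CAS used to materialize downloaded blobs, as in syncBlobs above.
  val disk = DiskActionCacheStore(Paths.get("/tmp/sbt-cache"), converter)
  val store = GrpcActionCacheStore(
    uri = URI("grpcs://cache.example.com:443/main"),   // placeholder endpoint
    rootCerts = Some(Paths.get("/tmp/sslcert/ca.crt")),
    clientCertChain = None,
    clientPrivateKey = None,
    remoteHeaders = List("authorization=Bearer <token>"),
    disk = disk,
  )
  // A miss (or transport failure) surfaces as Left(throwable); the digest is a dummy value.
  store.get(GetActionResultRequest(Digest("sha256-" + "00" * 32 + "/0")))
```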
@@ -0,0 +1,34 @@
package sbt
package plugins

import Keys.*
import sbt.util.DiskActionCacheStore
import sbt.internal.GrpcActionCacheStore

object RemoteCachePlugin extends AutoPlugin:
  override def trigger = AllRequirements
  override def requires = JvmPlugin
  override def globalSettings: Seq[Def.Setting[_]] = Seq(
    cacheStores := {
      val orig = cacheStores.value
      val remoteOpt = remoteCache.value
      remoteOpt match
        case Some(remote) =>
          val disk = orig.collect { case r: DiskActionCacheStore =>
            r
          }.headOption match
            case Some(x) => x
            case None => sys.error("disk store not found")
          val r = GrpcActionCacheStore(
            uri = remote,
            rootCerts = remoteCacheTlsCertificate.value.map(_.toPath),
            clientCertChain = remoteCacheTlsClientCertificate.value.map(_.toPath),
            clientPrivateKey = remoteCacheTlsClientKey.value.map(_.toPath),
            remoteHeaders = remoteCacheHeaders.value.toList,
            disk = disk,
          )
          orig ++ Seq(r)
        case _ => orig
    },
  )
end RemoteCachePlugin

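The plugin only appends the gRPC store to `cacheStores`; the configured stores are presumably combined elsewhere via the `AggregateActionCacheStore` introduced further down, so reads try each store in order. A minimal sketch of that composition, assuming the constructors shown in this diff (the helper name is illustrative):

```scala
import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, DiskActionCacheStore }

// Illustrative only: combine the local disk store with a remote store so that,
// under first-hit-wins aggregation, a local hit never touches the network.
def combinedStore(disk: DiskActionCacheStore, remote: ActionCacheStore): ActionCacheStore =
  AggregateActionCacheStore(Seq(disk, remote))
```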
@@ -0,0 +1,41 @@
#!/usr/bin/env bash

set -o errexit   # abort on nonzero exitstatus
set -o nounset   # abort on unbound variable
set -o pipefail  # don't hide errors within pipes

mkdir -p /tmp/sslcert
pushd /tmp/sslcert
# Changes these CN's to match your hosts in your environment if needed.
SERVER_CN=localhost
CLIENT_CN=localhost # Used when doing mutual TLS

echo Generate CA key:
openssl genrsa -passout pass:1111 -des3 -out ca.key 4096
echo Generate CA certificate:
# Generates ca.crt which is the trustCertCollectionFile
openssl req -passin pass:1111 -new -x509 -days 358000 -key ca.key -out ca.crt -subj "/CN=${SERVER_CN}"
echo Generate server key:
openssl genrsa -passout pass:1111 -des3 -out server.key 4096
echo Generate server signing request:
openssl req -passin pass:1111 -new -key server.key -out server.csr -subj "/CN=${SERVER_CN}"
echo Self-signed server certificate:
# Generates server.crt which is the certChainFile for the server
openssl x509 -req -passin pass:1111 -days 358000 -in server.csr -CA ca.crt -CAkey ca.key -set_serial 01 -out server.crt
echo Remove passphrase from server key:
openssl rsa -passin pass:1111 -in server.key -out server.key
echo Generate client key
openssl genrsa -passout pass:1111 -des3 -out client.key 4096
echo Generate client signing request:
openssl req -passin pass:1111 -new -key client.key -out client.csr -subj "/CN=${CLIENT_CN}"
echo Self-signed client certificate:
# Generates client.crt which is the clientCertChainFile for the client (need for mutual TLS only)
openssl x509 -passin pass:1111 -req -days 358000 -in client.csr -CA ca.crt -CAkey ca.key -set_serial 01 -out client.crt
echo Remove passphrase from client key:
openssl rsa -passin pass:1111 -in client.key -out client.key
echo Converting the private keys to X.509:
# Generates client.pem which is the clientPrivateKeyFile for the Client (needed for mutual TLS only)
openssl pkcs8 -topk8 -nocrypt -in client.key -out client.pem
# Generates server.pem which is the privateKeyFile for the Server
openssl pkcs8 -topk8 -nocrypt -in server.key -out server.pem
popd

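The files produced by this script line up with the TLS settings introduced in Keys above; a hedged sketch of the mapping, using the script's /tmp/sslcert output paths as placeholder values:

```scala
// Illustrative mapping from the generated files to the new settings:
//   ca.crt     -> trusted root used to verify the server certificate
//   client.crt -> client certificate chain (mutual TLS only)
//   client.pem -> client private key in PKCS#8 form (mutual TLS only)
Global / remoteCacheTlsCertificate := Some(file("/tmp/sslcert/ca.crt"))
Global / remoteCacheTlsClientCertificate := Some(file("/tmp/sslcert/client.crt"))
Global / remoteCacheTlsClientKey := Some(file("/tmp/sslcert/client.pem"))
```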
@@ -0,0 +1,74 @@
/**
 * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
 */

// DO NOT EDIT MANUALLY
package sbt.util
/**
 * An ActionResult represents a result from executing a task.
 * In addition to the value typically represented in the return type
 * of a task, ActionResult tracks the file output and other side effects.
 *
 * See also https://github.com/bazelbuild/remote-apis/blob/96942a2107c702ed3ca4a664f7eeb7c85ba8dc77/build/bazel/remote/execution/v2/remote_execution.proto#L1056
 */
final class ActionResult private (
  val outputFiles: Vector[xsbti.HashedVirtualFileRef],
  val origin: Option[String],
  val exitCode: Option[Int],
  val contents: Vector[java.nio.ByteBuffer],
  val isExecutable: Vector[Boolean]) extends Serializable {

  private def this() = this(Vector(), None, None, Vector(), Vector())
  private def this(outputFiles: Vector[xsbti.HashedVirtualFileRef]) = this(outputFiles, None, None, Vector(), Vector())
  private def this(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String]) = this(outputFiles, origin, None, Vector(), Vector())
  private def this(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String], exitCode: Option[Int]) = this(outputFiles, origin, exitCode, Vector(), Vector())
  private def this(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String], exitCode: Option[Int], contents: Vector[java.nio.ByteBuffer]) = this(outputFiles, origin, exitCode, contents, Vector())

  override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match {
    case x: ActionResult => (this.outputFiles == x.outputFiles) && (this.origin == x.origin) && (this.exitCode == x.exitCode) && (this.contents == x.contents) && (this.isExecutable == x.isExecutable)
    case _ => false
  })
  override def hashCode: Int = {
    37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.util.ActionResult".##) + outputFiles.##) + origin.##) + exitCode.##) + contents.##) + isExecutable.##)
  }
  override def toString: String = {
    "ActionResult(" + outputFiles + ", " + origin + ", " + exitCode + ", " + contents + ", " + isExecutable + ")"
  }
  private[this] def copy(outputFiles: Vector[xsbti.HashedVirtualFileRef] = outputFiles, origin: Option[String] = origin, exitCode: Option[Int] = exitCode, contents: Vector[java.nio.ByteBuffer] = contents, isExecutable: Vector[Boolean] = isExecutable): ActionResult = {
    new ActionResult(outputFiles, origin, exitCode, contents, isExecutable)
  }
  def withOutputFiles(outputFiles: Vector[xsbti.HashedVirtualFileRef]): ActionResult = {
    copy(outputFiles = outputFiles)
  }
  def withOrigin(origin: Option[String]): ActionResult = {
    copy(origin = origin)
  }
  def withOrigin(origin: String): ActionResult = {
    copy(origin = Option(origin))
  }
  def withExitCode(exitCode: Option[Int]): ActionResult = {
    copy(exitCode = exitCode)
  }
  def withExitCode(exitCode: Int): ActionResult = {
    copy(exitCode = Option(exitCode))
  }
  def withContents(contents: Vector[java.nio.ByteBuffer]): ActionResult = {
    copy(contents = contents)
  }
  def withIsExecutable(isExecutable: Vector[Boolean]): ActionResult = {
    copy(isExecutable = isExecutable)
  }
}
object ActionResult {

  def apply(): ActionResult = new ActionResult()
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef]): ActionResult = new ActionResult(outputFiles)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String]): ActionResult = new ActionResult(outputFiles, origin)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: String): ActionResult = new ActionResult(outputFiles, Option(origin))
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String], exitCode: Option[Int]): ActionResult = new ActionResult(outputFiles, origin, exitCode)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: String, exitCode: Int): ActionResult = new ActionResult(outputFiles, Option(origin), Option(exitCode))
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String], exitCode: Option[Int], contents: Vector[java.nio.ByteBuffer]): ActionResult = new ActionResult(outputFiles, origin, exitCode, contents)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: String, exitCode: Int, contents: Vector[java.nio.ByteBuffer]): ActionResult = new ActionResult(outputFiles, Option(origin), Option(exitCode), contents)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: Option[String], exitCode: Option[Int], contents: Vector[java.nio.ByteBuffer], isExecutable: Vector[Boolean]): ActionResult = new ActionResult(outputFiles, origin, exitCode, contents, isExecutable)
  def apply(outputFiles: Vector[xsbti.HashedVirtualFileRef], origin: String, exitCode: Int, contents: Vector[java.nio.ByteBuffer], isExecutable: Vector[Boolean]): ActionResult = new ActionResult(outputFiles, Option(origin), Option(exitCode), contents, isExecutable)
}

@@ -0,0 +1,55 @@
/**
 * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
 */

// DO NOT EDIT MANUALLY
package sbt.util
final class GetActionResultRequest private (
  val actionDigest: sbt.util.Digest,
  val inlineStdout: Option[Boolean],
  val inlineStderr: Option[Boolean],
  val inlineOutputFiles: Vector[String]) extends Serializable {

  private def this(actionDigest: sbt.util.Digest) = this(actionDigest, None, None, Vector())
  private def this(actionDigest: sbt.util.Digest, inlineStdout: Option[Boolean], inlineStderr: Option[Boolean]) = this(actionDigest, inlineStdout, inlineStderr, Vector())

  override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match {
    case x: GetActionResultRequest => (this.actionDigest == x.actionDigest) && (this.inlineStdout == x.inlineStdout) && (this.inlineStderr == x.inlineStderr) && (this.inlineOutputFiles == x.inlineOutputFiles)
    case _ => false
  })
  override def hashCode: Int = {
    37 * (37 * (37 * (37 * (37 * (17 + "sbt.util.GetActionResultRequest".##) + actionDigest.##) + inlineStdout.##) + inlineStderr.##) + inlineOutputFiles.##)
  }
  override def toString: String = {
    "GetActionResultRequest(" + actionDigest + ", " + inlineStdout + ", " + inlineStderr + ", " + inlineOutputFiles + ")"
  }
  private[this] def copy(actionDigest: sbt.util.Digest = actionDigest, inlineStdout: Option[Boolean] = inlineStdout, inlineStderr: Option[Boolean] = inlineStderr, inlineOutputFiles: Vector[String] = inlineOutputFiles): GetActionResultRequest = {
    new GetActionResultRequest(actionDigest, inlineStdout, inlineStderr, inlineOutputFiles)
  }
  def withActionDigest(actionDigest: sbt.util.Digest): GetActionResultRequest = {
    copy(actionDigest = actionDigest)
  }
  def withInlineStdout(inlineStdout: Option[Boolean]): GetActionResultRequest = {
    copy(inlineStdout = inlineStdout)
  }
  def withInlineStdout(inlineStdout: Boolean): GetActionResultRequest = {
    copy(inlineStdout = Option(inlineStdout))
  }
  def withInlineStderr(inlineStderr: Option[Boolean]): GetActionResultRequest = {
    copy(inlineStderr = inlineStderr)
  }
  def withInlineStderr(inlineStderr: Boolean): GetActionResultRequest = {
    copy(inlineStderr = Option(inlineStderr))
  }
  def withInlineOutputFiles(inlineOutputFiles: Vector[String]): GetActionResultRequest = {
    copy(inlineOutputFiles = inlineOutputFiles)
  }
}
object GetActionResultRequest {

  def apply(actionDigest: sbt.util.Digest): GetActionResultRequest = new GetActionResultRequest(actionDigest)
  def apply(actionDigest: sbt.util.Digest, inlineStdout: Option[Boolean], inlineStderr: Option[Boolean]): GetActionResultRequest = new GetActionResultRequest(actionDigest, inlineStdout, inlineStderr)
  def apply(actionDigest: sbt.util.Digest, inlineStdout: Boolean, inlineStderr: Boolean): GetActionResultRequest = new GetActionResultRequest(actionDigest, Option(inlineStdout), Option(inlineStderr))
  def apply(actionDigest: sbt.util.Digest, inlineStdout: Option[Boolean], inlineStderr: Option[Boolean], inlineOutputFiles: Vector[String]): GetActionResultRequest = new GetActionResultRequest(actionDigest, inlineStdout, inlineStderr, inlineOutputFiles)
  def apply(actionDigest: sbt.util.Digest, inlineStdout: Boolean, inlineStderr: Boolean, inlineOutputFiles: Vector[String]): GetActionResultRequest = new GetActionResultRequest(actionDigest, Option(inlineStdout), Option(inlineStderr), inlineOutputFiles)
}

util-cache/src/main/contraband-scala/sbt/util/UpdateActionResultRequest.scala (generated, new file, 54 lines)
@@ -0,0 +1,54 @@
/**
 * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
 */

// DO NOT EDIT MANUALLY
package sbt.util
final class UpdateActionResultRequest private (
  val actionDigest: sbt.util.Digest,
  val outputFiles: Vector[xsbti.VirtualFile],
  val exitCode: Option[Int],
  val isExecutable: Vector[Boolean]) extends Serializable {

  private def this(actionDigest: sbt.util.Digest) = this(actionDigest, Vector(), None, Vector())
  private def this(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile]) = this(actionDigest, outputFiles, None, Vector())
  private def this(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile], exitCode: Option[Int]) = this(actionDigest, outputFiles, exitCode, Vector())

  override def equals(o: Any): Boolean = this.eq(o.asInstanceOf[AnyRef]) || (o match {
    case x: UpdateActionResultRequest => (this.actionDigest == x.actionDigest) && (this.outputFiles == x.outputFiles) && (this.exitCode == x.exitCode) && (this.isExecutable == x.isExecutable)
    case _ => false
  })
  override def hashCode: Int = {
    37 * (37 * (37 * (37 * (37 * (17 + "sbt.util.UpdateActionResultRequest".##) + actionDigest.##) + outputFiles.##) + exitCode.##) + isExecutable.##)
  }
  override def toString: String = {
    "UpdateActionResultRequest(" + actionDigest + ", " + outputFiles + ", " + exitCode + ", " + isExecutable + ")"
  }
  private[this] def copy(actionDigest: sbt.util.Digest = actionDigest, outputFiles: Vector[xsbti.VirtualFile] = outputFiles, exitCode: Option[Int] = exitCode, isExecutable: Vector[Boolean] = isExecutable): UpdateActionResultRequest = {
    new UpdateActionResultRequest(actionDigest, outputFiles, exitCode, isExecutable)
  }
  def withActionDigest(actionDigest: sbt.util.Digest): UpdateActionResultRequest = {
    copy(actionDigest = actionDigest)
  }
  def withOutputFiles(outputFiles: Vector[xsbti.VirtualFile]): UpdateActionResultRequest = {
    copy(outputFiles = outputFiles)
  }
  def withExitCode(exitCode: Option[Int]): UpdateActionResultRequest = {
    copy(exitCode = exitCode)
  }
  def withExitCode(exitCode: Int): UpdateActionResultRequest = {
    copy(exitCode = Option(exitCode))
  }
  def withIsExecutable(isExecutable: Vector[Boolean]): UpdateActionResultRequest = {
    copy(isExecutable = isExecutable)
  }
}
object UpdateActionResultRequest {

  def apply(actionDigest: sbt.util.Digest): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest)
  def apply(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile]): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest, outputFiles)
  def apply(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile], exitCode: Option[Int]): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest, outputFiles, exitCode)
  def apply(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile], exitCode: Int): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest, outputFiles, Option(exitCode))
  def apply(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile], exitCode: Option[Int], isExecutable: Vector[Boolean]): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest, outputFiles, exitCode, isExecutable)
  def apply(actionDigest: sbt.util.Digest, outputFiles: Vector[xsbti.VirtualFile], exitCode: Int, isExecutable: Vector[Boolean]): UpdateActionResultRequest = new UpdateActionResultRequest(actionDigest, outputFiles, Option(exitCode), isExecutable)
}

@@ -0,0 +1,17 @@
package sbt.util
@target(Scala)
@codecPackage("sbt.internal.util.codec")
@fullCodec("ActionResultCodec")

## An ActionResult represents a result from executing a task.
## In addition to the value typically represented in the return type
## of a task, ActionResult tracks the file output and other side effects.
##
## See also https://github.com/bazelbuild/remote-apis/blob/96942a2107c702ed3ca4a664f7eeb7c85ba8dc77/build/bazel/remote/execution/v2/remote_execution.proto#L1056
type ActionResult {
  outputFiles: [xsbti.HashedVirtualFileRef] @since("0.1.0")
  origin: String @since("0.2.0")
  exitCode: Int @since("0.3.0")
  contents: [java.nio.ByteBuffer] @since("0.4.0")
  isExecutable: [Boolean] @since("0.5.0")
}

@@ -0,0 +1,16 @@
package sbt.util
@target(Scala)

type UpdateActionResultRequest {
  actionDigest: sbt.util.Digest!
  outputFiles: [xsbti.VirtualFile] @since("0.1.0")
  exitCode: Int @since("0.2.0")
  isExecutable: [Boolean] @since("0.3.0")
}

type GetActionResultRequest {
  actionDigest: sbt.util.Digest!
  inlineStdout: Boolean @since("0.1.0")
  inlineStderr: Boolean @since("0.1.0")
  inlineOutputFiles: [String] @since("0.2.0")
}

@@ -0,0 +1,43 @@
package sbt
package internal
package util

import scala.collection.concurrent.TrieMap

enum ActionCacheEvent:
  case Found(storeName: String)
  case NotFound
end ActionCacheEvent

class CacheEventLog:
  private val acEvents = TrieMap.empty[ActionCacheEvent, Long]
  def append(event: ActionCacheEvent): Unit =
    acEvents.updateWith(event) {
      case None => Some(1L)
      case Some(count) => Some(count + 1L)
    }
  def clear(): Unit =
    acEvents.clear()

  def summary: String =
    if acEvents.isEmpty then ""
    else
      val total = acEvents.values.sum
      val hit = acEvents.view.collect { case (k @ ActionCacheEvent.Found(_), v) =>
        (k, v)
      }.toMap
      val hitCount = hit.values.sum
      val missCount = total - hitCount
      val hitRate = (hitCount.toDouble / total.toDouble * 100.0).floor.toInt
      val hitDescs = hit.toSeq.map {
        case (ActionCacheEvent.Found(id), 1) => s"1 $id cache hit"
        case (ActionCacheEvent.Found(id), v) => s"$v $id cache hits"
      }.sorted
      val missDescs = missCount match
        case 0 => Nil
        case 1 => Seq(s"$missCount onsite task")
        case _ => Seq(s"$missCount onsite tasks")
      val descs = hitDescs ++ missDescs
      val descsSummary = descs.mkString(", ", ", ", "")
      s"cache $hitRate%$descsSummary"
end CacheEventLog

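A small worked example of the summary format produced by the class above, assuming one remote hit, one disk hit, and two uncached ("onsite") tasks:

```scala
import sbt.internal.util.{ ActionCacheEvent, CacheEventLog }

val log = CacheEventLog()
log.append(ActionCacheEvent.Found("remote"))
log.append(ActionCacheEvent.Found("disk"))
log.append(ActionCacheEvent.NotFound)
log.append(ActionCacheEvent.NotFound)

// 2 hits out of 4 events -> floor(50.0) = 50%; hit descriptions are sorted alphabetically.
assert(log.summary == "cache 50%, 1 disk cache hit, 1 remote cache hit, 2 onsite tasks")
```

This is the string that `printSuccess` appends after the elapsed-time message when `cacheSummary` is non-empty.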
@@ -1,16 +0,0 @@
package sbt
package internal
package util

import java.io.InputStream
import java.nio.file.{ Files, Path }
import sbt.util.{ Digest, HashUtil }
import xsbti.{ BasicVirtualFileRef, PathBasedFile }

class PlainVirtualFile1(path: Path, id: String) extends BasicVirtualFileRef(id) with PathBasedFile:
  override def contentHash: Long = HashUtil.farmHash(path)
  override def contentHashStr: String = Digest.sha256Hash(input()).toString()
  override def name(): String = path.getFileName.toString
  override def input(): InputStream = Files.newInputStream(path)
  override def toPath: Path = path
end PlainVirtualFile1

@@ -8,7 +8,11 @@ case class StringVirtualFile1(path: String, content: String)
    extends BasicVirtualFileRef(path)
    with VirtualFile:
  override def contentHash: Long = HashUtil.farmHash(content.getBytes("UTF-8"))
  override def contentHashStr: String = Digest.sha256Hash(input).toString()
  override def sizeBytes: Long = content.getBytes("UTF-8").size
  override def contentHashStr: String =
    import Digest.*
    val d = Digest.sha256Hash(content.getBytes("UTF-8"))
    d.contentHashStr
  override def input: InputStream = new ByteArrayInputStream(content.getBytes("UTF-8"))
  override def toString: String = s"StringVirtualFile1($path, <content>)"
end StringVirtualFile1

@@ -0,0 +1,12 @@
/**
 * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
 */

// DO NOT EDIT MANUALLY
package sbt.internal.util.codec
trait ActionResultCodec
    extends sbt.internal.util.codec.HashedVirtualFileRefFormats
    with sbt.internal.util.codec.ByteBufferFormats
    with sjsonnew.BasicJsonProtocol
    with sbt.internal.util.codec.ActionResultFormats
object ActionResultCodec extends ActionResultCodec

@@ -0,0 +1,39 @@
/**
 * This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
 */

// DO NOT EDIT MANUALLY
package sbt.internal.util.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait ActionResultFormats {
  self: sbt.internal.util.codec.HashedVirtualFileRefFormats
    with sbt.internal.util.codec.ByteBufferFormats
    with sjsonnew.BasicJsonProtocol =>
  implicit lazy val ActionResultFormat: JsonFormat[sbt.util.ActionResult] =
    new JsonFormat[sbt.util.ActionResult] {
      override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.util.ActionResult = {
        __jsOpt match {
          case Some(__js) =>
            unbuilder.beginObject(__js)
            val outputFiles = unbuilder.readField[Vector[xsbti.HashedVirtualFileRef]]("outputFiles")
            val origin = unbuilder.readField[Option[String]]("origin")
            val exitCode = unbuilder.readField[Option[Int]]("exitCode")
            val contents = unbuilder.readField[Vector[java.nio.ByteBuffer]]("contents")
            val isExecutable = unbuilder.readField[Vector[Boolean]]("isExecutable")
            unbuilder.endObject()
            sbt.util.ActionResult(outputFiles, origin, exitCode, contents, isExecutable)
          case None =>
            deserializationError("Expected JsObject but found None")
        }
      }
      override def write[J](obj: sbt.util.ActionResult, builder: Builder[J]): Unit = {
        builder.beginObject()
        builder.addField("outputFiles", obj.outputFiles)
        builder.addField("origin", obj.origin)
        builder.addField("exitCode", obj.exitCode)
        builder.addField("contents", obj.contents)
        builder.addField("isExecutable", obj.isExecutable)
        builder.endObject()
      }
    }
}

@@ -0,0 +1,22 @@
package sbt
package internal
package util
package codec

import java.nio.ByteBuffer
import sjsonnew.{ BasicJsonProtocol, BUtil, IsoString }

trait ByteBufferFormats { self: BasicJsonProtocol =>

  /**
   * A string representation of HashedVirtualFileRef, delimited by `>`.
   */
  def byteBufferToStr(buf: ByteBuffer): String =
    BUtil.toHex(buf.array())

  def strToByteBuffer(s: String): ByteBuffer =
    ByteBuffer.wrap(BUtil.fromHex(s))

  implicit lazy val byteBufferIsoString: IsoString[ByteBuffer] =
    IsoString.iso(byteBufferToStr, strToByteBuffer)
}

@@ -0,0 +1,29 @@
package sbt
package internal
package util
package codec

import sjsonnew.{ BasicJsonProtocol, IsoString }
import xsbti.HashedVirtualFileRef

trait HashedVirtualFileRefFormats { self: BasicJsonProtocol =>

  /**
   * A string representation of HashedVirtualFileRef, delimited by `>`.
   */
  def hashedVirtualFileRefToStr(ref: HashedVirtualFileRef): String =
    s"${ref.id}>${ref.contentHashStr}/${ref.sizeBytes}"

  def strToHashedVirtualFileRef(s: String): HashedVirtualFileRef =
    s.split(">").toList match {
      case path :: rest :: Nil =>
        rest.split("/").toList match {
          case hash :: size :: Nil => HashedVirtualFileRef.of(path, hash, size.toLong)
          case _ => throw new RuntimeException(s"invalid HashedVirtualFileRefIsoString $s")
        }
      case _ => throw new RuntimeException(s"invalid HashedVirtualFileRefIsoString $s")
    }

  implicit lazy val hashedVirtualFileRefIsoString: IsoString[HashedVirtualFileRef] =
    IsoString.iso(hashedVirtualFileRefToStr, strToHashedVirtualFileRef)
}

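A sketch of the string round-trip these helpers define, using a hypothetical ref with a truncated hash for readability. `ActionResultCodec` (added earlier in this diff) mixes the trait in, so the helpers can be exercised through it:

```scala
import sbt.internal.util.codec.ActionResultCodec
import xsbti.HashedVirtualFileRef

// Hypothetical ref; a real contentHashStr would carry a full sha256 hex digest.
val ref = HashedVirtualFileRef.of("src/main/scala/A.scala", "sha256-1a2b3c", 123L)

val s = ActionResultCodec.hashedVirtualFileRefToStr(ref)
// s == "src/main/scala/A.scala>sha256-1a2b3c/123": id before '>', then hash '/' size.
val back = ActionResultCodec.strToHashedVirtualFileRef(s)
// back recovers the same (id, contentHashStr, sizeBytes) triple.
```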
@@ -1,10 +1,14 @@
package sbt.util

import sbt.internal.util.{ ActionCacheEvent, CacheEventLog, StringVirtualFile1 }
import sbt.io.IO
import scala.reflect.ClassTag
import scala.annotation.{ meta, StaticAnnotation }
import sjsonnew.{ HashWriter, JsonFormat }
import sjsonnew.support.murmurhash.Hasher
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
import xsbti.VirtualFile
import java.nio.charset.StandardCharsets
import java.nio.file.Path
import scala.quoted.{ Expr, FromExpr, ToExpr, Quotes }

@@ -33,22 +37,48 @@ object ActionCache:
  )(
      config: BuildWideCacheConfiguration
  ): O =
    val store = config.store
    val cacheEventLog = config.cacheEventLog
    val input =
      Digest.sha256Hash(codeContentHash, extraHash, Digest.dummy(Hasher.hashUnsafe[I](key)))
    val store = config.store
    val result = store
      .get[O](input)
      .getOrElse:
        val (newResult, outputs) = action(key)
        store.put[O](input, newResult, outputs)
    // run the side effect to sync the output files
    store.syncBlobs(result.outputFiles, config.outputDirectory)
    result.value
    val valuePath = config.outputDirectory.resolve(s"value/${input}.json").toString
    def organicTask: O =
      cacheEventLog.append(ActionCacheEvent.NotFound)
      // run action(...) and combine the newResult with outputs
      val (newResult, outputs) = action(key)
      val json = Converter.toJsonUnsafe(newResult)
      val valueFile = StringVirtualFile1(valuePath, CompactPrinter(json))
      val newOutputs = Vector(valueFile) ++ outputs.toVector
      store.put(UpdateActionResultRequest(input, newOutputs, exitCode = 0)) match
        case Right(result) =>
          store.syncBlobs(result.outputFiles, config.outputDirectory)
          newResult
        case Left(e) => throw e
    def valueFromStr(str: String, origin: Option[String]): O =
      cacheEventLog.append(ActionCacheEvent.Found(origin.getOrElse("unknown")))
      val json = Parser.parseUnsafe(str)
      Converter.fromJsonUnsafe[O](json)
    store.get(
      GetActionResultRequest(input, inlineStdout = false, inlineStderr = false, Vector(valuePath))
    ) match
      case Right(result) =>
        // some protocol can embed values into the result
        result.contents.headOption match
          case Some(head) =>
            store.syncBlobs(result.outputFiles, config.outputDirectory)
            val str = String(head.array(), StandardCharsets.UTF_8)
            valueFromStr(str, result.origin)
          case _ =>
            val paths = store.syncBlobs(result.outputFiles, config.outputDirectory)
            if paths.isEmpty then organicTask
            else valueFromStr(IO.read(paths.head.toFile()), result.origin)
      case Left(_) => organicTask
end ActionCache

class BuildWideCacheConfiguration(
    val store: ActionCacheStore,
    val outputDirectory: Path,
    val cacheEventLog: CacheEventLog,
):
  override def toString(): String =
    s"BuildWideCacheConfiguration(store = $store, outputDirectory = $outputDirectory)"

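Around a cache miss, the code above serializes the task's value to JSON, stores it as a `value/<digest>.json` blob alongside the declared outputs, and replays it on later hits. A hedged sketch of that round trip against the store API directly, assuming the in-memory store referenced in `Def` lives in `sbt.util` and implements the updated request-based interface; the digest and JSON payload are placeholders:

```scala
import sbt.internal.util.StringVirtualFile1
import sbt.util.{ Digest, GetActionResultRequest, InMemoryActionCacheStore, UpdateActionResultRequest }

// Illustrative only: this mirrors what ActionCache.cache does on a miss and on a subsequent hit.
val store = InMemoryActionCacheStore()
val input = Digest("sha256-" + "00" * 32 + "/0")   // placeholder action digest
val value = StringVirtualFile1(s"value/${input}.json", """{"greeting":"hi"}""")

store.put(UpdateActionResultRequest(input, Vector(value), exitCode = 0))
store.get(GetActionResultRequest(input)) // Right(actionResult) once the result is stored
```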
@@ -1,79 +1,99 @@
package sbt.util

import java.io.RandomAccessFile
import java.nio.ByteBuffer
import java.nio.file.{ Files, Path }
import sjsonnew.*
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
import sjsonnew.shaded.scalajson.ast.unsafe.JValue

import scala.collection.mutable
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import sbt.io.IO
import sbt.io.syntax.*
import sbt.internal.util.StringVirtualFile1
import sbt.internal.util.codec.ActionResultCodec.given
import xsbti.{ FileConverter, HashedVirtualFileRef, PathBasedFile, VirtualFile }
import java.io.InputStream

/**
 * An abstraction of a remote or local cache store.
 */
trait ActionCacheStore:
  /**
   * A name used to identify the cache store.
   */
  def storeName: String

  /**
   * Put a value and blobs to the cache store for later retrieval,
   * based on the `actionDigest`.
   */
  def put[A1: ClassTag: JsonFormat](
      actionDigest: Digest,
      value: A1,
      blobs: Seq[VirtualFile],
  ): ActionResult[A1]
  def put(request: UpdateActionResultRequest): Either[Throwable, ActionResult]

  /**
   * Get the value for the key from the cache store.
   * `inlineContentPaths` - paths whose contents would be inlined.
   */
  def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]]
  def get(request: GetActionResultRequest): Either[Throwable, ActionResult]

  /**
   * Put VirtualFile blobs to the cache store for later retrieval.
   */
  def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef]

  /**
   * Get blobs from the cache store.
   */
  def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile]

  /**
   * Materialize blobs to the output directory.
   */
  def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path]

  /**
   * Find if blobs are present in the storage.
   */
  def findBlobs(refs: Seq[HashedVirtualFileRef]): Seq[HashedVirtualFileRef]
end ActionCacheStore
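The trait keeps the old digest-keyed put/get signatures alongside the new request-based ones that ActionCache.cache uses. A hedged sketch of driving a store through the request API directly follows; it uses only the request shapes that appear elsewhere in this diff, and assumes their constructors are the generated ones invoked there.

// illustrative sketch only; request constructors as used in ActionCache.cache above
import sbt.util.*
import sbt.internal.util.StringVirtualFile1

def storeSketch(store: ActionCacheStore): Unit =
  val digest = Digest.sha256Hash("compile Foo.scala".getBytes("UTF-8"))
  val valueBlob = StringVirtualFile1(s"value/$digest.json", """{"answer":42}""")
  store.put(UpdateActionResultRequest(digest, Vector(valueBlob), exitCode = 0)) match
    case Left(err) => println(s"put failed: $err")
    case Right(_) =>
      // ask the store to inline the value blob when reading the result back
      val req = GetActionResultRequest(digest, inlineStdout = false, inlineStderr = false, Vector(valueBlob.id))
      store.get(req) match
        case Right(result) => println(s"hit from ${result.origin.getOrElse(store.storeName)}")
        case Left(_)       => println("miss")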
class AggregateActionCacheStore(stores: Seq[ActionCacheStore]) extends ActionCacheStore:
trait AbstractActionCacheStore extends ActionCacheStore:
  def putBlobsIfNeeded(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] =
    val found = findBlobs(blobs).toSet
    val missing = blobs.flatMap: blob =>
      val ref: HashedVirtualFileRef = blob
      if found.contains(ref) then None
      else Some(blob)
    val combined = putBlobs(missing).toSet ++ found
    blobs.flatMap: blob =>
      val ref: HashedVirtualFileRef = blob
      if combined.contains(ref) then Some(ref)
      else None

  def notFound: Throwable =
    new RuntimeException("not found")
end AbstractActionCacheStore
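putBlobsIfNeeded is the usual find-then-upload round trip: ask the store which blobs it already holds, upload only the missing ones, and return refs for everything now present. The same bookkeeping is sketched below over plain strings, purely for illustration; none of these names are sbt API.

// generic sketch of "upload only what the store is missing"
def putIfNeeded(
    blobs: Seq[String],                // stand-ins for HashedVirtualFileRef
    find: Seq[String] => Set[String],  // what the store already has
    put: Seq[String] => Set[String],   // upload, returning what was accepted
): Seq[String] =
  val found = find(blobs)
  val missing = blobs.filterNot(found.contains)
  val present = put(missing) ++ found
  blobs.filter(present.contains)       // keep caller's order, drop anything still absent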
/**
 * An aggregate ActionCacheStore.
 */
class AggregateActionCacheStore(stores: Seq[ActionCacheStore]) extends AbstractActionCacheStore:
  extension [A1](xs: Seq[A1])
    // unlike collectFirst this accepts A1 => Option[A2]
    inline def collectFirst1[A2](f: A1 => Option[A2]): Option[A2] =
      xs.foldLeft(Option.empty[A2]): (res, x) =>
        res.orElse(f(x))

    // unlike collectFirst this accepts A1 => Seq[A2]
    inline def collectFirst2[A2](f: A1 => Seq[A2]): Seq[A2] =
    inline def collectFirst2[A2](f: A1 => Seq[A2], size: Int): Seq[A2] =
      xs.foldLeft(Seq.empty[A2]): (res, x) =>
        if res.isEmpty then f(x) else res
        if res.size == size then res else f(x)

  override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] =
    stores.collectFirst1(_.get[A1](input))
  override def storeName: String = "aggregate"

  override def put[A1: ClassTag: JsonFormat](
      actionDigest: Digest,
      value: A1,
      blobs: Seq[VirtualFile],
  ): ActionResult[A1] =
    (stores
      .foldLeft(Option.empty[ActionResult[A1]]): (res, store) =>
  override def get(request: GetActionResultRequest): Either[Throwable, ActionResult] =
    // unlike collectFirst we operate on A1 => Option[A2]
    stores.foldLeft(Left(notFound): Either[Throwable, ActionResult]): (res, store) =>
      if res.isRight then res
      else store.get(request)

  override def put(request: UpdateActionResultRequest): Either[Throwable, ActionResult] =
    stores
      .foldLeft(Left(notFound): Either[Throwable, ActionResult]): (res, store) =>
        // put the value into all stores
        val v = store.put[A1](actionDigest, value, blobs)
        res.orElse(Some(v))
      )
      .get
        val v = store.put(request)
        res.orElse(v)

  override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] =
    stores.foldLeft(Seq.empty[HashedVirtualFileRef]): (res, store) =>

@@ -81,55 +101,73 @@ class AggregateActionCacheStore(stores: Seq[ActionCacheStore]) extends ActionCac
      val xs = store.putBlobs(blobs)
      if res.isEmpty then xs else res

  override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] =
    stores.collectFirst2(_.getBlobs(refs))

  override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
    stores.collectFirst2(_.syncBlobs(refs, outputDirectory))
    stores.collectFirst2(_.syncBlobs(refs, outputDirectory), refs.size)

  override def findBlobs(refs: Seq[HashedVirtualFileRef]): Seq[HashedVirtualFileRef] =
    stores.collectFirst2(_.findBlobs(refs), refs.size)
end AggregateActionCacheStore

object AggregateActionCacheStore:
  lazy val empty: AggregateActionCacheStore = AggregateActionCacheStore(Nil)
end AggregateActionCacheStore
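As the folds above show, reads stop at the first store that answers while writes are attempted on every member store. A small sketch of composing stores follows; it uses only in-memory members so it stays self-contained, and the request constructors are again assumed from their use elsewhere in this diff.

// illustrative sketch only
import sbt.util.*

def aggregateSketch(): Unit =
  val near = InMemoryActionCacheStore()
  val far = InMemoryActionCacheStore()
  val combined = AggregateActionCacheStore(Seq(near, far))
  val digest = Digest.sha256Hash("some action".getBytes("UTF-8"))
  // seed only the second store, then resolve through the aggregate
  far.put(UpdateActionResultRequest(digest, Vector.empty, exitCode = 0))
  val hit = combined.get(
    GetActionResultRequest(digest, inlineStdout = false, inlineStderr = false, Vector.empty)
  )
  assert(hit.isRight) // served by `far` even though `near` missed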
class InMemoryActionCacheStore extends ActionCacheStore:
class InMemoryActionCacheStore extends AbstractActionCacheStore:
  private val underlying: mutable.Map[Digest, JValue] = mutable.Map.empty
  private val blobCache: mutable.Map[String, VirtualFile] = mutable.Map.empty

  override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] =
    underlying
      .get(input)
      .map: j =>
        Converter.fromJsonUnsafe[ActionResult[A1]](j)
  override def storeName: String = "in-memory"
  override def get(request: GetActionResultRequest): Either[Throwable, ActionResult] =
    val optResult = underlying
      .get(request.actionDigest)
      .flatMap: j =>
        try
          val value = Converter.fromJsonUnsafe[ActionResult](j)
          if request.inlineOutputFiles.isEmpty then Some(value)
          else
            val inlineRefs = request.inlineOutputFiles.map: path =>
              value.outputFiles.find(_.id == path).get
            val contents = getBlobs(inlineRefs).toVector.map: b =>
              ByteBuffer.wrap(IO.readBytes(b.input))
            Some(value.withContents(contents))
        catch case NonFatal(_) => None
    optResult match
      case Some(r) => Right(r)
      case None => Left(notFound)

  override def put[A1: ClassTag: JsonFormat](
      key: Digest,
      value: A1,
      blobs: Seq[VirtualFile],
  ): ActionResult[A1] =
    val refs = putBlobs(blobs)
    val v = ActionResult(value, refs)
  override def put(request: UpdateActionResultRequest): Either[Throwable, ActionResult] =
    val refs = putBlobsIfNeeded(request.outputFiles).toVector
    val v = ActionResult(refs, storeName)
    val json = Converter.toJsonUnsafe(v)
    underlying(key) = json
    v
    underlying(request.actionDigest) = json
    Right(v)

  override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] =
    blobs.map: (b: VirtualFile) =>
      blobCache(b.contentHashStr()) = b
      (b: HashedVirtualFileRef)

  // we won't keep the blobs in-memory so return Nil
  override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] =
    Nil
  private def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] =
    refs.map: ref =>
      blobCache(ref.contentHashStr())

  // we won't keep the blobs in-memory so return Nil
  // to implement this correctly, we'd have to grab the content from the original file
  override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
    Nil

  override def findBlobs(refs: Seq[HashedVirtualFileRef]): Seq[HashedVirtualFileRef] =
    refs.flatMap: r =>
      if blobCache.contains(r.contentHashStr()) then Some(r)
      else None

  override def toString(): String =
    underlying.toString()
end InMemoryActionCacheStore
class DiskActionCacheStore(base: Path, fileConverter: FileConverter) extends ActionCacheStore:
class DiskActionCacheStore(base: Path, fileConverter: FileConverter)
    extends AbstractActionCacheStore:
  lazy val casBase: Path = {
    val dir = base.resolve("cas")
    IO.createDirectory(dir.toFile)

@@ -142,51 +180,80 @@ class DiskActionCacheStore(base: Path, fileConverter: FileConverter) extends Act
    dir
  }

  override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] =
    val acFile = acBase.toFile / input.toString
  override def storeName: String = "disk"
  override def get(request: GetActionResultRequest): Either[Throwable, ActionResult] =
    val acFile = acBase.toFile / request.actionDigest.toString.replace("/", "-")
    if acFile.exists then
      val str = IO.read(acFile)
      val json = Parser.parseUnsafe(str)
      try
        val value = Converter.fromJsonUnsafe[ActionResult[A1]](json)
        Some(value)
      catch case NonFatal(_) => None
    else None
        val value = Converter.fromJsonUnsafe[ActionResult](json)
        if request.inlineOutputFiles.isEmpty then Right(value)
        else
          val inlineRefs = request.inlineOutputFiles.map: path =>
            value.outputFiles.find(_.id == path).get
          val contents = getBlobs(inlineRefs).toVector.map: b =>
            ByteBuffer.wrap(IO.readBytes(b.input))
          Right(value.withContents(contents))
      catch case NonFatal(e) => Left(e)
    else Left(notFound)

  override def put[A1: ClassTag: JsonFormat](
      key: Digest,
      value: A1,
      blobs: Seq[VirtualFile],
  ): ActionResult[A1] =
    val acFile = acBase.toFile / key.toString
    val refs = putBlobs(blobs)
    val v = ActionResult(value, refs)
    val json = Converter.toJsonUnsafe(v)
    IO.write(acFile, CompactPrinter(json))
    v
  override def put(request: UpdateActionResultRequest): Either[Throwable, ActionResult] =
    try
      val acFile = acBase.toFile / request.actionDigest.toString.replace("/", "-")
      val refs = putBlobsIfNeeded(request.outputFiles).toVector
      val v = ActionResult(refs, storeName)
      val json = Converter.toJsonUnsafe(v)
      IO.write(acFile, CompactPrinter(json))
      Right(v)
    catch case NonFatal(e) => Left(e)

  override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] =
    blobs.map: (b: VirtualFile) =>
      val outFile = casBase.toFile / Digest(b.contentHashStr).toString
      IO.transfer(b.input, outFile)
      putBlob(b.input, Digest(b))
      (b: HashedVirtualFileRef)

  override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] =
  def toCasFile(digest: Digest): Path =
    (casBase.toFile / digest.toString.replace("/", "-")).toPath()

  def putBlob(input: InputStream, digest: Digest): Path =
    val casFile = toCasFile(digest)
    IO.transfer(input, casFile.toFile())
    casFile

  def putBlob(input: ByteBuffer, digest: Digest): Path =
    val casFile = toCasFile(digest)
    input.flip()
    val file = RandomAccessFile(casFile.toFile(), "rw")
    try
      file.getChannel().write(input)
      casFile
    finally file.close()

  private def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] =
    refs.flatMap: r =>
      val casFile = casBase.toFile / Digest(r.contentHashStr).toString
      if casFile.exists then
      val casFile = toCasFile(Digest(r))
      if casFile.toFile().exists then
        r match
          case p: PathBasedFile => Some(p)
          case _ => None
          case _ =>
            val content = IO.read(casFile.toFile())
            Some(StringVirtualFile1(r.id, content))
      else None

  override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
    refs.flatMap: ref =>
      val casFile = casBase.toFile / Digest(ref.contentHashStr).toString
      if casFile.exists then
      val casFile = toCasFile(Digest(ref))
      if casFile.toFile().exists then
        val outPath = fileConverter.toPath(ref)
        Files.createDirectories(outPath.getParent())
        if outPath.toFile().exists() then IO.delete(outPath.toFile())
        Some(Files.createSymbolicLink(outPath, casFile.toPath))
        Some(Files.createSymbolicLink(outPath, casFile))
      else None

  override def findBlobs(refs: Seq[HashedVirtualFileRef]): Seq[HashedVirtualFileRef] =
    refs.flatMap: r =>
      val casFile = toCasFile(Digest(r))
      if casFile.toFile().exists then Some(r)
      else None
end DiskActionCacheStore
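On disk the store is content-addressed: blobs land under <base>/cas and action results under the acBase sibling, with the `/` in the digest string (which separates the hex hash from the byte size) flattened to `-` so it makes a legal file name, while syncBlobs materializes outputs back into the build as symbolic links. A plain-JDK sketch of just that naming convention follows; the helper below is hypothetical, not sbt API.

// hypothetical helper reproducing only the "<algo>-<hex>/<size>" -> file name mapping
import java.nio.file.{ Path, Paths }
import java.security.MessageDigest

def casPath(base: Path, bytes: Array[Byte]): Path =
  val hex = MessageDigest.getInstance("SHA-256").digest(bytes).map(b => f"$b%02x").mkString
  val digestString = s"sha256-$hex/${bytes.length}"         // same shape as Digest above
  base.resolve("cas").resolve(digestString.replace("/", "-"))

@main def casDemo(): Unit =
  println(casPath(Paths.get("/tmp/sbt-cache"), Array.emptyByteArray))
  // /tmp/sbt-cache/cas/sha256-e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855-0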
@@ -1,37 +0,0 @@
package sbt.util

import scala.reflect.ClassTag
import sjsonnew.*
import xsbti.HashedVirtualFileRef

/**
 * An action result represents a result from excuting a task.
 * In addition to the value typically represented in the return type
 * of the task, action value tracks the file output side effect.
 */
class ActionResult[A1](a: A1, outs: Seq[HashedVirtualFileRef]):
  def value: A1 = a
  def outputFiles: Seq[HashedVirtualFileRef] = outs
  override def equals(o: Any): Boolean =
    o match {
      case o: ActionResult[a] => this.value == o.value && this.outputFiles == o.outputFiles
      case _ => false
    }
  override def hashCode(): Int = (a, outs).##
  override def toString(): String = s"ActionResult($a, $outs)"
end ActionResult

object ActionResult:
  import CacheImplicits.*

  given [A1: ClassTag: JsonFormat]
      : IsoLList.Aux[ActionResult[A1], A1 :*: Vector[HashedVirtualFileRef] :*: LNil] =
    LList.iso(
      { (v: ActionResult[A1]) =>
        ("value", v.value) :*: ("outputFiles", v.outputFiles.toVector) :*: LNil
      },
      { (in: A1 :*: Vector[HashedVirtualFileRef] :*: LNil) =>
        ActionResult(in.head, in.tail.head)
      }
    )
end ActionResult
@@ -7,10 +7,11 @@

package sbt.util

import sbt.internal.util.codec.HashedVirtualFileRefFormats
import sjsonnew.{ BasicJsonProtocol, IsoString, JsonFormat }
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
import xsbti.VirtualFileRef

trait BasicCacheImplicits { self: BasicJsonProtocol =>
trait BasicCacheImplicits extends HashedVirtualFileRefFormats { self: BasicJsonProtocol =>

  implicit def basicCache[I: JsonFormat, O: JsonFormat]: Cache[I, O] =
    new BasicCache[I, O]()

@@ -24,21 +25,6 @@ trait BasicCacheImplicits { self: BasicJsonProtocol =>
  def singleton[T](t: T): SingletonCache[T] =
    SingletonCache.basicSingletonCache(asSingleton(t))

  /**
   * A string representation of HashedVirtualFileRef, delimited by `>`.
   */
  def hashedVirtualFileRefToStr(ref: HashedVirtualFileRef): String =
    s"${ref.id}>${ref.contentHashStr}"

  def strToHashedVirtualFileRef(s: String): HashedVirtualFileRef =
    s.split(">").toList match {
      case path :: hash :: Nil => HashedVirtualFileRef.of(path, hash)
      case _ => throw new RuntimeException(s"invalid HashedVirtualFileRefIsoString $s")
    }

  implicit lazy val virtualFileRefIsoString: IsoString[VirtualFileRef] =
    IsoString.iso(_.id, VirtualFileRef.of)

  implicit lazy val hashedVirtualFileRefIsoString: IsoString[HashedVirtualFileRef] =
    IsoString.iso(hashedVirtualFileRefToStr, strToHashedVirtualFileRef)
}
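The `>`-delimited iso-string for HashedVirtualFileRef is removed from this trait, presumably now provided by HashedVirtualFileRefFormats, which the trait mixes in; the round trip it encoded is simply id plus content hash. It is sketched below over plain strings so it runs without the xsbti types; these helpers are illustrative, not the moved implementation.

// illustrative only: the id>contentHash round trip formerly defined above
def encodeRef(id: String, contentHash: String): String = s"$id>$contentHash"
def decodeRef(s: String): (String, String) =
  s.split(">").toList match
    case id :: hash :: Nil => (id, hash)
    case _ => throw new RuntimeException(s"invalid HashedVirtualFileRefIsoString $s")

@main def isoDemo(): Unit =
  assert(decodeRef(encodeRef("a.txt", "murmur3-0000/0")) == ("a.txt", "murmur3-0000/0"))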
@@ -2,6 +2,7 @@ package sbt.util

import sjsonnew.IsoString
import sbt.io.Hash
import xsbti.HashedVirtualFileRef
import java.io.{ BufferedInputStream, InputStream }
import java.nio.ByteBuffer
import java.security.{ DigestInputStream, MessageDigest }

@@ -11,30 +12,36 @@ opaque type Digest = String
object Digest:
  private val sha256_upper = "SHA-256"

  extension (d: Digest) def toBytes: Array[Byte] = parse(d)
  extension (d: Digest)
    def contentHashStr: String =
      val tokens = parse(d)
      s"${tokens._1}-${tokens._2}"
    def toBytes: Array[Byte] = parse(d)._4
    def sizeBytes: Long = parse(d)._3

  def apply(s: String): Digest =
    validateString(s)
    s

  def apply(algo: String, bytes: Array[Byte]): Digest =
    algo + "-" + toHexString(bytes)
  def apply(algo: String, digest: Array[Byte], sizeBytes: Long): Digest =
    algo + "-" + toHexString(digest) + "/" + sizeBytes.toString

  def apply(ref: HashedVirtualFileRef): Digest =
    apply(ref.contentHashStr() + "/" + ref.sizeBytes.toString)

  // used to wrap a Long value as a fake Digest, which will
  // later be hashed using sha256 anyway.
  def dummy(value: Long): Digest =
    apply("murmur3", longsToBytes(Array(0L, value)))
    apply("murmur3", longsToBytes(Array(0L, value)), 0)

  lazy val zero: Digest = dummy(0L)

  def sha256Hash(bytes: Array[Byte]): Digest =
    apply("sha256", hashBytes(sha256_upper, bytes))
    apply("sha256", hashBytes(sha256_upper, bytes), bytes.length)

  def sha256Hash(longs: Array[Long]): Digest =
    apply("sha256", hashBytes(sha256_upper, longs))

  def sha256Hash(input: InputStream): Digest =
    apply("sha256", hashBytes(sha256_upper, input))
    val bytes = hashBytes(sha256_upper, longs)
    apply("sha256", bytes, bytes.length)

  def sha256Hash(digests: Digest*): Digest =
    sha256Hash(digests.toSeq.map(_.toBytes).flatten.toArray[Byte])

@@ -62,16 +69,26 @@ object Digest:
    parse(s)
    ()

  private def parse(s: String): Array[Byte] =
  private def parse(s: String): (String, String, Long, Array[Byte]) =
    val tokens = s.split("-").toList
    tokens match
      case "murmur3" :: value :: Nil => parseHex(value, 128)
      case "md5" :: value :: Nil => parseHex(value, 128)
      case "sha1" :: value :: Nil => parseHex(value, 160)
      case "sha256" :: value :: Nil => parseHex(value, 256)
      case "sha384" :: value :: Nil => parseHex(value, 384)
      case "sha512" :: value :: Nil => parseHex(value, 512)
      case _ => throw IllegalArgumentException(s"unexpected digest: $s")
      case head :: rest :: Nil =>
        val subtokens = head :: rest.split("/").toList
        subtokens match
          case (a @ "murmur3") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 128))
          case (a @ "md5") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 128))
          case (a @ "sha1") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 160))
          case (a @ "sha256") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 256))
          case (a @ "sha384") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 384))
          case (a @ "sha512") :: value :: sizeBytes :: Nil =>
            (a, value, sizeBytes.toLong, parseHex(value, 512))
          case _ => throw IllegalArgumentException(s"unexpected digest: $s")
      case _ => throw IllegalArgumentException(s"unexpected digest: $s")

  private def parseHex(value: String, expectedBytes: Int): Array[Byte] =
    val bs = Hash.fromHex(value)
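The digest string now carries the payload size after a slash, "<algo>-<hex>/<sizeBytes>", which is exactly what the reworked parse splits back apart. A stand-alone sketch follows that produces the same string for empty input and matches the expectations in DigestTest further down; it uses only the JDK, and the helper names are illustrative.

// stand-alone sketch of the "<algo>-<hex>/<sizeBytes>" form used by Digest above
import java.security.MessageDigest

def sha256DigestString(bytes: Array[Byte]): String =
  val hex = MessageDigest.getInstance("SHA-256").digest(bytes).map(b => f"$b%02x").mkString
  s"sha256-$hex/${bytes.length}"

def splitDigestString(s: String): (String, String, Long) =
  val dash = s.indexOf('-')
  val slash = s.indexOf('/', dash + 1)
  (s.substring(0, dash), s.substring(dash + 1, slash), s.substring(slash + 1).toLong)

@main def digestDemo(): Unit =
  val d = sha256DigestString(Array.emptyByteArray)
  assert(d == "sha256-e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/0")
  assert(splitDigestString(d)._3 == 0L)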
@@ -1,5 +1,6 @@
package sbt.util

import sbt.internal.util.CacheEventLog
import sbt.internal.util.StringVirtualFile1
import sbt.io.IO
import sbt.io.syntax.*

@@ -40,7 +41,7 @@ object ActionCacheTest extends BasicTestSuite:
      (a + b, Nil)
    }
    IO.withTemporaryDirectory: (tempDir) =>
      val config = BuildWideCacheConfiguration(cache, tempDir.toPath())
      val config = BuildWideCacheConfiguration(cache, tempDir.toPath(), CacheEventLog())
      val v1 =
        ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config)
      assert(v1 == 2)

@@ -62,7 +63,7 @@ object ActionCacheTest extends BasicTestSuite:
        val out = StringVirtualFile1(s"$tempDir/a.txt", (a + b).toString)
        (a + b, Seq(out))
      }
      val config = BuildWideCacheConfiguration(cache, tempDir.toPath())
      val config = BuildWideCacheConfiguration(cache, tempDir.toPath(), CacheEventLog())
      val v1 =
        ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config)
      assert(v1 == 2)
@@ -0,0 +1,54 @@
package sbt.util

import sbt.internal.util.{ ActionCacheEvent, CacheEventLog }
import verify.BasicTestSuite

object CacheEventLogTest extends BasicTestSuite:
  test("summary of 0 events") {
    val logger = CacheEventLog()
    val expectedSummary = ""
    assertEquals(logger.summary, expectedSummary)
  }

  test("summary of 1 disk event") {
    val logger = CacheEventLog()
    logger.append(ActionCacheEvent.Found("disk"))
    val expectedSummary = "cache 100%, 1 disk cache hit"
    assertEquals(logger.summary, expectedSummary)
  }

  test("summary of 2 disk events") {
    val logger = CacheEventLog()
    logger.append(ActionCacheEvent.Found("disk"))
    logger.append(ActionCacheEvent.Found("disk"))
    val expectedSummary = "cache 100%, 2 disk cache hits"
    assertEquals(logger.summary, expectedSummary)
  }

  test("summary of 1 disk, 1 miss event") {
    val logger = CacheEventLog()
    logger.append(ActionCacheEvent.Found("disk"))
    logger.append(ActionCacheEvent.NotFound)
    val expectedSummary = "cache 50%, 1 disk cache hit, 1 onsite task"
    assertEquals(logger.summary, expectedSummary)
  }

  test("summary of 1 disk, 2 remote, 1 miss event") {
    val logger = CacheEventLog()
    logger.append(ActionCacheEvent.Found("disk"))
    logger.append(ActionCacheEvent.Found("remote"))
    logger.append(ActionCacheEvent.Found("remote"))
    logger.append(ActionCacheEvent.NotFound)
    val expectedSummary = "cache 75%, 1 disk cache hit, 2 remote cache hits, 1 onsite task"
    assertEquals(logger.summary, expectedSummary)
  }

  test("summary of 1 disk event after clear") {
    val logger = CacheEventLog()
    logger.append(ActionCacheEvent.Found("disk"))
    logger.clear()
    logger.append(ActionCacheEvent.Found("disk"))
    val expectedSummary = "cache 100%, 1 disk cache hit"
    assertEquals(logger.summary, expectedSummary)
  }
end CacheEventLogTest
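The summary line reports the hit rate as hits over hits-plus-onsite-tasks, with per-store counts broken out: the 75% case above is 3 hits against 1 locally executed task. The arithmetic is shown below; how CacheEventLog itself rounds is not visible in this diff, so the helper is only a presumed reconstruction.

// illustrative arithmetic behind the "cache 75%" figure in the test above
def hitRatePercent(hits: Int, onsiteTasks: Int): Long =
  math.round(hits.toDouble * 100 / (hits + onsiteTasks))

@main def summaryDemo(): Unit =
  assert(hitRatePercent(hits = 3, onsiteTasks = 1) == 75) // 1 disk + 2 remote, 1 onsite task
  assert(hitRatePercent(hits = 1, onsiteTasks = 1) == 50)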
@@ -2,41 +2,42 @@ package sbt.util

object DigestTest extends verify.BasicTestSuite:
  test("murmur3") {
    val d = Digest("murmur3-00000000000000000000000000000000")
    val d = Digest("murmur3-00000000000000000000000000000000/0")
    val dummy = Digest.dummy(0L)
    assert(d == dummy)
  }

  test("md5") {
    val d = Digest("md5-d41d8cd98f00b204e9800998ecf8427e")
    val d = Digest("md5-d41d8cd98f00b204e9800998ecf8427e/0")
  }

  test("sha1") {
    val d = Digest("sha1-da39a3ee5e6b4b0d3255bfef95601890afd80709")
    val d = Digest("sha1-da39a3ee5e6b4b0d3255bfef95601890afd80709/0")
  }

  test("sha256") {
    val hashOfNull = Digest.sha256Hash(Array[Byte]())
    val d = Digest("sha256-e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
    val d = Digest("sha256-e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855/0")
    assert(hashOfNull == d)
  }

  test("sha384") {
    val d = Digest(
      "sha384-38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b"
      "sha384-38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b/0"
    )
  }

  test("sha512") {
    val d = Digest(
      "sha512-cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
      "sha512-cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e/0"
    )
  }

  test("digest composition") {
    val dummy1 = Digest.dummy(0L)
    val dummy2 = Digest.dummy(0L)
    val expected = Digest("sha256-66687aadf862bd776c8fc18b8e9f8e20089714856ee233b3902a591d0d5f2925")
    val expected =
      Digest("sha256-66687aadf862bd776c8fc18b8e9f8e20089714856ee233b3902a591d0d5f2925/32")
    assert(Digest.sha256Hash(dummy1, dummy2) == expected)
  }
end DigestTest
@@ -27,7 +27,7 @@ object HasherTest extends BasicTestSuite:

  test("HashedVirtualFileRef") {
    import PathHashWriters.given
    val x = HashedVirtualFileRef.of("a.txt", blankContentHashStr)
    val x = HashedVirtualFileRef.of("a.txt", blankContentHashStr, 0L)
    val actual = Hasher.hashUnsafe(x)
    assert(actual == blankATxtHash)
  }