mirror of https://github.com/sbt/sbt.git
[sbt 2.x] remote cache (#7464)
For the details about this PR, please see the blog post https://eed3si9n.com/sbt-remote-cache/.
* Add cache basics
* Refactor Attributed to use StringAttributeMap, which is Map[StringAttributeKey, String]
* Implement disk cache
* Rename Package to Pkg
* Virtualize packageBin
* Use HashedVirtualFileRef for packageBin
* Virtualize compile task
This commit is contained in:
parent
0d291f0489
commit
ecca26175e
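Editor's note: the user-facing surface of this change is the new `Def.cachedTask` macro (see the `Def.scala` hunk below). A minimal sketch of what a build author would write, assuming the `JsonFormat[String]` instance from sjson-new's `BasicJsonProtocol` is in scope — the key and body are illustrative, not part of this commit:

```scala
import sbt.*
import sbt.Keys.*
import sjsonnew.BasicJsonProtocol.*

// A cached task: values pulled in via .value become part of the cache key,
// together with a hash of the macro-expanded task body, and the serialized
// result is stored in the configured ActionCacheStore.
val greeting = taskKey[String]("a remote-cacheable greeting")

greeting := Def.cachedTask {
  "hello, " + name.value
}.value
```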
@@ -52,10 +52,7 @@ jobs:
    env:
      JAVA_OPTS: -Xms800M -Xmx2G -Xss6M -XX:ReservedCodeCacheSize=128M -server -Dsbt.io.virtual=false -Dfile.encoding=UTF-8
      JVM_OPTS: -Xms800M -Xmx2G -Xss6M -XX:ReservedCodeCacheSize=128M -server -Dsbt.io.virtual=false -Dfile.encoding=UTF-8
-     SCALA_212: 2.12.17
-     SCALA_213: 2.13.8
-     SCALA_3: 3.1.0
-     UTIL_TESTS: "utilCache/test utilControl/test utilInterface/test utilLogging/test utilPosition/test utilRelation/test utilScripted/test utilTracking/test"
+     UTIL_TESTS: "utilControl/test utilInterface/test utilLogging/test utilPosition/test utilRelation/test utilScripted/test"
      SBT_LOCAL: false
      TEST_SBT_VER: 1.5.0
      SBT_ETC_FILE: $HOME/etc/sbt/sbtopts
@@ -126,9 +123,9 @@ jobs:
          # ./sbt -v --client "serverTestProj/test"
          # ./sbt -v --client doc
          ./sbt -v --client "all $UTIL_TESTS"
-         ./sbt -v --client ++$SCALA_213
+         ./sbt -v --client ++2.13.x
          ./sbt -v --client "all $UTIL_TESTS"
-         ./sbt -v --client ++$SCALA_212
+         ./sbt -v --client ++2.12.x
          ./sbt -v --client "all $UTIL_TESTS"
      - name: Build and test (2)
        if: ${{ matrix.jobtype == 2 }}
@@ -156,8 +153,8 @@ jobs:
        shell: bash
        run: |
          ./sbt -v --client test
-         ./sbt -v --client "++$SCALA_213; all $UTIL_TESTS"
-         ./sbt -v --client "++$SCALA_212; all $UTIL_TESTS"
+         ./sbt -v --client "++2.13.x; all $UTIL_TESTS"
+         ./sbt -v --client "++2.12.x; all $UTIL_TESTS"
      # - name: Build and test (6)
      #   if: ${{ matrix.jobtype == 6 }}
      #   shell: bash
build.sbt
@@ -427,17 +427,17 @@ lazy val utilRelation = (project in file("internal") / "util-relation")
lazy val utilCache = (project in file("util-cache"))
  .settings(
    utilCommonSettings,
    testedBaseSettings,
    name := "Util Cache",
    libraryDependencies ++=
      Seq(sjsonNewScalaJson.value, sjsonNewMurmurhash.value, scalaReflect.value),
    libraryDependencies ++= Seq(scalatest % "test"),
-   utilMimaSettings,
-   mimaBinaryIssueFilters ++= Seq(
-     // Added a method to a sealed trait, technically not a problem for Scala
-     exclude[ReversedMissingMethodProblem]("sbt.util.HashFileInfo.hashArray"),
-   )
+   Test / fork := true,
  )
-  .configure(addSbtIO)
+  .configure(
+    addSbtIO,
+    addSbtCompilerInterface,
+  )

// Builds on cache to provide caching for filesystem-related operations
lazy val utilTracking = (project in file("util-tracking"))
@@ -660,6 +660,7 @@ lazy val actionsProj = (project in file("main-actions"))
    stdTaskProj,
    taskProj,
    testingProj,
+   utilCache,
    utilLogging,
    utilRelation,
    utilTracking,
@@ -735,7 +736,7 @@ lazy val protocolProj = (project in file("protocol"))
// General command support and core commands not specific to a build system
lazy val commandProj = (project in file("main-command"))
  .enablePlugins(ContrabandPlugin, JsonCodecPlugin)
-  .dependsOn(protocolProj, completeProj, utilLogging)
+  .dependsOn(protocolProj, completeProj, utilLogging, utilCache)
  .settings(
    testedBaseSettings,
    name := "Command",
@@ -801,7 +802,10 @@ lazy val commandProj = (project in file("main-command"))
// The core macro project defines the main logic of the DSL, abstracted
// away from several sbt implementors (tasks, settings, et cetera).
lazy val coreMacrosProj = (project in file("core-macros"))
-  .dependsOn(collectionProj)
+  .dependsOn(
+    collectionProj,
+    utilCache,
+  )
  .settings(
    testedBaseSettings,
    name := "Core Macros",
@@ -1268,20 +1272,20 @@ def allProjects =
    bundledLauncherProj,
    sbtClientProj,
    buildFileProj,
-   utilCache,
-   utilTracking,
  ) ++ lowerUtilProjects

// These need to be cross published to 2.12 and 2.13 for Zinc
lazy val lowerUtilProjects =
  Seq(
    utilCore,
+   utilCache,
    utilControl,
    utilInterface,
    utilLogging,
    utilPosition,
    utilRelation,
    utilScripted,
+   utilTracking
  )

lazy val nonRoots = allProjects.map(p => LocalProject(p.id))
@@ -4,9 +4,21 @@ package util
package appmacro

import scala.collection.mutable.ListBuffer
import scala.reflect.{ ClassTag, TypeTest }
import scala.quoted.*
-import sbt.util.Applicative
-import sbt.util.Monad
+import sjsonnew.{ BasicJsonProtocol, HashWriter, JsonFormat }
+import sbt.util.{
+  ActionCache,
+  ActionCacheStore,
+  Applicative,
+  BuildWideCacheConfiguration,
+  Cache,
+  CacheLevelTag,
+  Digest,
+  Monad,
+}
+import xsbti.VirtualFile
import Types.Id

/**
 * Implementation of a macro that provides a direct syntax for applicative functors and monads. It
@@ -22,12 +34,13 @@ trait Cont:
   */
  def contMapN[A: Type, F[_], Effect[_]: Type](
      tree: Expr[A],
-      instanceExpr: Expr[Applicative[F]]
+      applicativeExpr: Expr[Applicative[F]],
+      cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]],
  )(using
      iftpe: Type[F],
      eatpe: Type[Effect[A]],
  ): Expr[F[Effect[A]]] =
-    contMapN[A, F, Effect](tree, instanceExpr, conv.idTransform)
+    contMapN[A, F, Effect](tree, applicativeExpr, cacheConfigExpr, conv.idTransform)

  /**
   * Implementation of a macro that provides a direct syntax for applicative functors. It is
@@ -35,13 +48,14 @@ trait Cont:
   */
  def contMapN[A: Type, F[_], Effect[_]: Type](
      tree: Expr[A],
-      instanceExpr: Expr[Applicative[F]],
+      applicativeExpr: Expr[Applicative[F]],
+      cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]],
      inner: conv.TermTransform[Effect]
  )(using
      iftpe: Type[F],
      eatpe: Type[Effect[A]],
  ): Expr[F[Effect[A]]] =
-    contImpl[A, F, Effect](Left(tree), instanceExpr, inner)
+    contImpl[A, F, Effect](Left(tree), applicativeExpr, cacheConfigExpr, inner)

  /**
   * Implementation of a macro that provides a direct syntax for applicative functors. It is
@@ -49,12 +63,13 @@ trait Cont:
   */
  def contFlatMap[A: Type, F[_], Effect[_]: Type](
      tree: Expr[F[A]],
-      instanceExpr: Expr[Applicative[F]],
+      applicativeExpr: Expr[Applicative[F]],
+      cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]],
  )(using
      iftpe: Type[F],
      eatpe: Type[Effect[A]],
  ): Expr[F[Effect[A]]] =
-    contFlatMap[A, F, Effect](tree, instanceExpr, conv.idTransform)
+    contFlatMap[A, F, Effect](tree, applicativeExpr, cacheConfigExpr, conv.idTransform)

  /**
   * Implementation of a macro that provides a direct syntax for applicative functors. It is
@@ -62,13 +77,14 @@ trait Cont:
   */
  def contFlatMap[A: Type, F[_], Effect[_]: Type](
      tree: Expr[F[A]],
-      instanceExpr: Expr[Applicative[F]],
+      applicativeExpr: Expr[Applicative[F]],
+      cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]],
      inner: conv.TermTransform[Effect]
  )(using
      iftpe: Type[F],
      eatpe: Type[Effect[A]],
  ): Expr[F[Effect[A]]] =
-    contImpl[A, F, Effect](Right(tree), instanceExpr, inner)
+    contImpl[A, F, Effect](Right(tree), applicativeExpr, cacheConfigExpr, inner)

  def summonAppExpr[F[_]: Type]: Expr[Applicative[F]] =
    import conv.qctx
@@ -78,6 +94,30 @@ trait Cont:
      .summon[Applicative[F]]
      .getOrElse(sys.error(s"Applicative[F] not found for ${TypeRepr.of[F].typeSymbol}"))

+  def summonHashWriter[A: Type]: Expr[HashWriter[A]] =
+    import conv.qctx
+    import qctx.reflect.*
+    given qctx.type = qctx
+    Expr
+      .summon[HashWriter[A]]
+      .getOrElse(sys.error(s"HashWriter[A] not found for ${TypeRepr.of[A].show}"))
+
+  def summonJsonFormat[A: Type]: Expr[JsonFormat[A]] =
+    import conv.qctx
+    import qctx.reflect.*
+    given qctx.type = qctx
+    Expr
+      .summon[JsonFormat[A]]
+      .getOrElse(sys.error(s"JsonFormat[A] not found for ${TypeRepr.of[A].show}"))
+
+  def summonClassTag[A: Type]: Expr[ClassTag[A]] =
+    import conv.qctx
+    import qctx.reflect.*
+    given qctx.type = qctx
+    Expr
+      .summon[ClassTag[A]]
+      .getOrElse(sys.error(s"ClassTag[A] not found for ${TypeRepr.of[A].show}"))
+
  /**
   * Implementation of a macro that provides a direct syntax for applicative functors and monads.
   * It is intended to be used in conjunction with another macro that conditions the inputs.
@@ -117,7 +157,8 @@ trait Cont:
   */
  def contImpl[A: Type, F[_], Effect[_]: Type](
      eitherTree: Either[Expr[A], Expr[F[A]]],
-      instanceExpr: Expr[Applicative[F]],
+      applicativeExpr: Expr[Applicative[F]],
+      cacheConfigExprOpt: Option[Expr[BuildWideCacheConfiguration]],
      inner: conv.TermTransform[Effect]
  )(using
      iftpe: Type[F],
@@ -134,18 +175,34 @@ trait Cont:
      case Right(r) => (r, faTpe)

    val inputBuf = ListBuffer[Input]()
+    val outputBuf = ListBuffer[Output]()

    def makeApp(body: Term, inputs: List[Input]): Expr[F[Effect[A]]] = inputs match
      case Nil      => pure(body)
      case x :: Nil => genMap(body, x)
      case xs       => genMapN(body, xs)
+    def unitExpr: Expr[Unit] = '{ () }

    // no inputs, so construct F[A] via Instance.pure or pure+flatten
    def pure(body: Term): Expr[F[Effect[A]]] =
+      val tags = CacheLevelTag.all.toList
      def pure0[A1: Type](body: Expr[A1]): Expr[F[A1]] =
-        '{
-          $instanceExpr.pure[A1] { () => $body }
-        }
+        cacheConfigExprOpt match
+          case Some(cacheConfigExpr) =>
+            '{
+              $applicativeExpr.pure[A1] { () =>
+                ${
+                  callActionCache[A1, Unit](outputBuf.toList, cacheConfigExpr, tags)(
+                    body = body,
+                    input = unitExpr,
+                  )
+                }
+              }
+            }
+          case None =>
+            '{
+              $applicativeExpr.pure[A1] { () => $body }
+            }
      eitherTree match
        case Left(_) => pure0[Effect[A]](inner(body).asExprOf[Effect[A]])
        case Right(_) =>
@@ -156,7 +213,7 @@ trait Cont:
    def flatten(m: Expr[F[F[Effect[A]]]]): Expr[F[Effect[A]]] =
      '{
        {
-          val i1 = $instanceExpr.asInstanceOf[Monad[F]]
+          val i1 = $applicativeExpr.asInstanceOf[Monad[F]]
          i1.flatten[Effect[A]]($m.asInstanceOf[F[F[Effect[A]]]])
        }
      }
@@ -183,13 +240,31 @@ trait Cont:
            convert[x](name, qual) transform { (tree: Term) =>
              typed[x](Ref(param.symbol))
            }
-            transformWrappers(body.asTerm.changeOwner(sym), substitute, sym)
+            val modifiedBody =
+              transformWrappers(body.asTerm.changeOwner(sym), substitute, sym).asExprOf[A1]
+            cacheConfigExprOpt match
+              case Some(cacheConfigExpr) =>
+                if input.isCacheInput then
+                  callActionCache(outputBuf.toList, cacheConfigExpr, input.tags)(
+                    body = modifiedBody,
+                    input = Ref(param.symbol).asExprOf[a],
+                  ).asTerm.changeOwner(sym)
+                else
+                  callActionCache[A1, Unit](
+                    outputBuf.toList,
+                    cacheConfigExpr,
+                    input.tags,
+                  )(
+                    body = modifiedBody,
+                    input = unitExpr,
+                  ).asTerm.changeOwner(sym)
+              case None => modifiedBody.asTerm
          }
        ).asExprOf[a => A1]
        val expr = input.term.asExprOf[F[a]]
        typed[F[A1]](
          '{
-            $instanceExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda)
+            $applicativeExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda)
          }.asTerm
        ).asExprOf[F[A1]]
        eitherTree match
@@ -203,57 +278,137 @@
      val br = makeTuple(inputs)
      val lambdaTpe =
        MethodType(List("$p0"))(_ => List(br.inputTupleTypeRepr), _ => TypeRepr.of[A1])
-      val lambda = Lambda(
-        owner = Symbol.spliceOwner,
-        tpe = lambdaTpe,
-        rhsFn = (sym, params) => {
-          val p0 = params.head.asInstanceOf[Term]
-          // Called when transforming the tree to add an input.
-          // For `qual` of type F[A], and a `selection` qual.value,
-          // the call is addType(Type A, Tree qual)
-          // The result is a Tree representing a reference to
-          // the bound value of the input.
-          val substitute = [x] =>
-            (name: String, tpe: Type[x], qual: Term, oldTree: Term) =>
-              given Type[x] = tpe
-              convert[x](name, qual) transform { (replacement: Term) =>
-                val idx = inputs.indexWhere(input => input.qual == qual)
-                Select
-                  .unique(Ref(p0.symbol), "apply")
-                  .appliedToTypes(List(br.inputTupleTypeRepr))
-                  .appliedToArgs(List(Literal(IntConstant(idx))))
+      br.inputTupleTypeRepr.asType match
+        case '[inputTypeTpe] =>
+          val lambda = Lambda(
+            owner = Symbol.spliceOwner,
+            tpe = lambdaTpe,
+            rhsFn = (sym, params) => {
+              val p0 = params.head.asInstanceOf[Term]
+              // Called when transforming the tree to add an input.
+              // For `qual` of type F[A], and a `selection` qual.value,
+              // the call is addType(Type A, Tree qual)
+              // The result is a Tree representing a reference to
+              // the bound value of the input.
+              val substitute = [x] =>
+                (name: String, tpe: Type[x], qual: Term, oldTree: Term) =>
+                  given Type[x] = tpe
+                  convert[x](name, qual) transform { (replacement: Term) =>
+                    val idx = inputs.indexWhere(input => input.qual == qual)
+                    applyTuple(p0, br.inputTupleTypeRepr, idx)
+                  }
+              val modifiedBody =
+                transformWrappers(body.asTerm.changeOwner(sym), substitute, sym).asExprOf[A1]
+              cacheConfigExprOpt match
+                case Some(cacheConfigExpr) =>
+                  if inputs.exists(_.isCacheInput) then
+                    val tags = inputs
+                      .filter(_.isCacheInput)
+                      .map(_.tags.toSet)
+                      .reduce(_ & _)
+                      .toList
+                    require(
+                      tags.nonEmpty,
+                      s"""cacheLevelTag union must be non-empty: ${inputs.mkString("\n")}"""
+                    )
+                    br.cacheInputTupleTypeRepr.asType match
+                      case '[cacheInputTpe] =>
+                        callActionCache(outputBuf.toList, cacheConfigExpr, tags)(
+                          body = modifiedBody,
+                          input = br.cacheInputExpr(p0).asExprOf[cacheInputTpe],
+                        ).asTerm.changeOwner(sym)
+                  else
+                    val tags = CacheLevelTag.all.toList
+                    callActionCache[A1, Unit](outputBuf.toList, cacheConfigExpr, tags)(
+                      body = modifiedBody,
+                      input = unitExpr,
+                    ).asTerm.changeOwner(sym)
+                case None =>
+                  modifiedBody.asTerm
+            }
-          transformWrappers(body.asTerm.changeOwner(sym), substitute, sym)
-        }
-      )
-      val tupleMapRepr = TypeRepr
-        .of[Tuple.Map]
-        .appliedTo(List(br.inputTupleTypeRepr, TypeRepr.of[F]))
-      tupleMapRepr.asType match
-        case '[tupleMap] =>
-          br.inputTupleTypeRepr.asType match
-            case '[inputTypeTpe] =>
+          )
+          val tupleMapRepr = TypeRepr
+            .of[Tuple.Map]
+            .appliedTo(List(br.inputTupleTypeRepr, TypeRepr.of[F]))
+          tupleMapRepr.asType match
+            case '[tupleMap] =>
              '{
-                given Applicative[F] = $instanceExpr
+                given Applicative[F] = $applicativeExpr
                import TupleMapExtension.*
                ${ br.tupleExpr.asInstanceOf[Expr[Tuple.Map[inputTypeTpe & Tuple, F]]] }
                  .mapN(${ lambda.asExprOf[inputTypeTpe & Tuple => A1] })
              }

    eitherTree match
      case Left(_) =>
        genMapN0[Effect[A]](inner(body).asExprOf[Effect[A]])
      case Right(_) =>
        flatten(genMapN0[F[Effect[A]]](inner(body).asExprOf[F[Effect[A]]]))

+    // call `ActionCache.cache`
+    def callActionCache[A1: Type, A2: Type](
+        outputs: List[Output],
+        cacheConfigExpr: Expr[BuildWideCacheConfiguration],
+        tags: List[CacheLevelTag],
+    )(body: Expr[A1], input: Expr[A2]): Expr[A1] =
+      val codeContentHash = Expr[Long](body.show.##)
+      val extraHash = Expr[Long](0L)
+      val aJsonFormat = summonJsonFormat[A1]
+      val aClassTag = summonClassTag[A1]
+      val inputHashWriter =
+        if TypeRepr.of[A2] =:= TypeRepr.of[Unit] then
+          '{
+            import BasicJsonProtocol.*
+            summon[HashWriter[Unit]]
+          }.asExprOf[HashWriter[A2]]
+        else summonHashWriter[A2]
+      val tagsExpr = '{ List(${ Varargs(tags.map(Expr[CacheLevelTag](_))) }: _*) }
+      val block = letOutput(outputs)(body)
+      '{
+        given HashWriter[A2] = $inputHashWriter
+        given JsonFormat[A1] = $aJsonFormat
+        given ClassTag[A1] = $aClassTag
+        ActionCache
+          .cache(
+            $input,
+            codeContentHash = Digest.dummy($codeContentHash),
+            extraHash = Digest.dummy($extraHash),
+            tags = $tagsExpr
+          )({ _ =>
+            $block
+          })($cacheConfigExpr)
+      }
+
+    // wrap body in between output var declarations and var references
+    def letOutput[A1: Type](
+        outputs: List[Output]
+    )(body: Expr[A1]): Expr[(A1, Seq[VirtualFile])] =
+      Block(
+        outputs.map(_.toVarDef),
+        '{
+          (
+            $body,
+            List(${ Varargs[VirtualFile](outputs.map(_.toRef.asExprOf[VirtualFile])) }: _*)
+          )
+        }.asTerm
+      ).asExprOf[(A1, Seq[VirtualFile])]
+
+    val WrapOutputName = "wrapOutput_\u2603\u2603"
    // Called when transforming the tree to add an input.
    // For `qual` of type F[A], and a `selection` qual.value.
    val record = [a] =>
      (name: String, tpe: Type[a], qual: Term, oldTree: Term) =>
        given t: Type[a] = tpe
        convert[a](name, qual) transform { (replacement: Term) =>
-          inputBuf += Input(TypeRepr.of[a], qual, replacement, freshName("q"))
-          oldTree
+          if name != WrapOutputName then
+            // todo cache opt-out attribute
+            inputBuf += Input(TypeRepr.of[a], qual, replacement, freshName("q"))
+            oldTree
+          else
+            val output = Output(TypeRepr.of[a], qual, freshName("o"), Symbol.spliceOwner)
+            outputBuf += output
+            if cacheConfigExprOpt.isDefined then output.toAssign
+            else oldTree
+          end if
        }
    val tx = transformWrappers(expr.asTerm, record, Symbol.spliceOwner)
    val tr = makeApp(tx, inputBuf.toList)
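Editor's note: stripped of the quoting machinery, the scheme `contImpl` generates is plain content-addressed memoization — the cache key combines a hash of the task's inputs with a hash of the task's own code (`body.show.##` above), so editing the body invalidates prior results. A toy sketch of that idea, not sbt's `ActionCache` API:

```scala
import scala.collection.mutable

// Toy stand-in for the generated ActionCache.cache call. Real sbt hashes
// inputs with HashWriter and serializes results with JsonFormat; this uses
// ## and an in-memory map to show the control flow only.
object ToyActionCache:
  private val store = mutable.Map.empty[(Int, Int), Any]
  def cache[I, O](input: I, codeHash: Int)(body: I => O): O =
    store.getOrElseUpdate((input.##, codeHash), body(input)).asInstanceOf[O]

@main def toyDemo(): Unit =
  val a = ToyActionCache.cache("src", codeHash = 42)(_.length) // runs the body
  val b = ToyActionCache.cache("src", codeHash = 42)(_.length) // cache hit
  assert(a == b)
```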
@@ -5,6 +5,8 @@ import scala.compiletime.summonInline
import scala.quoted.*
import scala.reflect.TypeTest
import scala.collection.mutable
+import sbt.util.cacheLevel
+import sbt.util.CacheLevelTag

trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int):
  import qctx.reflect.*
@@ -15,10 +17,6 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int)
    counter = counter + 1
    s"$$${prefix}${counter}"

-  /**
-   * Constructs a new, synthetic, local var with type `tpe`, a unique name, initialized to
-   * zero-equivalent (Zero[A]), and owned by `parent`.
-   */
  def freshValDef(parent: Symbol, tpe: TypeRepr, rhs: Term): ValDef =
    tpe.asType match
      case '[a] =>
@@ -37,14 +35,27 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int)

  def makeTuple(inputs: List[Input]): BuilderResult =
    new BuilderResult:
-      override def inputTupleTypeRepr: TypeRepr =
+      override lazy val inputTupleTypeRepr: TypeRepr =
        tupleTypeRepr(inputs.map(_.tpe))
      override def tupleExpr: Expr[Tuple] =
        Expr.ofTupleFromSeq(inputs.map(_.term.asExpr))
+      override def cacheInputTupleTypeRepr: TypeRepr =
+        tupleTypeRepr(inputs.filter(_.isCacheInput).map(_.tpe))
+      override def cacheInputExpr(tupleTerm: Term): Expr[Tuple] =
+        Expr.ofTupleFromSeq(inputs.zipWithIndex.flatMap { case (input, idx) =>
+          if input.tags.nonEmpty then
+            input.tpe.asType match
+              case '[a] =>
+                Some(applyTuple(tupleTerm, inputTupleTypeRepr, idx).asExprOf[a])
+          else None
+        })

  trait BuilderResult:
    def inputTupleTypeRepr: TypeRepr
    def tupleExpr: Expr[Tuple]
+    def cacheInputTupleTypeRepr: TypeRepr
+    def cacheInputExpr(tupleTerm: Term): Expr[Tuple]
  end BuilderResult

  def tupleTypeRepr(param: List[TypeRepr]): TypeRepr =
@@ -52,14 +63,69 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int)
      case x :: xs => TypeRepr.of[scala.*:].appliedTo(List(x, tupleTypeRepr(xs)))
      case Nil     => TypeRepr.of[EmptyTuple]

+  private val cacheLevelSym = Symbol.requiredClass("sbt.util.cacheLevel")
  final class Input(
      val tpe: TypeRepr,
      val qual: Term,
      val term: Term,
-      val name: String
+      val name: String,
  ):
    override def toString: String =
-      s"Input($tpe, $qual, $term, $name)"
+      s"Input($tpe, $qual, $term, $name, $tags)"
+
+    def isCacheInput: Boolean = tags.nonEmpty
+    lazy val tags = extractTags(qual)
+    private def extractTags(tree: Term): List[CacheLevelTag] =
+      def getAnnotation(tree: Term) =
+        Option(tree.tpe.termSymbol) match
+          case Some(x) => x.getAnnotation(cacheLevelSym)
+          case None    => tree.symbol.getAnnotation(cacheLevelSym)
+      def extractTags0(tree: Term) =
+        getAnnotation(tree) match
+          case Some(annot) =>
+            annot.asExprOf[cacheLevel] match
+              case '{ cacheLevel(include = Array.empty[CacheLevelTag]($_)) } => Nil
+              case '{ cacheLevel(include = Array[CacheLevelTag]($include*)) } =>
+                include.value.get.toList
+              case _ => sys.error(Printer.TreeStructure.show(annot) + " does not match")
+          case None => CacheLevelTag.all.toList
+      tree match
+        case Inlined(_, _, tree) => extractTags(tree)
+        case Apply(_, List(arg)) => extractTags(arg)
+        case _                   => extractTags0(tree)
+
+  /**
+   * Represents an output expression via Def.declareOutput
+   */
+  final class Output(
+      val tpe: TypeRepr,
+      val term: Term,
+      val name: String,
+      val parent: Symbol,
+  ):
+    override def toString: String =
+      s"Output($tpe, $term, $name)"
+    val placeholder: Symbol =
+      tpe.asType match
+        case '[a] =>
+          Symbol.newVal(
+            parent,
+            name,
+            tpe,
+            Flags.Mutable,
+            Symbol.noSymbol
+          )
+    def toVarDef: ValDef =
+      ValDef(placeholder, rhs = Some('{ null }.asTerm))
+    def toAssign: Term = Assign(toRef, term)
+    def toRef: Ref = Ref(placeholder)
+  end Output
+
+  def applyTuple(tupleTerm: Term, tpe: TypeRepr, idx: Int): Term =
+    Select
+      .unique(Ref(tupleTerm.symbol), "apply")
+      .appliedToTypes(List(tpe))
+      .appliedToArgs(List(Literal(IntConstant(idx))))

  trait TermTransform[F[_]]:
    def apply(in: Term): Term
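Editor's note: `extractTags` drives the cache opt-out — it looks for an `@cacheLevel` annotation on the symbol behind each wrapped input and falls back to `CacheLevelTag.all` when none is present. A hypothetical illustration of the two annotation shapes the quoted patterns above match (the annotated vals and the `Local` tag name are assumptions for the example; the diff itself only shows the `cacheLevel` and `CacheLevelTag` types):

```scala
import sbt.util.{ cacheLevel, CacheLevelTag }

// Matches the `Array.empty` pattern: extractTags returns Nil, so isCacheInput
// is false and the value is excluded from the cache key.
@cacheLevel(include = Array.empty[CacheLevelTag])
val notAnInput: String = sys.props.getOrElse("user.dir", "")

// Matches the varargs pattern: cacheable only at the listed levels
// (assuming a tag named Local exists on CacheLevelTag).
@cacheLevel(include = Array(CacheLevelTag.Local))
val localOnly: Int = 42
```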
@@ -4,32 +4,40 @@ import sbt.internal.util.appmacro.*
import verify.*
import ContTestMacro.*
import sbt.util.Applicative
+import sjsonnew.BasicJsonProtocol

object ContTest extends BasicTestSuite:
  test("pure") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
-    val actual = contMapNMacro[List, Int](12)
+    val actual = uncachedContMapNMacro[List, Int](12)
    assert(actual == List(12))
  }

  test("getMap") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
-    val actual = contMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2)
+    val actual = uncachedContMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2)
    assert(actual == List(3))
+    val actual2 = uncachedContMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2)
+    assert(actual2 == List(3))
  }

  test("getMapN") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
-    val actual = contMapNMacro[List, Int](
+    val actual = uncachedContMapNMacro[List, Int](
      ContTest.wrapInit(List(1))
        + ContTest.wrapInit(List(2)) + 3
    )
    assert(actual == List(6))
+    val actual2 = uncachedContMapNMacro[List, Int](
+      ContTest.wrapInit(List(1))
+        + ContTest.wrapInit(List(2)) + 4
+    )
+    assert(actual2 == List(7))
  }

  test("getMapN2") {
    given Applicative[List] = sbt.util.ListInstances.listMonad
-    val actual = contMapNMacro[List, Int]({
+    val actual = uncachedContMapNMacro[List, Int]({
      val x = ContTest.wrapInit(List(1))
      val y = ContTest.wrapInit(List(2))
      x + y + 3
@@ -3,19 +3,23 @@ package sbt.internal
import sbt.internal.util.Types.Id
import sbt.internal.util.appmacro.*
import sbt.util.Applicative
+import sbt.util.{ ActionCacheStore, InMemoryActionCacheStore }
import scala.quoted.*
import ConvertTestMacro.InputInitConvert

object ContTestMacro:
-  inline def contMapNMacro[F[_]: Applicative, A](inline expr: A): List[A] =
-    ${ contMapNMacroImpl[F, A]('expr) }
+  inline def uncachedContMapNMacro[F[_]: Applicative, A](inline expr: A): List[A] =
+    ${ uncachedContMapNMacroImpl[F, A]('expr) }

-  def contMapNMacroImpl[F[_]: Type, A: Type](expr: Expr[A])(using
+  def uncachedContMapNMacroImpl[F[_]: Type, A: Type](expr: Expr[A])(using
      qctx: Quotes
  ): Expr[List[A]] =
    object ContSyntax extends Cont
    import ContSyntax.*
    val convert1: Convert[qctx.type] = new InputInitConvert(qctx)
-    convert1.contMapN[A, List, Id](expr, convert1.summonAppExpr[List], convert1.idTransform)
+    convert1.contMapN[A, List, Id](
+      tree = expr,
+      applicativeExpr = convert1.summonAppExpr[List],
+      cacheConfigExpr = None,
+    )
end ContTestMacro
@@ -1,85 +0,0 @@
/*
 * sbt
 * Copyright 2011 - 2018, Lightbend, Inc.
 * Copyright 2008 - 2010, Mark Harrah
 * Licensed under Apache License 2.0 (see LICENSE)
 */

package sbt

import java.io.File
import sbt.internal.inc.{ AnalyzingCompiler, PlainVirtualFile }
import sbt.internal.util.ManagedLogger
import sbt.util.CacheStoreFactory
import sbt.util.Logger
import xsbti.Reporter
import xsbti.compile.JavaTools
import sbt.internal.inc.MappedFileConverter

object Doc {
  import RawCompileLike._

  def scaladoc(
      label: String,
      cacheStoreFactory: CacheStoreFactory,
      compiler: AnalyzingCompiler
  ): Gen =
    scaladoc(label, cacheStoreFactory, compiler, Seq())

  def scaladoc(
      label: String,
      cacheStoreFactory: CacheStoreFactory,
      compiler: AnalyzingCompiler,
      fileInputOptions: Seq[String]
  ): Gen =
    cached(
      cacheStoreFactory,
      fileInputOptions,
      prepare(
        label + " Scala API documentation",
        (sources, classpath, outputDirectory, options, maxErrors, log) => {
          compiler.doc(
            sources map { x =>
              PlainVirtualFile(x.toPath)
            },
            classpath map { x =>
              PlainVirtualFile(x.toPath)
            },
            MappedFileConverter.empty,
            outputDirectory.toPath,
            options,
            maxErrors,
            log
          )
        }
      )
    )

  @deprecated("Going away", "1.1.1")
  def javadoc(
      label: String,
      cacheStoreFactory: CacheStoreFactory,
      doc: JavaTools,
      log: Logger,
      reporter: Reporter,
  ): Gen = ???

  @deprecated("Going away", "1.1.1")
  def javadoc(
      label: String,
      cacheStoreFactory: CacheStoreFactory,
      doc: JavaTools,
      log: Logger,
      reporter: Reporter,
      fileInputOptions: Seq[String],
  ): Gen = ???

  @deprecated("Going away", "1.1.1")
  val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java")
}

@deprecated("Going away", "1.1.1")
sealed trait Doc {
  @deprecated("Going away", "1.1.1")
  type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit
}
@@ -19,13 +19,15 @@ import sbt.ConcurrentRestrictions.Tag
import sbt.protocol.testing._
import sbt.internal.util.Util.{ AnyOps, none }
import sbt.internal.util.{ Terminal => UTerminal }
+import xsbti.{ FileConverter, HashedVirtualFileRef }

private[sbt] object ForkTests {
  def apply(
      runners: Map[TestFramework, Runner],
      opts: ProcessedOptions,
      config: Execution,
-      classpath: Seq[File],
+      classpath: Seq[HashedVirtualFileRef],
+      converter: FileConverter,
      fork: ForkOptions,
      log: Logger,
      tags: (Tag, Int)*

@@ -36,10 +38,12 @@ private[sbt] object ForkTests {
    def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader))

    val main =
-      if (opts.tests.isEmpty)
+      if opts.tests.isEmpty then
        constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty))
      else
-        mainTestTask(runners, opts, classpath, fork, log, config.parallel).tagw(config.tags: _*)
+        mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel).tagw(
+          config.tags: _*
+        )
    main.tagw(tags: _*).dependsOn(all(opts.setup): _*) flatMap { results =>
      all(opts.cleanup).join.map(_ => results)
    }

@@ -49,31 +53,34 @@ private[sbt] object ForkTests {
      runners: Map[TestFramework, Runner],
      tests: Vector[TestDefinition],
      config: Execution,
-      classpath: Seq[File],
+      classpath: Seq[HashedVirtualFileRef],
+      converter: FileConverter,
      fork: ForkOptions,
      log: Logger,
      tags: (Tag, Int)*
  ): Task[TestOutput] = {
    val opts = processOptions(config, tests, log)
-    apply(runners, opts, config, classpath, fork, log, tags: _*)
+    apply(runners, opts, config, classpath, converter, fork, log, tags: _*)
  }

  def apply(
      runners: Map[TestFramework, Runner],
      tests: Vector[TestDefinition],
      config: Execution,
-      classpath: Seq[File],
+      classpath: Seq[HashedVirtualFileRef],
+      converter: FileConverter,
      fork: ForkOptions,
      log: Logger,
      tag: Tag
  ): Task[TestOutput] = {
-    apply(runners, tests, config, classpath, fork, log, tag -> 1)
+    apply(runners, tests, config, classpath, converter, fork, log, tag -> 1)
  }

  private[this] def mainTestTask(
      runners: Map[TestFramework, Runner],
      opts: ProcessedOptions,
-      classpath: Seq[File],
+      classpath: Seq[HashedVirtualFileRef],
+      converter: FileConverter,
      fork: ForkOptions,
      log: Logger,
      parallel: Boolean

@@ -148,8 +155,8 @@ private[sbt] object ForkTests {
    testListeners.foreach(_.doInit())
    val acceptorThread = new Thread(Acceptor)
    acceptorThread.start()

-    val fullCp = classpath ++ Seq(
+    val cpFiles = classpath.map(converter.toPath).map(_.toFile())
+    val fullCp = cpFiles ++ Seq(
      IO.classLocationPath[ForkMain].toFile,
      IO.classLocationPath[Framework].toFile
    )
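Editor's note: since the classpath now arrives as `HashedVirtualFileRef` entries, it is materialized into plain `File`s only at the fork boundary, via the `FileConverter`. A minimal sketch of the `cpFiles` step in `mainTestTask` above:

```scala
import java.io.File
import xsbti.{ FileConverter, HashedVirtualFileRef }

// Resolve each virtual reference back to a concrete path before handing
// the classpath to the forked JVM.
def materializeClasspath(
    classpath: Seq[HashedVirtualFileRef],
    converter: FileConverter
): Seq[File] =
  classpath.map(ref => converter.toPath(ref).toFile)
```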
@@ -1,236 +0,0 @@
/*
 * sbt
 * Copyright 2011 - 2018, Lightbend, Inc.
 * Copyright 2008 - 2010, Mark Harrah
 * Licensed under Apache License 2.0 (see LICENSE)
 */

package sbt

import java.io.File
import java.time.OffsetDateTime
import java.util.jar.{ Attributes, Manifest }
import scala.collection.JavaConverters._
import sbt.io.IO

import sjsonnew.JsonFormat

import sbt.util.Logger

import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo }
import sbt.util.FileInfo.{ exists, lastModified }
import sbt.util.CacheImplicits._
import sbt.util.Tracked.{ inputChanged, outputChanged }
import scala.sys.process.Process

sealed trait PackageOption

/**
 * == Package ==
 *
 * This module provides an API to package jar files.
 *
 * @see [[https://docs.oracle.com/javase/tutorial/deployment/jar/index.html]]
 */
object Package {
  final case class JarManifest(m: Manifest) extends PackageOption {
    assert(m != null)
  }
  final case class MainClass(mainClassName: String) extends PackageOption
  final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption
  def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = {
    val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value)
    new ManifestAttributes(converted: _*)
  }
  // 2010-01-01
  private val default2010Timestamp: Long = 1262304000000L
  final case class FixedTimestamp(value: Option[Long]) extends PackageOption
  val keepTimestamps: Option[Long] = None
  val fixed2010Timestamp: Option[Long] = Some(default2010Timestamp)
  def gitCommitDateTimestamp: Option[Long] =
    try {
      Some(
        OffsetDateTime
          .parse(Process("git show -s --format=%cI").!!.trim)
          .toInstant()
          .toEpochMilli()
      )
    } catch {
      case e: Exception if e.getMessage.startsWith("Nonzero") =>
        sys.error(
          s"git repository was expected for package timestamp; use Package.fixed2010Timestamp or Package.keepTimestamps instead"
        )
    }
  def setFixedTimestamp(value: Option[Long]): PackageOption =
    FixedTimestamp(value)

  /** by default we overwrite all timestamps in JAR to epoch time 2010-01-01 for repeatable build */
  lazy val defaultTimestamp: Option[Long] =
    sys.env
      .get("SOURCE_DATE_EPOCH")
      .map(_.toLong * 1000)
      .orElse(Some(default2010Timestamp))

  def timeFromConfiguration(config: Configuration): Option[Long] =
    (config.options.collect { case t: FixedTimestamp => t }).headOption match {
      case Some(FixedTimestamp(value)) => value
      case _                           => defaultTimestamp
    }

  def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala
  // merges `mergeManifest` into `manifest` (mutating `manifest` in the process)
  def mergeManifests(manifest: Manifest, mergeManifest: Manifest): Unit = {
    mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes)
    val entryMap = manifest.getEntries.asScala
    for ((key, value) <- mergeManifest.getEntries.asScala) {
      entryMap.get(key) match {
        case Some(attributes) => mergeAttributes(attributes, value); ()
        case None             => entryMap.put(key, value); ()
      }
    }
  }

  /**
   * The jar package configuration. Contains all relevant information to create a jar file.
   *
   * @param sources the jar contents
   * @param jar the destination jar file
   * @param options additional package information, e.g. jar manifest, main class or manifest attributes
   */
  final class Configuration(
      val sources: Seq[(File, String)],
      val jar: File,
      val options: Seq[PackageOption]
  )

  /**
   * @param conf the package configuration that should be built
   * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
   * @param log feedback for the user
   */
  def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit =
    apply(conf, cacheStoreFactory, log, timeFromConfiguration(conf))

  /**
   * @param conf the package configuration that should be built
   * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
   * @param log feedback for the user
   * @param time static timestamp to use for all entries, if any.
   */
  def apply(
      conf: Configuration,
      cacheStoreFactory: CacheStoreFactory,
      log: Logger,
      time: Option[Long]
  ): Unit = {
    val manifest = new Manifest
    val main = manifest.getMainAttributes
    for (option <- conf.options) {
      option match {
        case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); ()
        case MainClass(mainClassName)   => main.put(Attributes.Name.MAIN_CLASS, mainClassName); ()
        case ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; ()
        case FixedTimestamp(value)               => ()
        case _ => log.warn("Ignored unknown package option " + option)
      }
    }
    setVersion(main)

    type Inputs = (Seq[(File, String)], FilesInfo[ModifiedFileInfo], Manifest)
    val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") {
      (inChanged, inputs: Inputs) =>
        import exists.format
        val (sources, _, manifest) = inputs
        outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) =>
          if (inChanged || outChanged) {
            makeJar(sources, jar.file, manifest, log, time)
            jar.file
            ()
          } else log.debug("Jar uptodate: " + jar.file)
        }
    }

    val inputFiles = conf.sources.map(_._1).toSet
    val inputs = (conf.sources.distinct, lastModified(inputFiles), manifest)
    cachedMakeJar(inputs)(() => exists(conf.jar))
    ()
  }

  /**
   * updates the manifest version if there is none present.
   *
   * @param main the current jar attributes
   */
  def setVersion(main: Attributes): Unit = {
    val version = Attributes.Name.MANIFEST_VERSION
    if (main.getValue(version) eq null) {
      main.put(version, "1.0")
      ()
    }
  }
  def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = {
    import Attributes.Name._
    val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR)
    val attribVals = Seq(name, version, orgName)
    ManifestAttributes(attribKeys zip attribVals: _*)
  }
  def addImplManifestAttributes(
      name: String,
      version: String,
      homepage: Option[java.net.URL],
      org: String,
      orgName: String
  ): PackageOption = {
    import Attributes.Name._

    // The ones in Attributes.Name are deprecated saying:
    // "Extension mechanism will be removed in a future release. Use class path instead."
    val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id")
    val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL")

    val attribKeys = Seq(
      IMPLEMENTATION_TITLE,
      IMPLEMENTATION_VERSION,
      IMPLEMENTATION_VENDOR,
      IMPLEMENTATION_VENDOR_ID,
    )
    val attribVals = Seq(name, version, orgName, org)
    ManifestAttributes((attribKeys zip attribVals) ++ {
      homepage map (h => (IMPLEMENTATION_URL, h.toString))
    }: _*)
  }

  @deprecated("Specify whether to use a static timestamp", "1.4.0")
  def makeJar(sources: Seq[(File, String)], jar: File, manifest: Manifest, log: Logger): Unit =
    makeJar(sources, jar, manifest, log, None)

  def makeJar(
      sources: Seq[(File, String)],
      jar: File,
      manifest: Manifest,
      log: Logger,
      time: Option[Long]
  ): Unit = {
    val path = jar.getAbsolutePath
    log.debug("Packaging " + path + " ...")
    if (jar.exists)
      if (jar.isFile)
        IO.delete(jar)
      else
        sys.error(path + " exists, but is not a regular file")
    log.debug(sourcesDebugString(sources))
    IO.jar(sources, jar, manifest, time)
    log.debug("Done packaging.")
  }
  def sourcesDebugString(sources: Seq[(File, String)]): String =
    "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t"))

  implicit def manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]](
    m => {
      val bos = new java.io.ByteArrayOutputStream()
      m write bos
      bos.toByteArray
    },
    bs => new Manifest(new java.io.ByteArrayInputStream(bs))
  )
}
@@ -0,0 +1,338 @@
/*
 * sbt
 * Copyright 2011 - 2018, Lightbend, Inc.
 * Copyright 2008 - 2010, Mark Harrah
 * Licensed under Apache License 2.0 (see LICENSE)
 */

package sbt

import java.io.File
import java.time.OffsetDateTime
import java.util.jar.{ Attributes, Manifest }
import scala.collection.JavaConverters._
import sbt.io.IO

import sjsonnew.{
  :*:,
  Builder,
  IsoLList,
  JsonFormat,
  LList,
  LNil,
  Unbuilder,
  deserializationError,
  flatUnionFormat4
}

import sbt.util.Logger
import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo }
import sbt.util.FileInfo.{ exists, lastModified }
import sbt.util.CacheImplicits._
import sbt.util.Tracked.{ inputChanged, outputChanged }
import scala.sys.process.Process
import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef }

/**
 * == Package ==
 *
 * This module provides an API to package jar files.
 *
 * @see [[https://docs.oracle.com/javase/tutorial/deployment/jar/index.html]]
 */
object Pkg:
  def JarManifest(m: Manifest) = PackageOption.JarManifest(m)
  def MainClass(mainClassName: String) = PackageOption.MainClass(mainClassName)
  def MainfestAttributes(attributes: (Attributes.Name, String)*) =
    PackageOption.ManifestAttributes(attributes: _*)
  def ManifestAttributes(attributes: (String, String)*) = {
    val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value)
    PackageOption.ManifestAttributes(converted: _*)
  }
  // 2010-01-01
  private val default2010Timestamp: Long = 1262304000000L
  def FixedTimestamp(value: Option[Long]) = PackageOption.FixedTimestamp(value)
  val keepTimestamps: Option[Long] = None
  val fixed2010Timestamp: Option[Long] = Some(default2010Timestamp)
  def gitCommitDateTimestamp: Option[Long] =
    try {
      Some(
        OffsetDateTime
          .parse(Process("git show -s --format=%cI").!!.trim)
          .toInstant()
          .toEpochMilli()
      )
    } catch {
      case e: Exception if e.getMessage.startsWith("Nonzero") =>
        sys.error(
          s"git repository was expected for package timestamp; use Package.fixed2010Timestamp or Package.keepTimestamps instead"
        )
    }
  def setFixedTimestamp(value: Option[Long]): PackageOption =
    FixedTimestamp(value)

  /** by default we overwrite all timestamps in JAR to epoch time 2010-01-01 for repeatable build */
  lazy val defaultTimestamp: Option[Long] =
    sys.env
      .get("SOURCE_DATE_EPOCH")
      .map(_.toLong * 1000)
      .orElse(Some(default2010Timestamp))

  def timeFromConfiguration(config: Configuration): Option[Long] =
    (config.options.collect { case t: PackageOption.FixedTimestamp => t }).headOption match
      case Some(PackageOption.FixedTimestamp(value)) => value
      case _                                         => defaultTimestamp

  def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala
  // merges `mergeManifest` into `manifest` (mutating `manifest` in the process)
  def mergeManifests(manifest: Manifest, mergeManifest: Manifest): Unit = {
    mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes)
    val entryMap = manifest.getEntries.asScala
    for ((key, value) <- mergeManifest.getEntries.asScala) {
      entryMap.get(key) match {
        case Some(attributes) => mergeAttributes(attributes, value); ()
        case None             => entryMap.put(key, value); ()
      }
    }
  }

  /**
   * The jar package configuration. Contains all relevant information to create a jar file.
   *
   * @param sources the jar contents
   * @param jar the destination jar file
   * @param options additional package information, e.g. jar manifest, main class or manifest attributes
   */
  final class Configuration(
      val sources: Seq[(HashedVirtualFileRef, String)],
      val jar: VirtualFileRef,
      val options: Seq[PackageOption]
  )

  object Configuration:
    given IsoLList.Aux[
      Configuration,
      Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*: LNil
    ] =
      import sbt.util.CacheImplicits.given
      import sbt.util.PathHashWriters.given
      LList.iso(
        (c: Configuration) =>
          ("sources", c.sources.toVector) :*: ("jar", c.jar) :*: ("options", c.options) :*: LNil,
        (in: Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*:
          LNil) => Configuration(in.head, in.tail.head, in.tail.tail.head),
      )
    given JsonFormat[Configuration] = summon[JsonFormat[Configuration]]
  end Configuration

  /**
   * @param conf the package configuration that should be built
   * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
   * @param log feedback for the user
   */
  def apply(conf: Configuration, converter: FileConverter, log: Logger): VirtualFile =
    apply(conf, converter, log, timeFromConfiguration(conf))

  /**
   * @param conf the package configuration that should be built
   * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible
   * @param log feedback for the user
   * @param time static timestamp to use for all entries, if any.
   */
  def apply(
      conf: Configuration,
      converter: FileConverter,
      log: Logger,
      time: Option[Long]
  ): VirtualFile =
    val manifest = toManifest(conf, log)
    val out = converter.toPath(conf.jar).toFile()
    val sources = conf.sources.map { case (vf, path) =>
      converter.toPath(vf).toFile() -> path
    }
    makeJar(sources, out, manifest, log, time)
    converter.toVirtualFile(out.toPath())

  def toManifest(conf: Configuration, log: Logger): Manifest =
    val manifest = new Manifest
    val main = manifest.getMainAttributes
    for option <- conf.options do
      option match
        case PackageOption.JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); ()
        case PackageOption.MainClass(mainClassName) =>
          main.put(Attributes.Name.MAIN_CLASS, mainClassName); ()
        case PackageOption.ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; ()
        case PackageOption.FixedTimestamp(value)               => ()
        case _ => log.warn("Ignored unknown package option " + option)
    setVersion(main)
    manifest

  /**
   * updates the manifest version if there is none present.
   *
   * @param main the current jar attributes
   */
  def setVersion(main: Attributes): Unit = {
    val version = Attributes.Name.MANIFEST_VERSION
    if (main.getValue(version) eq null) {
      main.put(version, "1.0")
      ()
    }
  }
  def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = {
    import Attributes.Name._
    val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR)
    val attribVals = Seq(name, version, orgName)
    PackageOption.ManifestAttributes(attribKeys.zip(attribVals): _*)
  }
  def addImplManifestAttributes(
      name: String,
      version: String,
      homepage: Option[java.net.URL],
      org: String,
      orgName: String
  ): PackageOption = {
    import Attributes.Name._

    // The ones in Attributes.Name are deprecated saying:
    // "Extension mechanism will be removed in a future release. Use class path instead."
    val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id")
    val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL")

    val attribKeys = Seq(
      IMPLEMENTATION_TITLE,
      IMPLEMENTATION_VERSION,
      IMPLEMENTATION_VENDOR,
      IMPLEMENTATION_VENDOR_ID,
    )
    val attribVals = Seq(name, version, orgName, org)
    PackageOption.ManifestAttributes(attribKeys.zip(attribVals) ++ {
      homepage map (h => (IMPLEMENTATION_URL, h.toString))
    }: _*)
  }

  def makeJar(
      sources: Seq[(File, String)],
      jar: File,
      manifest: Manifest,
      log: Logger,
      time: Option[Long]
  ): Unit = {
    val path = jar.getAbsolutePath
    log.debug("Packaging " + path + " ...")
    if (jar.exists)
      if (jar.isFile)
        IO.delete(jar)
      else
        sys.error(path + " exists, but is not a regular file")
    log.debug(sourcesDebugString(sources))
    IO.jar(sources, jar, manifest, time)
    log.debug("Done packaging.")
  }
  def sourcesDebugString(sources: Seq[(File, String)]): String =
    "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t"))

  given manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]](
    m => {
      val bos = new java.io.ByteArrayOutputStream()
      m write bos
      bos.toByteArray
    },
    bs => new Manifest(new java.io.ByteArrayInputStream(bs))
  )
end Pkg

enum PackageOption:
  case JarManifest(m: Manifest)
  case MainClass(mainClassName: String)
  case ManifestAttributes(attributes: (Attributes.Name, String)*)
  case FixedTimestamp(value: Option[Long])

object PackageOption:
  import Pkg.manifestFormat

  private given jarManifestFormat: JsonFormat[PackageOption.JarManifest] =
    new JsonFormat[PackageOption.JarManifest]:
      override def read[J](
          jsOpt: Option[J],
          unbuilder: Unbuilder[J]
      ): PackageOption.JarManifest =
        jsOpt match
          case Some(js) =>
            unbuilder.beginObject(js)
            val m = unbuilder.readField[Manifest]("m")
            unbuilder.endObject()
            PackageOption.JarManifest(m)
          case None => deserializationError("Expected JsObject but found None")
      override def write[J](obj: PackageOption.JarManifest, builder: Builder[J]): Unit =
        builder.beginObject()
        builder.addField("m", obj.m)
        builder.endObject()

  private given mainClassFormat: JsonFormat[PackageOption.MainClass] =
    new JsonFormat[PackageOption.MainClass]:
      override def read[J](
          jsOpt: Option[J],
          unbuilder: Unbuilder[J]
      ): PackageOption.MainClass =
        jsOpt match
          case Some(js) =>
            unbuilder.beginObject(js)
            val mainClassName = unbuilder.readField[String]("mainClassName")
            unbuilder.endObject()
            PackageOption.MainClass(mainClassName)
          case None => deserializationError("Expected JsObject but found None")
      override def write[J](obj: PackageOption.MainClass, builder: Builder[J]): Unit =
        builder.beginObject()
        builder.addField("mainClassName", obj.mainClassName)
        builder.endObject()

  private given manifestAttributesFormat: JsonFormat[PackageOption.ManifestAttributes] =
    new JsonFormat[PackageOption.ManifestAttributes]:
      override def read[J](
          jsOpt: Option[J],
          unbuilder: Unbuilder[J]
      ): PackageOption.ManifestAttributes =
        jsOpt match
          case Some(js) =>
            unbuilder.beginObject(js)
            val attributes = unbuilder.readField[Vector[(String, String)]]("attributes")
            unbuilder.endObject()
            PackageOption.ManifestAttributes(attributes.map { case (k, v) =>
              Attributes.Name(k) -> v
            }: _*)
          case None => deserializationError("Expected JsObject but found None")
      override def write[J](obj: PackageOption.ManifestAttributes, builder: Builder[J]): Unit =
        builder.beginObject()
        builder.addField(
          "attributes",
          obj.attributes.toVector.map { case (k, v) => k.toString -> v }
        )
        builder.endObject()

  private given fixedTimeStampFormat: JsonFormat[PackageOption.FixedTimestamp] =
    new JsonFormat[PackageOption.FixedTimestamp]:
      override def read[J](
          jsOpt: Option[J],
          unbuilder: Unbuilder[J]
      ): PackageOption.FixedTimestamp =
        jsOpt match
          case Some(js) =>
            unbuilder.beginObject(js)
            val value = unbuilder.readField[Option[Long]]("value")
            unbuilder.endObject()
            PackageOption.FixedTimestamp(value)
          case None => deserializationError("Expected JsObject but found None")
      override def write[J](obj: PackageOption.FixedTimestamp, builder: Builder[J]): Unit =
        builder.beginObject()
        builder.addField("value", obj.value)
        builder.endObject()

  given JsonFormat[PackageOption] = flatUnionFormat4[
    PackageOption,
    PackageOption.JarManifest,
    PackageOption.MainClass,
    PackageOption.ManifestAttributes,
    PackageOption.FixedTimestamp,
  ]("type")
end PackageOption
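Editor's note: the `Configuration` codec above uses sjson-new's `LList` iso encoding — once an `IsoLList.Aux` instance exists, a `JsonFormat` can be derived from it. The same pattern for an ordinary two-field case class, following sjson-new's documented `LList.iso` API (the `Person` type and field names are illustrative):

```scala
import sjsonnew.{ :*:, IsoLList, LList, LNil }
import sjsonnew.BasicJsonProtocol.*

final case class Person(name: String, age: Int)

object Person:
  // Project the case class onto a labelled heterogeneous list and back;
  // sjson-new derives the JSON codec from this isomorphism.
  given IsoLList.Aux[Person, String :*: Int :*: LNil] = LList.iso(
    (p: Person) => ("name", p.name) :*: ("age", p.age) :*: LNil,
    (in: String :*: Int :*: LNil) => Person(in.head, in.tail.head)
  )
```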
@@ -8,13 +8,13 @@
package sbt

import java.io.File
+import java.nio.file.Path
import sbt.internal.inc.classpath.{ ClassLoaderCache => IncClassLoaderCache }
import sbt.internal.classpath.ClassLoaderCache
import sbt.internal.server.ServerHandler
import sbt.internal.util.AttributeKey
import sbt.librarymanagement.ModuleID
-import sbt.util.Level
+import sbt.util.{ ActionCacheStore, Level }
import scala.annotation.nowarn
import scala.concurrent.duration.FiniteDuration
import xsbti.VirtualFile

@@ -106,6 +106,20 @@ object BasicKeys {
      10000
    )

+  val cacheStores =
+    AttributeKey[Seq[ActionCacheStore]](
+      "cacheStores",
+      "Cache backends",
+      10000
+    )
+
+  val rootOutputDirectory =
+    AttributeKey[Path](
+      "rootOutputDirectory",
+      "Build-wide output directory",
+      10000
+    )
+
  // Unlike other BasicKeys, this is not used directly as a setting key,
  // and serverLog / logLevel is used instead.
  private[sbt] val serverLogLevel =

@@ -15,6 +15,7 @@ import sbt.Def.{ Classpath, Initialize }
import sbt.internal.io.Source
import sbt.internal.util.Attributed
import sbt.io.{ AllPassFilter, NothingFilter }
import xsbti.HashedVirtualFileRef

object Append:
  @implicitNotFound("No Append.Value[${A1}, ${A2}] found, so ${A2} cannot be appended to ${A1}")

@@ -67,11 +68,11 @@ object Append:
  implicit def appendLong: Value[Long, Long] = _ + _
  implicit def appendDouble: Value[Double, Double] = _ + _

  implicit def appendClasspath: Sequence[Classpath, Seq[File], File] =
    new Sequence[Classpath, Seq[File], File] {
      def appendValues(a: Classpath, b: Seq[File]): Classpath = a ++ Attributed.blankSeq(b)
      def appendValue(a: Classpath, b: File): Classpath = a :+ Attributed.blank(b)
    }
  given Sequence[Classpath, Seq[HashedVirtualFileRef], HashedVirtualFileRef] with
    override def appendValues(a: Classpath, b: Seq[HashedVirtualFileRef]): Classpath =
      a ++ Attributed.blankSeq(b)
    override def appendValue(a: Classpath, b: HashedVirtualFileRef): Classpath =
      a :+ Attributed.blank(b)

  implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] =
    new Sequence[Set[T], Set[V], V] {
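
[Editorial note, not part of the commit: the new given above is what lets a build append hashed refs directly where a Classpath is expected. A minimal sketch, assuming extraJar: xsbti.HashedVirtualFileRef is produced elsewhere in the build:]

  // hypothetical build.sbt fragment
  Compile / unmanagedClasspath += extraJar        // single ref, via appendValue
  Compile / unmanagedClasspath ++= Seq(extraJar)  // batch, via appendValues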

@@ -8,6 +8,7 @@
package sbt

import java.io.File
import java.nio.file.Path
import java.net.URI

import scala.annotation.compileTimeOnly

@@ -18,15 +19,15 @@ import sbt.Scope.{ GlobalScope, ThisScope }
import sbt.internal.util.Types.const
import sbt.internal.util.complete.Parser
import sbt.internal.util.{ Terminal => ITerminal, * }
import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, BuildWideCacheConfiguration, InMemoryActionCacheStore }
import Util._
import sbt.util.Show
import xsbti.VirtualFile
import xsbti.{ HashedVirtualFileRef, VirtualFile }
import sjsonnew.JsonFormat

/** A concrete settings system that uses `sbt.Scope` for the scope type. */
object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
  type Classpath = Seq[Attributed[File]]
  type VirtualClasspath = Seq[Attributed[VirtualFile]]
  type Classpath = Seq[Attributed[HashedVirtualFileRef]]

  def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings)
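
[Editorial note, not part of the commit: with Classpath redefined over HashedVirtualFileRef, call sites that need java.io.File resolve entries through a FileConverter, mirroring the converter.toPath calls later in this commit. A hedged sketch:]

  def classpathFiles(cp: Def.Classpath, converter: xsbti.FileConverter): Seq[java.io.File] =
    cp.map(entry => converter.toPath(entry.data).toFile) // entry.data: HashedVirtualFileRef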

@@ -229,8 +230,21 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:

  import language.experimental.macros

  // These are here, as opposed to RemoteCache, since we need them from TaskMacro etc.
  private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore()
  def cacheStore: ActionCacheStore = _cacheStore
  private[sbt] var _outputDirectory: Option[Path] = None
  def cacheConfiguration: BuildWideCacheConfiguration =
    BuildWideCacheConfiguration(
      _cacheStore,
      _outputDirectory.getOrElse(sys.error("outputDirectory has not been set")),
    )

  inline def cachedTask[A1: JsonFormat](inline a1: A1): Def.Initialize[Task[A1]] =
    ${ TaskMacro.taskMacroImpl[A1]('a1, cached = true) }

  inline def task[A1](inline a1: A1): Def.Initialize[Task[A1]] =
    ${ TaskMacro.taskMacroImpl[A1]('a1) }
    ${ TaskMacro.taskMacroImpl[A1]('a1, cached = false) }

  inline def taskDyn[A1](inline a1: Def.Initialize[Task[A1]]): Def.Initialize[Task[A1]] =
    ${ TaskMacro.taskDynMacroImpl[A1]('a1) }
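
[Editorial note, not part of the commit: a minimal usage sketch of the new entry point. The key name is hypothetical, and sjsonnew.BasicJsonProtocol is assumed to supply the JsonFormat[Int] demanded by the A1: JsonFormat bound:]

  import sjsonnew.BasicJsonProtocol.*

  val sourceCount = taskKey[Int]("hypothetical cached task")
  sourceCount := Def.cachedTask {
    // inputs and result are serialized via JsonFormat; a matching entry
    // in Def.cacheStore short-circuits re-execution of this body
    (Compile / sources).value.size
  }.value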

@@ -247,7 +261,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
    ${ InputTaskMacro.inputTaskMacroImpl[A1]('a) }

  inline def taskIf[A1](inline a: A1): Def.Initialize[Task[A1]] =
    ${ TaskMacro.taskIfImpl[A1]('a) }
    ${ TaskMacro.taskIfImpl[A1]('a, cached = true) }

  private[sbt] def selectITask[A1, A2](
      fab: Initialize[Task[Either[A1, A2]]],

@@ -289,6 +303,10 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
   */
  def promise[A]: PromiseWrap[A] = new PromiseWrap[A]()

  inline def declareOutput(inline vf: VirtualFile): Unit =
    InputWrapper.`wrapOutput_\u2603\u2603`[VirtualFile](vf)

  // The following conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to
  // be used in task and setting macros as inputs with an ultimate result of type T
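
[Editorial note, not part of the commit: declareOutput is how a cached task registers a file it produced so the cache can store and later restore it as a blob. A hedged sketch with a hypothetical output path; a JsonFormat[Unit] from BasicJsonProtocol is assumed for the result type:]

  Def.cachedTask {
    val converter = fileConverter.value
    val out = target.value.toPath.resolve("greeting.txt") // hypothetical output file
    IO.write(out.toFile, "hello")
    val vf = converter.toVirtualFile(out)
    Def.declareOutput(vf) // registered as a cacheable output blob
    ()
  }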

@@ -468,7 +468,7 @@ object Scoped:

    /** Internal function for the task macro. */
    inline def taskMacro[A](inline a: A): Initialize[Task[A]] =
      ${ TaskMacro.taskMacroImpl[A]('a) }
      ${ TaskMacro.taskMacroImpl[A]('a, cached = false) }

    private[sbt] inline def :==(app: A1): Setting[Task[A1]] =
      set(Def.valueStrict(std.TaskExtra.constant(app)))

@@ -78,6 +78,7 @@ class FullConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart:
      case InputWrapper.WrapPreviousName => Converted.success(in)
      case InputWrapper.WrapInitName => wrapInit[A](in)
      case InputWrapper.WrapTaskName => wrapTask[A](in)
      case InputWrapper.WrapOutputName => Converted.success(in)
      case _ => Converted.NotApplicable()

  private def wrapInit[A: Type](tree: Term): Converted =

@@ -98,7 +98,7 @@ object InputTaskMacro:

    val inner: convert1.TermTransform[F1] = (in: Term) => f(in.asExprOf[A1]).asTerm
    val cond = conditionInputTaskTree(tree.asTerm).asExprOf[A1]
    convert1.contMapN[A1, Def.Initialize, F1](cond, convert1.appExpr, inner)
    convert1.contMapN[A1, Def.Initialize, F1](cond, convert1.appExpr, None, inner)

  private[this] def iParserMacro[F1[_]: Type, A1: Type](tree: Expr[A1])(
      f: Expr[A1] => Expr[F1[A1]]

@@ -106,13 +106,12 @@ object InputTaskMacro:
    import qctx.reflect.*
    val convert1 = new ParserConvert(qctx, 1000)
    val inner: convert1.TermTransform[F1] = (in: Term) => f(in.asExprOf[A1]).asTerm
    convert1.contMapN[A1, ParserInstance.F1, F1](tree, convert1.appExpr, inner)
    convert1.contMapN[A1, ParserInstance.F1, F1](tree, convert1.appExpr, None, inner)

  private[this] def iTaskMacro[A1: Type](tree: Expr[A1])(using qctx: Quotes): Expr[Task[A1]] =
    import qctx.reflect.*
    val convert1 = new TaskConvert(qctx, 2000)
    convert1.contMapN[A1, Task, Id](tree, convert1.appExpr)

    convert1.contMapN[A1, Task, Id](tree, convert1.appExpr, None)
  /*
  private[this] def inputTaskDynMacro0[A1: Type](
      expr: Expr[Def.Initialize[Task[A1]]]

@@ -233,7 +232,11 @@ object InputTaskMacro:
    val p0 = params.head.asInstanceOf[Ident]
    val body2 =
      convert1
        .contFlatMap[A2, TaskMacro.F, Id](body.asExprOf[TaskMacro.F[A2]], convert1.appExpr)
        .contFlatMap[A2, TaskMacro.F, Id](
          body.asExprOf[TaskMacro.F[A2]],
          convert1.appExpr,
          None,
        )
        .asTerm
    object refTransformer extends TreeMap:
      override def transformTerm(tree: Term)(owner: Symbol): Term =

@@ -26,6 +26,7 @@ object InputWrapper:

  private[std] final val WrapTaskName = "wrapTask_\u2603\u2603"
  private[std] final val WrapInitName = "wrapInit_\u2603\u2603"
  private[std] final val WrapOutputName = "wrapOutput_\u2603\u2603"
  private[std] final val WrapInitTaskName = "wrapInitTask_\u2603\u2603"
  private[std] final val WrapInitInputName = "wrapInitInputTask_\u2603\u2603"
  private[std] final val WrapInputName = "wrapInputTask_\u2603\u2603"

@@ -41,6 +42,11 @@ object InputWrapper:
  )
  def `wrapInit_\u2603\u2603`[T](@deprecated("unused", "") in: Any): T = implDetailError

  @compileTimeOnly(
    "`declareOutput` can only be used within a task macro, such as Def.cachedTask."
  )
  def `wrapOutput_\u2603\u2603`[A](@deprecated("unused", "") in: Any): A = implDetailError

  @compileTimeOnly(
    "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
  )

@@ -47,16 +47,16 @@ object SettingMacro:

  def settingMacroImpl[A1: Type](in: Expr[A1])(using qctx: Quotes): Expr[Initialize[A1]] =
    val convert1 = InitializeConvert(qctx, 0)
    convert1.contMapN[A1, F, Id](in, convert1.appExpr)
    convert1.contMapN[A1, F, Id](in, convert1.appExpr, None)

  def settingDynImpl[A1: Type](in: Expr[Initialize[A1]])(using qctx: Quotes): Expr[Initialize[A1]] =
    val convert1 = InitializeConvert(qctx, 0)
    convert1.contFlatMap[A1, F, Id](in, convert1.appExpr)
    convert1.contFlatMap[A1, F, Id](in, convert1.appExpr, None)

  def inputMacroImpl[A1: Type](in: Expr[State => Parser[A1]])(using
      qctx: Quotes
  ): Expr[ParserGen[A1]] =
    val convert1 = InitializeConvert(qctx, 0)
    val init1 = convert1.contMapN[State => Parser[A1], F, Id](in, convert1.appExpr)
    val init1 = convert1.contMapN[State => Parser[A1], F, Id](in, convert1.appExpr, None)
    '{ ParserGen[A1]($init1) }
end SettingMacro

@@ -9,7 +9,7 @@ package sbt
package std

import Def.{ Initialize, Setting }
import sbt.util.{ Applicative, Monad }
import sbt.util.{ ActionCacheStore, Applicative, Monad }
import sbt.internal.util.Types.Id
import sbt.internal.util.appmacro.{
  Cont,

@@ -27,7 +27,7 @@ import language.experimental.macros
import scala.annotation.tailrec
import scala.reflect.internal.util.UndefinedPosition
import scala.quoted.*
import sjsonnew.JsonFormat
import sjsonnew.{ BasicJsonProtocol, JsonFormat }

object TaskMacro:
  final val AssignInitName = "set"

@@ -53,15 +53,23 @@ object TaskMacro:

  // import LinterDSL.{ Empty => EmptyLinter }

  def taskMacroImpl[A1: Type](t: Expr[A1])(using qctx: Quotes): Expr[Initialize[Task[A1]]] =
  def taskMacroImpl[A1: Type](t: Expr[A1], cached: Boolean)(using
      qctx: Quotes
  ): Expr[Initialize[Task[A1]]] =
    t match
      case '{ if ($cond) then $thenp else $elsep } => taskIfImpl[A1](t)
      case '{ if ($cond) then $thenp else $elsep } => taskIfImpl[A1](t, cached)
      case _ =>
        val convert1 = new FullConvert(qctx, 0)
        convert1.contMapN[A1, F, Id](t, convert1.appExpr)
        val cacheConfigExpr =
          if cached then Some('{ Def.cacheConfiguration })
          else None
        convert1.contMapN[A1, F, Id](t, convert1.appExpr, cacheConfigExpr)

  def taskIfImpl[A1: Type](expr: Expr[A1])(using qctx: Quotes): Expr[Initialize[Task[A1]]] =
  def taskIfImpl[A1: Type](expr: Expr[A1], cached: Boolean)(using
      qctx: Quotes
  ): Expr[Initialize[Task[A1]]] =
    import qctx.reflect.*
    val convert1 = new FullConvert(qctx, 1000)
    expr match
      case '{ if ($cond) then $thenp else $elsep } =>
        '{

@@ -78,7 +86,7 @@ object TaskMacro:
      t: Expr[Initialize[Task[A1]]]
  )(using qctx: Quotes): Expr[Initialize[Task[A1]]] =
    val convert1 = new FullConvert(qctx, 1000)
    convert1.contFlatMap[A1, F, Id](t, convert1.appExpr)
    convert1.contFlatMap[A1, F, Id](t, convert1.appExpr, None)

  /** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value */
  def previousImpl[A1: Type](t: Expr[TaskKey[A1]])(using

@@ -6,7 +6,7 @@
package sbt.internal.remotecache
final class CompileRemoteCacheArtifact private (
  artifact: sbt.librarymanagement.Artifact,
  packaged: sbt.TaskKey[java.io.File],
  packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef],
  val extractDirectory: java.io.File,
  val analysisFile: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable {

@@ -22,13 +22,13 @@ final class CompileRemoteCacheArtifact private (
  override def toString: String = {
    "CompileRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ")"
  }
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = {
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = {
    new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile)
  }
  def withArtifact(artifact: sbt.librarymanagement.Artifact): CompileRemoteCacheArtifact = {
    copy(artifact = artifact)
  }
  def withPackaged(packaged: sbt.TaskKey[java.io.File]): CompileRemoteCacheArtifact = {
  def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): CompileRemoteCacheArtifact = {
    copy(packaged = packaged)
  }
  def withExtractDirectory(extractDirectory: java.io.File): CompileRemoteCacheArtifact = {

@@ -40,5 +40,5 @@ final class CompileRemoteCacheArtifact private (
}
object CompileRemoteCacheArtifact {

  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile)
  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile)
}

@@ -6,7 +6,7 @@
package sbt.internal.remotecache
final class CustomRemoteCacheArtifact private (
  artifact: sbt.librarymanagement.Artifact,
  packaged: sbt.TaskKey[java.io.File],
  packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef],
  val extractDirectory: java.io.File,
  val preserveLastModified: Boolean) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable {

@@ -22,13 +22,13 @@ final class CustomRemoteCacheArtifact private (
  override def toString: String = {
    "CustomRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + preserveLastModified + ")"
  }
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): CustomRemoteCacheArtifact = {
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): CustomRemoteCacheArtifact = {
    new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified)
  }
  def withArtifact(artifact: sbt.librarymanagement.Artifact): CustomRemoteCacheArtifact = {
    copy(artifact = artifact)
  }
  def withPackaged(packaged: sbt.TaskKey[java.io.File]): CustomRemoteCacheArtifact = {
  def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): CustomRemoteCacheArtifact = {
    copy(packaged = packaged)
  }
  def withExtractDirectory(extractDirectory: java.io.File): CustomRemoteCacheArtifact = {

@@ -40,5 +40,5 @@ final class CustomRemoteCacheArtifact private (
}
object CustomRemoteCacheArtifact {

  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified)
  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified)
}

@@ -6,7 +6,7 @@
package sbt.internal.remotecache
final class PomRemoteCacheArtifact private (
  artifact: sbt.librarymanagement.Artifact,
  packaged: sbt.TaskKey[java.io.File]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable {
  packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable {

@@ -20,17 +20,17 @@ final class PomRemoteCacheArtifact private (
  override def toString: String = {
    "PomRemoteCacheArtifact(" + artifact + ", " + packaged + ")"
  }
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged): PomRemoteCacheArtifact = {
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged): PomRemoteCacheArtifact = {
    new PomRemoteCacheArtifact(artifact, packaged)
  }
  def withArtifact(artifact: sbt.librarymanagement.Artifact): PomRemoteCacheArtifact = {
    copy(artifact = artifact)
  }
  def withPackaged(packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = {
  def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): PomRemoteCacheArtifact = {
    copy(packaged = packaged)
  }
}
object PomRemoteCacheArtifact {

  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged)
  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged)
}

@@ -6,7 +6,7 @@
package sbt.internal.remotecache
abstract class RemoteCacheArtifact(
  val artifact: sbt.librarymanagement.Artifact,
  val packaged: sbt.TaskKey[java.io.File]) extends Serializable {
  val packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]) extends Serializable {

@@ -6,7 +6,7 @@
package sbt.internal.remotecache
final class TestRemoteCacheArtifact private (
  artifact: sbt.librarymanagement.Artifact,
  packaged: sbt.TaskKey[java.io.File],
  packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef],
  val extractDirectory: java.io.File,
  val analysisFile: java.io.File,
  val testResult: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable {

@@ -23,13 +23,13 @@ final class TestRemoteCacheArtifact private (
  override def toString: String = {
    "TestRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ", " + testResult + ")"
  }
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = {
  private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = {
    new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult)
  }
  def withArtifact(artifact: sbt.librarymanagement.Artifact): TestRemoteCacheArtifact = {
    copy(artifact = artifact)
  }
  def withPackaged(packaged: sbt.TaskKey[java.io.File]): TestRemoteCacheArtifact = {
  def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): TestRemoteCacheArtifact = {
    copy(packaged = packaged)
  }
  def withExtractDirectory(extractDirectory: java.io.File): TestRemoteCacheArtifact = {

@@ -44,5 +44,5 @@ final class TestRemoteCacheArtifact private (
}
object TestRemoteCacheArtifact {

  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult)
  def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult)
}

@@ -14,7 +14,7 @@
    },
    {
      "name": "packaged",
      "type": "sbt.TaskKey[java.io.File]"
      "type": "sbt.TaskKey[xsbti.HashedVirtualFileRef]"
    }
  ],
  "types": [

@@ -80,4 +80,4 @@
        ]
      }
    ]
  }
}

@@ -16,6 +16,7 @@ import java.io.File

import scala.util.control.NonFatal
import scala.util.{ Failure, Success, Try }
import xsbti.FileConverter

abstract class BackgroundJobService extends Closeable {

@@ -70,14 +71,20 @@ abstract class BackgroundJobService extends Closeable {
  def waitFor(job: JobHandle): Unit

  /** Copies classpath to temporary directories. */
  def copyClasspath(products: Classpath, full: Classpath, workingDirectory: File): Classpath
  def copyClasspath(
      products: Classpath,
      full: Classpath,
      workingDirectory: File,
      converter: FileConverter
  ): Classpath

  private[sbt] def copyClasspath(
      products: Classpath,
      full: Classpath,
      workingDirectory: File,
      hashContents: Boolean
  ): Classpath = copyClasspath(products, full, workingDirectory)
      hashContents: Boolean,
      converter: FileConverter,
  ): Classpath = copyClasspath(products, full, workingDirectory, converter)
}

object BackgroundJobService {

@@ -398,7 +398,7 @@ object Cross {
        scope / scalaVersion := version,
        scope / crossScalaVersions := scalaVersions,
        scope / scalaHome := Some(home),
        scope / scalaInstance := inst
        scope / scalaInstance := inst,
      )
    case None =>
      Seq(

File diff suppressed because it is too large

@@ -30,6 +30,7 @@ import scala.annotation.nowarn
import scala.Console.RED
import scala.concurrent.duration.Duration
import scala.util.control.NonFatal
import xsbti.FileConverter

/**
 * An API that allows you to cancel executing tasks upon some signal.

@@ -142,8 +143,8 @@ object EvaluateTaskConfig {
}

final case class PluginData(
  dependencyClasspath: Seq[Attributed[File]],
  definitionClasspath: Seq[Attributed[File]],
  dependencyClasspath: Def.Classpath,
  definitionClasspath: Def.Classpath,
  resolvers: Option[Vector[Resolver]],
  report: Option[UpdateReport],
  scalacOptions: Seq[String],

@@ -151,14 +152,15 @@ final case class PluginData(
  unmanagedSources: Seq[File],
  managedSourceDirectories: Seq[File],
  managedSources: Seq[File],
  buildTarget: Option[BuildTargetIdentifier]
  buildTarget: Option[BuildTargetIdentifier],
  converter: FileConverter,
) {
  val classpath: Seq[Attributed[File]] = definitionClasspath ++ dependencyClasspath
  val classpath: Def.Classpath = definitionClasspath ++ dependencyClasspath
}

object PluginData {
  private[sbt] def apply(dependencyClasspath: Def.Classpath): PluginData =
    PluginData(dependencyClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None)
  private[sbt] def apply(dependencyClasspath: Def.Classpath, converter: FileConverter): PluginData =
    PluginData(dependencyClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None, converter)
}

object EvaluateTask {

@@ -28,6 +28,7 @@ import sbt.internal.remotecache.RemoteCacheArtifact
import sbt.internal.server.BuildServerProtocol.BspFullWorkspace
import sbt.internal.server.{ BuildServerReporter, ServerHandler }
import sbt.internal.util.{ AttributeKey, ProgressState, SourcePosition }
import sbt.internal.util.StringAttributeKey
import sbt.io._
import sbt.librarymanagement.Configurations.CompilerPlugin
import sbt.librarymanagement.LibraryManagementCodec._

@@ -35,8 +36,8 @@ import sbt.librarymanagement._
import sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, UpdateOptions }
import sbt.nio.file.Glob
import sbt.testing.Framework
import sbt.util.{ Level, Logger, LoggerContext }
import xsbti.{ FileConverter, VirtualFile }
import sbt.util.{ cacheLevel, ActionCacheStore, Level, Logger, LoggerContext }
import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef }
import xsbti.compile._
import xsbti.compile.analysis.ReadStamps

@@ -83,7 +84,12 @@ object Keys {
  val buildDependencies = settingKey[BuildDependencies]("Definitive source of inter-project dependencies for compilation and dependency management.\n\tThis is populated by default by the dependencies declared on Project instances, but may be modified.\n\tThe main restriction is that new builds may not be introduced.").withRank(DSetting)
  val appConfiguration = settingKey[xsbti.AppConfiguration]("Provides access to the launched sbt configuration, including the ScalaProvider, Launcher, and GlobalLock.").withRank(DSetting)
  val thisProject = settingKey[ResolvedProject]("Provides the current project for the referencing scope.").withRank(CSetting)

  @cacheLevel(include = Array.empty)
  val thisProjectRef = settingKey[ProjectRef]("Provides a fully-resolved reference to the current project for the referencing scope.").withRank(CSetting)
  val configurationStr = StringAttributeKey("configuration")

  @cacheLevel(include = Array.empty)
  val configuration = settingKey[Configuration]("Provides the current configuration of the referencing scope.").withRank(CSetting)
  val commands = settingKey[Seq[Command]]("Defines commands to be registered when this project or build is the current selected one.").withRank(CSetting)
  val initialize = settingKey[Unit]("A convenience setting for performing side-effects during initialization.").withRank(BSetting)

@@ -108,8 +114,13 @@ object Keys {
  val serverUseJni = SettingKey(BasicKeys.serverUseJni)
  val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers)
  val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.")
  val cacheStores = settingKey[Seq[ActionCacheStore]]("Cache backends")
  val rootOutputDirectory = SettingKey(BasicKeys.rootOutputDirectory)

  // val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
  val analysis = StringAttributeKey("analysis")

  val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting)
  val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting)
  val pollInterval = settingKey[FiniteDuration]("Interval between checks for modified sources by the continuous execution command.").withRank(BMinusSetting)
  val watchAntiEntropy = settingKey[FiniteDuration]("Duration for which the watch EventMonitor will ignore events for a file after that file has triggered a build.").withRank(BMinusSetting)

@@ -159,6 +170,7 @@ object Keys {
  val resources = taskKey[Seq[File]]("All resource files, both managed and unmanaged.").withRank(BTask)

  // Output paths
  @cacheLevel(include = Array.empty)
  val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
  val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining")
  val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources")

@@ -184,7 +196,10 @@ object Keys {
  val cleanupCommands = settingKey[String]("Commands to execute before the Scala interpreter exits.").withRank(BMinusSetting)
  val asciiGraphWidth = settingKey[Int]("Determines maximum width of the settings graph in ASCII mode").withRank(AMinusSetting)
  val compileOptions = taskKey[CompileOptions]("Collects basic options to configure compilers").withRank(DTask)

  @cacheLevel(include = Array.empty)
  val compileInputs = taskKey[Inputs]("Collects all inputs needed for compilation.").withRank(DTask)
  val compileInputs2 = taskKey[CompileInputs2]("")
  val scalaHome = settingKey[Option[File]]("If Some, defines the local Scala installation to use for compilation, running, and testing.").withRank(ASetting)
  val scalaInstance = taskKey[ScalaInstance]("Defines the Scala instance to use for compilation, running, and testing.").withRank(DTask)
  val scalaOrganization = settingKey[String]("Organization/group ID of the Scala used in the project. Default value is 'org.scala-lang'. This is an advanced setting used for clones of the Scala Language. It should be disregarded in standard use cases.").withRank(CSetting)

@@ -222,11 +237,13 @@ object Keys {
  val consoleProject = taskKey[Unit]("Starts the Scala interpreter with the sbt and the build definition on the classpath and useful imports.").withRank(AMinusTask)
  val compile = taskKey[CompileAnalysis]("Compiles sources.").withRank(APlusTask)
  val manipulateBytecode = taskKey[CompileResult]("Manipulates generated bytecode").withRank(BTask)
  val compileIncremental = taskKey[CompileResult]("Actually runs the incremental compilation").withRank(DTask)
  val compileIncremental = taskKey[(Boolean, HashedVirtualFileRef)]("Actually runs the incremental compilation").withRank(DTask)
  val previousCompile = taskKey[PreviousResult]("Read the incremental compiler analysis from disk").withRank(DTask)
  val tastyFiles = taskKey[Seq[File]]("Returns the TASTy files produced by compilation").withRank(DTask)
  private[sbt] val compileScalaBackend = taskKey[CompileResult]("Compiles only Scala sources if pipelining is enabled. Compiles both Scala and Java sources otherwise").withRank(Invisible)
  private[sbt] val compileEarly = taskKey[CompileAnalysis]("Compiles only Scala sources if pipelining is enabled, and produce an early output (pickle JAR)").withRank(Invisible)

  @cacheLevel(include = Array.empty)
  private[sbt] val earlyOutputPing = taskKey[PromiseWrap[Boolean]]("When pipelining is enabled, this returns true when early output (pickle JAR) is created; false otherwise").withRank(Invisible)
  private[sbt] val compileJava = taskKey[CompileResult]("Compiles only Java sources (called only for pipelining)").withRank(Invisible)
  private[sbt] val compileSplit = taskKey[CompileResult]("When pipelining is enabled, compile Scala then Java; otherwise compile both").withRank(Invisible)

@@ -238,6 +255,8 @@ object Keys {
  val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting)
  val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting)
  val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting)

  @cacheLevel(include = Array.empty)
  val compileIncSetup = taskKey[Setup]("Configures aspects of incremental compilation.").withRank(DTask)
  val compilerCache = taskKey[GlobalsCache]("Cache of scala.tools.nsc.Global instances. This should typically be cached so that it isn't recreated every task run.").withRank(DTask)
  val stateCompilerCache = AttributeKey[GlobalsCache]("stateCompilerCache", "Internal use: Global cache.")

@@ -251,6 +270,7 @@ object Keys {
  val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask)
  private[sbt] val externalHooks = taskKey[ExternalHooks]("The external hooks used by zinc.")
  val auxiliaryClassFiles = taskKey[Seq[AuxiliaryClassFiles]]("The auxiliary class files that must be managed by Zinc (for instance the TASTy files)")
  @cacheLevel(include = Array.empty)
  val fileConverter = settingKey[FileConverter]("The file converter used to convert between Path and VirtualFile")
  val allowMachinePath = settingKey[Boolean]("Allow machine-specific paths during conversion.")
  val reportAbsolutePath = settingKey[Boolean]("Report absolute paths during compilation.")

@@ -259,20 +279,21 @@ object Keys {
  private[sbt] val reusableStamper = taskKey[ReadStamps]("The stamper can be reused across subprojects and sessions.")

  // package keys
  val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask)
  val `package` = taskKey[File]("Produces the main artifact, such as a binary jar. This is typically an alias for the task that actually does the packaging.").withRank(APlusTask)
  val packageDoc = taskKey[File]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask)
  val packageSrc = taskKey[File]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask)
  val packageCache = taskKey[File]("Produces the main artifact for caching.")
  val packageBin = taskKey[HashedVirtualFileRef]("Produces a main artifact, such as a binary jar.").withRank(ATask)
  val `package` = taskKey[HashedVirtualFileRef]("Produces the main artifact, such as a binary jar. This is typically an alias for the task that actually does the packaging.").withRank(APlusTask)
  val packageDoc = taskKey[HashedVirtualFileRef]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask)
  val packageSrc = taskKey[HashedVirtualFileRef]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask)
  val packageCache = taskKey[HashedVirtualFileRef]("Produces the main artifact for caching.")

  val packageOptions = taskKey[Seq[PackageOption]]("Options for packaging.").withRank(BTask)
  val packageTimestamp = settingKey[Option[Long]]("Overwrites timestamps in JAR file to make the build reproducible; None keeps the existing timestamps (useful for web resources)").withRank(CSetting)
  val packageConfiguration = taskKey[Package.Configuration]("Collects all inputs needed for packaging.").withRank(DTask)
  val artifactPath = settingKey[File]("The location of a generated artifact.").withRank(BPlusSetting)
  val packageConfiguration = taskKey[Pkg.Configuration]("Collects all inputs needed for packaging.").withRank(DTask)
  val artifactPath = settingKey[VirtualFileRef]("The location of a generated artifact.").withRank(BPlusSetting)
  val artifactStr = StringAttributeKey("artifact")
  val artifact = settingKey[Artifact]("Describes an artifact.").withRank(BMinusSetting)
  val artifactClassifier = settingKey[Option[String]]("Sets the classifier used by the default artifact definition.").withRank(BSetting)
  val artifactName = settingKey[(ScalaVersion, ModuleID, Artifact) => String]("Function that produces the artifact name from its definition.").withRank(CSetting)
  val mappings = taskKey[Seq[(File, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask)
  val mappings = taskKey[Seq[(HashedVirtualFileRef, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask)
  val fileMappings = taskKey[Seq[(File, File)]]("Defines the mappings from a file to a file, used for copying files, for example.").withRank(BMinusTask)

  // Run Keys
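
[Editorial note, not part of the commit: since the package tasks above now return refs rather than java.io.File, downstream code resolves them through fileConverter, as other call sites in this commit do. A hedged sketch with a hypothetical key:]

  val packagedJarFile = taskKey[java.io.File]("hypothetical: packageBin resolved to a plain File")
  packagedJarFile := {
    val converter = fileConverter.value
    converter.toPath((Compile / packageBin).value).toFile
  }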

@@ -331,7 +352,6 @@ object Keys {

  // Classpath/Dependency Management Keys
  type Classpath = Def.Classpath
  type VirtualClasspath = Def.VirtualClasspath

  val name = settingKey[String]("Project name.").withRank(APlusSetting)
  val normalizedName = settingKey[String]("Project name transformed from mixed case and spaces to lowercase and dash-separated.").withRank(BSetting)

@@ -344,8 +364,8 @@ object Keys {
  val organizationHomepage = settingKey[Option[URL]]("Organization homepage.").withRank(BMinusSetting)
  val developers = settingKey[List[Developer]]("List of developers implicated in the project").withRank(BMinusSetting)
  val apiURL = settingKey[Option[URL]]("Base URL for API documentation.").withRank(BMinusSetting)
  val entryApiURL = AttributeKey[URL]("entryApiURL", "Base URL for the API documentation for a classpath entry.")
  val apiMappings = taskKey[Map[File, URL]]("Mappings from classpath entry to API documentation base URL.").withRank(BMinusSetting)
  val entryApiURL = StringAttributeKey("entryApiURL") // , "Base URL for the API documentation for a classpath entry.")
  val apiMappings = taskKey[Map[HashedVirtualFileRef, URL]]("Mappings from classpath entry to API documentation base URL.").withRank(BMinusSetting)
  val autoAPIMappings = settingKey[Boolean]("If true, automatically manages mappings to the API doc URL.").withRank(BMinusSetting)
  val scmInfo = settingKey[Option[ScmInfo]]("Basic SCM information for the project.").withRank(BMinusSetting)
  val projectInfo = settingKey[ModuleInfo]("Additional project information like formal name, homepage, licenses etc.").withRank(CSetting)

@@ -363,16 +383,15 @@ object Keys {
  val internalDependencyClasspath = taskKey[Classpath]("The internal (inter-project) classpath.").withRank(CTask)
  val externalDependencyClasspath = taskKey[Classpath]("The classpath consisting of library dependencies, both managed and unmanaged.").withRank(BMinusTask)
  val dependencyClasspath = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(BPlusTask)
  val dependencyVirtualClasspath = taskKey[VirtualClasspath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(CTask)
  val dependencyPicklePath = taskKey[VirtualClasspath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.")
  val internalDependencyPicklePath = taskKey[VirtualClasspath]("The internal (inter-project) pickles. This task is promise-blocked.")
  val dependencyPicklePath = taskKey[Classpath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.")
  val internalDependencyPicklePath = taskKey[Classpath]("The internal (inter-project) pickles. This task is promise-blocked.")
  val fullClasspath = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies.").withRank(BPlusTask)
  val trackInternalDependencies = settingKey[TrackLevel]("The level of tracking for the internal (inter-project) dependency.").withRank(BSetting)
  val exportToInternal = settingKey[TrackLevel]("The level of tracking for this project by the internal callers.").withRank(BSetting)
  val exportedProductJars = taskKey[Classpath]("Build products that go on the exported classpath as JARs.")
  val exportedProductJarsIfMissing = taskKey[Classpath]("Build products that go on the exported classpath as JARs if missing.")
  val exportedProductJarsNoTracking = taskKey[Classpath]("Just the exported classpath as JARs without triggering the compilation.")
  val exportedPickles = taskKey[VirtualClasspath]("Build products that go on the exported compilation classpath as JARs. Note this is promise-blocked.").withRank(DTask)
  val exportedPickles = taskKey[Classpath]("Build products that go on the exported compilation classpath as JARs. Note this is promise-blocked.").withRank(DTask)
  val pickleProducts = taskKey[Seq[VirtualFile]]("Pickle JARs").withRank(DTask)
  val internalDependencyAsJars = taskKey[Classpath]("The internal (inter-project) classpath as JARs.")
  val dependencyClasspathAsJars = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies, all as JARs.")

@@ -392,7 +411,7 @@ object Keys {
  val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("")
  val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.")
  val remoteCacheResolvers = settingKey[Seq[Resolver]]("Resolvers for remote cache.")
  val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.")
  val remoteCachePom = taskKey[HashedVirtualFileRef]("Generates a pom for publishing when publishing Maven-style.")
  val localCacheDirectory = settingKey[File]("Operating system specific cache directory.")
  val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting)
  val exportPipelining = settingKey[Boolean]("Produce early output so downstream subprojects can do pipelining.").withRank(BSetting)

@@ -401,6 +420,8 @@ object Keys {
  val bspConfig = taskKey[Unit]("Create or update the BSP connection files").withRank(DSetting)
  val bspEnabled = SettingKey[Boolean](BasicKeys.bspEnabled)
  val bspSbtEnabled = settingKey[Boolean]("Should BSP export meta-targets for the SBT build itself?")

  @cacheLevel(include = Array.empty)
  val bspTargetIdentifier = settingKey[BuildTargetIdentifier]("Build target identifier of a project and configuration.").withRank(DSetting)
  val bspWorkspace = settingKey[Map[BuildTargetIdentifier, Scope]]("Mapping of BSP build targets to sbt scopes").withRank(DSetting)
  private[sbt] val bspFullWorkspace = settingKey[BspFullWorkspace]("Mapping of BSP build targets to sbt scopes and meta-targets for the SBT build itself").withRank(DSetting)

@@ -431,6 +452,8 @@ object Keys {
  val bspScalaTestClassesItem = taskKey[Seq[ScalaTestClassesItem]]("").withRank(DTask)
  val bspScalaMainClasses = inputKey[Unit]("Corresponds to buildTarget/scalaMainClasses request").withRank(DTask)
  val bspScalaMainClassesItem = taskKey[ScalaMainClassesItem]("").withRank(DTask)

  @cacheLevel(include = Array.empty)
  val bspReporter = taskKey[BuildServerReporter]("").withRank(DTask)

  val useCoursier = settingKey[Boolean]("Use Coursier for dependency resolution.").withRank(BSetting)

@@ -489,12 +512,12 @@ object Keys {
  val makePomConfiguration = settingKey[MakePomConfiguration]("Configuration for generating a pom.").withRank(DSetting)
  val makeIvyXmlConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting)
  val makeIvyXmlLocalConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting)
  val packagedArtifacts = taskKey[Map[Artifact, File]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask)
  val packagedArtifacts = taskKey[Map[Artifact, HashedVirtualFileRef]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask)
  val publishMavenStyle = settingKey[Boolean]("Configures whether to generate and publish a pom (true) or Ivy file (false).").withRank(BSetting)
  val credentials = taskKey[Seq[Credentials]]("The credentials to use for updating and publishing.").withRank(BMinusTask)
  val allCredentials = taskKey[Seq[Credentials]]("Aggregated credentials across current and root subprojects. Do not rewire this task.").withRank(DTask)

  val makePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask)
  val makePom = taskKey[HashedVirtualFileRef]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask)
  val deliver = taskKey[File]("Generates the Ivy file for publishing to a repository.").withRank(BTask)
  val deliverLocal = taskKey[File]("Generates the Ivy file for publishing to the local repository.").withRank(BTask)
  // makeIvyXml is currently identical to the confusingly-named "deliver", which may be deprecated in the future

@@ -509,8 +532,10 @@ object Keys {
  val pomAllRepositories = settingKey[Boolean]("If true, includes repositories used in module configurations in the pom repositories section. If false, only the common repositories are included.").withRank(BMinusSetting)

  val moduleName = settingKey[String]("The name of the current module, used for dependency management.").withRank(BSetting)
  val outputPath = settingKey[String]("Path of the output directory relative to the rootOutputDirectory.").withRank(DSetting)
  val version = settingKey[String]("The version/revision of the current module.").withRank(APlusSetting)
  val isSnapshot = settingKey[Boolean]("True if the version of the project is a snapshot version.").withRank(BPlusSetting)
  val moduleIDStr = StringAttributeKey("moduleID")
  val moduleID = settingKey[ModuleID]("A dependency management descriptor. This is currently used for associating a ModuleID with a classpath entry.").withRank(BPlusSetting)
  val projectID = settingKey[ModuleID]("The dependency management descriptor for the current module.").withRank(BMinusSetting)
  val overrideBuildResolvers = settingKey[Boolean]("Whether or not all the build resolvers should be overridden with what's defined from the launcher.").withRank(BMinusSetting)

@@ -550,7 +575,7 @@ object Keys {
  val managedDirectory = settingKey[File]("Directory to which managed dependencies are retrieved.").withRank(BSetting)
  val classpathTypes = settingKey[Set[String]]("Artifact types that are included on the classpath.").withRank(BSetting)
  val publishArtifact = settingKey[Boolean]("Enables (true) or disables (false) publishing an artifact.").withRank(AMinusSetting)
  val packagedArtifact = taskKey[(Artifact, File)]("Generates a packaged artifact, returning the Artifact and the produced File.").withRank(CTask)
  val packagedArtifact = taskKey[(Artifact, HashedVirtualFileRef)]("Generates a packaged artifact, returning the Artifact and the produced File.").withRank(CTask)
  val checksums = settingKey[Seq[String]]("The list of checksums to generate and to verify for dependencies.").withRank(BSetting)
  val forceUpdatePeriod = settingKey[Option[FiniteDuration]]("Duration after which to force a full update to occur").withRank(CSetting)
  val versionScheme = settingKey[Option[String]]("""Version scheme used for the subproject: Supported values are Some("early-semver"), Some("pvp"), and Some("semver-spec")""").withRank(BSetting)

@@ -584,6 +609,7 @@ object Keys {
  val forcegc = settingKey[Boolean]("Enables (true) or disables (false) forcing garbage collection after task run when needed.").withRank(BMinusSetting)
  val minForcegcInterval = settingKey[Duration]("Minimal interval to check for forcing garbage collection.")
  val settingsData = std.FullInstance.settingsData
  @cacheLevel(include = Array.empty)
  val streams = taskKey[TaskStreams]("Provides streams for logging and persisting data.").withRank(DTask)
  val taskDefinitionKey = Def.taskDefinitionKey
  val (executionRoots, dummyRoots) = Def.dummy[Seq[ScopedKey[_]]]("executionRoots", "The list of root tasks for this task execution. Roots are the top-level tasks that were directly requested to be run.")

@@ -956,10 +956,10 @@ object BuiltinCommands {
  def doLoadProject(s0: State, action: LoadAction): State = {
    welcomeBanner(s0)
    checkSBTVersionChanged(s0)
    RemoteCache.initializeRemoteCache(s0)
    val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
    IO.createDirectory(base)
    val s2 = if (s1 has Keys.stateCompilerCache) s1 else registerCompilerCache(s1)

    val (eval, structure) =
      try Load.defaultLoad(s2, base, s2.log, Project.inPluginProject(s2), Project.extraBuilds(s2))
      catch {

@@ -979,6 +979,7 @@ object BuiltinCommands {
      st => setupGlobalFileTreeRepository(addCacheStoreFactoryFactory(st))
    )
    val s4 = s3.put(Keys.useLog4J.key, Project.extract(s3).get(Keys.useLog4J))
    RemoteCache.initializeRemoteCache(s4)
    addSuperShellParams(CheckBuildSources.init(LintUnused.lintUnusedFunc(s4)))
  }

@@ -8,12 +8,14 @@
package sbt

import java.io.File
import java.nio.file.{ Path => NioPath }
import java.net.URI
import java.util.Locale
// import Project._
import Keys.{
  stateBuildStructure,
  bspEnabled,
  cacheStores,
  colorShellPrompt,
  commands,
  historyPath,

@@ -22,6 +24,7 @@ import Keys.{
  shellPrompt,
  templateResolverInfos,
  autoStartServer,
  rootOutputDirectory,
  serverHost,
  serverIdleTimeout,
  serverLog,

@@ -51,7 +54,7 @@ import sbt.internal.util.Types.const // , idFun }
import sbt.internal.util.complete.DefaultParsers
import sbt.internal.server.ServerHandler
import sbt.librarymanagement.Configuration
import sbt.util.{ Show, Level }
import sbt.util.{ ActionCacheStore, Show, Level }
import sjsonnew.JsonFormat
import scala.annotation.targetName
import scala.concurrent.{ Await, TimeoutException }

@@ -319,6 +322,8 @@ trait ProjectExtra extends Scoped.Syntax:
    val connectionType: Option[ConnectionType] = get(serverConnectionType)
    val srvLogLevel: Option[Level.Value] = (ref / serverLog / logLevel).get(structure.data)
    val hs: Option[Seq[ServerHandler]] = get(ThisBuild / fullServerHandlers)
    val caches: Option[Seq[ActionCacheStore]] = get(cacheStores)
    val rod: Option[NioPath] = get(rootOutputDirectory)
    val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
    val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(
      s.definedCommands,

@@ -344,6 +349,8 @@ trait ProjectExtra extends Scoped.Syntax:
      .setCond(colorShellPrompt.key, newPrompt)
      .setCond(BasicKeys.serverLogLevel, srvLogLevel)
      .setCond(fullServerHandlers.key, hs)
      .setCond(cacheStores.key, caches)
      .setCond(rootOutputDirectory.key, rod)
    s.copy(
      attributes = newAttrs,
      definedCommands = newDefinedCommands

@@ -9,7 +9,7 @@ package sbt
package internal

import java.io.File
import java.nio.file.Path
import java.nio.file.{ Files, Path }

import org.apache.ivy.core.module.descriptor.{ DefaultArtifact, Artifact => IArtifact }
import org.apache.ivy.core.report.DownloadStatus

@@ -22,9 +22,16 @@ import sbt.ProjectExtra.*
import sbt.ScopeFilter.Make._
import sbt.SlashSyntax0._
import sbt.coursierint.LMCoursier
import sbt.internal.inc.{ HashUtil, JarUtils }
import sbt.internal.inc.{
  CompileOutput,
  FileAnalysisStore,
  HashUtil,
  JarUtils,
  MappedFileConverter
}
import sbt.internal.librarymanagement._
import sbt.internal.remotecache._
import sbt.internal.inc.Analysis
import sbt.io.IO
import sbt.io.syntax._
import sbt.librarymanagement._

@@ -34,15 +41,89 @@ import sbt.nio.FileStamp
import sbt.nio.Keys.{ inputFileStamps, outputFileStamps }
import sbt.std.TaskExtra._
import sbt.util.InterfaceUtil.toOption
import sbt.util.Logger
import sbt.util.{
  ActionCacheStore,
  AggregateActionCacheStore,
  CacheImplicits,
  DiskActionCacheStore,
  InMemoryActionCacheStore,
  Logger
}
import sjsonnew.JsonFormat
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
import xsbti.compile.{ AnalysisContents, CompileAnalysis, MiniSetup, MiniOptions }

import scala.annotation.nowarn
import scala.collection.mutable

object RemoteCache {
  final val cachedCompileClassifier = "cached-compile"
  final val cachedTestClassifier = "cached-test"
  final val commitLength = 10

  def cacheStore: ActionCacheStore = Def.cacheStore

  // TODO: cap with caffeine
  private[sbt] val analysisStore: mutable.Map[HashedVirtualFileRef, CompileAnalysis] =
    mutable.Map.empty

  // TODO: figure out a good timing to initialize cache
  // currently this is called twice so metabuild can call compile with a minimal setting
  private[sbt] def initializeRemoteCache(s: State): Unit =
    val outDir =
      s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath())
    Def._outputDirectory = Some(outDir)
    val caches = s.get(BasicKeys.cacheStores)
    caches match
      case Some(xs) => Def._cacheStore = AggregateActionCacheStore(xs)
      case None =>
        val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath()
        Def._cacheStore = DiskActionCacheStore(tempDiskCache)

  private[sbt] def getCachedAnalysis(ref: String): CompileAnalysis =
    getCachedAnalysis(CacheImplicits.strToHashedVirtualFileRef(ref))
  private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis =
    analysisStore.getOrElseUpdate(
      ref, {
        val vfs = cacheStore.getBlobs(ref :: Nil)
        if vfs.nonEmpty then
          val outputDirectory = Def.cacheConfiguration.outputDirectory
          cacheStore.syncBlobs(vfs, outputDirectory).headOption match
            case Some(file) => FileAnalysisStore.binary(file.toFile()).get.get.getAnalysis
            case None => Analysis.empty
        else Analysis.empty
      }
    )

  private[sbt] val tempConverter: MappedFileConverter = MappedFileConverter.empty
  private[sbt] def postAnalysis(analysis: CompileAnalysis): Option[HashedVirtualFileRef] =
    IO.withTemporaryFile("analysis", ".tmp", true): file =>
      val output = CompileOutput.empty
      val option = MiniOptions.of(Array(), Array(), Array())
      val setup = MiniSetup.of(
        output,
        option,
        "",
        xsbti.compile.CompileOrder.Mixed,
        false,
        Array()
      )
      FileAnalysisStore.binary(file).set(AnalysisContents.create(analysis, setup))
      val vf = tempConverter.toVirtualFile(file.toPath)
      val refs = cacheStore.putBlobs(vf :: Nil)
      refs.headOption match
        case Some(ref) =>
          analysisStore(ref) = analysis
          Some(ref)
        case None => None

  private[sbt] def artifactToStr(art: Artifact): String = {
    import LibraryManagementCodec._
    import sjsonnew.support.scalajson.unsafe._
    val format: JsonFormat[Artifact] = summon[JsonFormat[Artifact]]
    CompactPrinter(Converter.toJsonUnsafe(art)(format))
  }

  def gitCommitId: String =
    scala.sys.process.Process("git rev-parse HEAD").!!.trim.take(commitLength)
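
[Editorial note, not part of the commit: the analysis round-trip above reduces to three store operations. A hedged sketch using only calls that appear in this file (putBlobs, getBlobs, syncBlobs); someFile is hypothetical:]

  val store: ActionCacheStore = Def.cacheStore
  // put: upload a virtual file, obtaining content-addressed references
  val refs = store.putBlobs(tempConverter.toVirtualFile(someFile.toPath) :: Nil)
  // get + sync: fetch the blobs behind the refs and materialize them on disk
  val restored = store.syncBlobs(store.getBlobs(refs), Def.cacheConfiguration.outputDirectory)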

@@ -67,6 +148,17 @@ object RemoteCache {
      // base is used only to resolve relative paths, which should never happen
      IvyPaths(base.toString, localCacheDirectory.value.toString)
    },
    rootOutputDirectory := {
      appConfiguration.value.baseDirectory
        .toPath()
        .resolve("target")
        .resolve("out")
    },
    cacheStores := {
      List(
        DiskActionCacheStore(localCacheDirectory.value.toPath())
      )
    },
  )
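
[Editorial note, not part of the commit: the defaults above can be overridden from a build definition. A hedged sketch pointing the cache at a custom directory (scope and path are assumptions):]

  // build.sbt
  ThisBuild / cacheStores := Seq(
    sbt.util.DiskActionCacheStore((baseDirectory.value / "shared-cache").toPath)
  )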

  lazy val projectSettings: Seq[Def.Setting[_]] = (Seq(

@@ -117,17 +209,20 @@ object RemoteCache {
    remoteCachePom / pushRemoteCacheArtifact := true,
    remoteCachePom := {
      val s = streams.value
      val converter = fileConverter.value
      val config = (remoteCachePom / makePomConfiguration).value
      val publisher = Keys.publisher.value
      publisher.makePomFile((pushRemoteCache / ivyModule).value, config, s.log)
      config.file.get
      converter.toVirtualFile(config.file.get.toPath)
    },
    remoteCachePom / artifactPath := {
      Defaults.prefixArtifactPathSetting(makePom / artifact, "remote-cache").value
    },
    remoteCachePom / makePomConfiguration := {
      val converter = fileConverter.value
      val config = makePomConfiguration.value
      config.withFile((remoteCachePom / artifactPath).value)
      val out = converter.toPath((remoteCachePom / artifactPath).value)
      config.withFile(out.toFile())
    },
    remoteCachePom / remoteCacheArtifact := {
      PomRemoteCacheArtifact((makePom / artifact).value, remoteCachePom)
|
||||
|
|
@ -178,17 +273,20 @@ object RemoteCache {
|
|||
inTask(packageCache)(
|
||||
Seq(
|
||||
packageCache.in(Defaults.TaskZero) := {
|
||||
val converter = fileConverter.value
|
||||
val original = packageBin.in(Defaults.TaskZero).value
|
||||
val originalFile = converter.toPath(original)
|
||||
val artp = artifactPath.value
|
||||
val artpFile = converter.toPath(artp)
|
||||
val af = compileAnalysisFile.value
|
||||
IO.copyFile(original, artp)
|
||||
IO.copyFile(originalFile.toFile(), artpFile.toFile())
|
||||
// skip zip manipulation if the artp is a blank file
|
||||
if (af.exists && artp.length() > 0) {
|
||||
JarUtils.includeInJar(artp, Vector(af -> s"META-INF/inc_compile.zip"))
|
||||
if (af.exists && artpFile.toFile().length() > 0) {
|
||||
JarUtils.includeInJar(artpFile.toFile(), Vector(af -> s"META-INF/inc_compile.zip"))
|
||||
}
|
||||
val rf = getResourceFilePaths().value
|
||||
if (rf.exists) {
|
||||
JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/copy-resources.txt"))
|
||||
JarUtils.includeInJar(artpFile.toFile(), Vector(rf -> s"META-INF/copy-resources.txt"))
|
||||
}
|
||||
// val testStream = (test / streams).?.value
|
||||
// testStream foreach { s =>
|
||||
|
|
@ -197,7 +295,7 @@ object RemoteCache {
|
|||
// JarUtils.includeInJar(artp, Vector(sf -> s"META-INF/succeeded_tests"))
|
||||
// }
|
||||
// }
|
||||
artp
|
||||
converter.toVirtualFile(artpFile)
|
||||
},
|
||||
pushRemoteCacheArtifact := true,
|
||||
remoteCacheArtifact := cacheArtifactTask.value,
|
||||
|
|
@ -238,12 +336,17 @@ object RemoteCache {
|
|||
combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc)
|
||||
},
|
||||
pushRemoteCacheConfiguration := {
|
||||
val converter = fileConverter.value
|
||||
val artifacts = (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector.map {
|
||||
case (a, vf) =>
|
||||
a -> converter.toPath(vf).toFile
|
||||
}
|
||||
Classpaths.publishConfig(
|
||||
(pushRemoteCacheConfiguration / publishMavenStyle).value,
|
||||
Classpaths.deliverPattern(crossTarget.value),
|
||||
if (isSnapshot.value) "integration" else "release",
|
||||
ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector,
|
||||
(pushRemoteCacheConfiguration / packagedArtifacts).value.toVector,
|
||||
artifacts,
|
||||
(pushRemoteCacheConfiguration / checksums).value.toVector,
|
||||
Classpaths.getPublishTo(pushRemoteCacheTo.value).name,
|
||||
ivyLoggingLevel.value,
|
||||
|
|
@ -455,12 +558,12 @@ object RemoteCache {
|
|||
// }
|
||||
}
|
||||
|
||||
private def defaultArtifactTasks: Seq[TaskKey[File]] =
|
||||
private def defaultArtifactTasks: Seq[TaskKey[HashedVirtualFileRef]] =
|
||||
Seq(Compile / packageCache, Test / packageCache)
|
||||
|
||||
private def enabledOnly[A](
|
||||
key: SettingKey[A],
|
||||
pkgTasks: Seq[TaskKey[File]]
|
||||
pkgTasks: Seq[TaskKey[HashedVirtualFileRef]]
|
||||
): Def.Initialize[Seq[A]] =
|
||||
(Classpaths.forallIn(key, pkgTasks) zipWith
|
||||
Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) =>
|
||||
|
|
|
|||
|
|
@ -198,6 +198,13 @@ object ScriptedPlugin extends AutoPlugin {
  }

  private[this] def getJars(config: Configuration): Initialize[Task[PathFinder]] = Def.task {
    PathFinder(Classpaths.managedJars(config, classpathTypes.value, Keys.update.value).map(_.data))
    val converter = Keys.fileConverter.value
    PathFinder(
      Classpaths
        .managedJars(config, classpathTypes.value, Keys.update.value, converter)
        .map(_.data)
        .map(converter.toPath)
        .map(_.toFile())
    )
  }
}
@ -9,34 +9,45 @@ package sbt
package internal

import java.io.File
import java.net.{ MalformedURLException, URL }
import java.net.{ MalformedURLException, URI, URL }

import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties
import sbt.librarymanagement.ModuleID

import sbt.internal.util.Attributed
import sbt.util.Logger
import xsbti.HashedVirtualFileRef

private[sbt] object APIMappings {
  def extract(cp: Seq[Attributed[File]], log: Logger): Seq[(File, URL)] =
  def extract(
      cp: Seq[Attributed[HashedVirtualFileRef]],
      log: Logger
  ): Seq[(HashedVirtualFileRef, URL)] =
    cp.flatMap(entry => extractFromEntry(entry, log))

  def extractFromEntry(entry: Attributed[File], log: Logger): Option[(File, URL)] =
    entry.get(Keys.entryApiURL) match {
      case Some(u) => Some((entry.data, u))
  def extractFromEntry(
      entry: Attributed[HashedVirtualFileRef],
      log: Logger
  ): Option[(HashedVirtualFileRef, URL)] =
    entry.get(Keys.entryApiURL) match
      case Some(u) => Some((entry.data, URI(u).toURL))
      case None =>
        entry.get(Keys.moduleID.key).flatMap { mid =>
        entry.get(Keys.moduleIDStr).flatMap { str =>
          val mid = Classpaths.moduleIdJsonKeyFormat.read(str)
          extractFromID(entry.data, mid, log)
        }
    }

  private[this] def extractFromID(entry: File, mid: ModuleID, log: Logger): Option[(File, URL)] =
    for {
  private[this] def extractFromID(
      entry: HashedVirtualFileRef,
      mid: ModuleID,
      log: Logger
  ): Option[(HashedVirtualFileRef, URL)] =
    for
      urlString <- mid.extraAttributes.get(SbtPomExtraProperties.POM_API_KEY)
      u <- parseURL(urlString, entry, log)
    } yield (entry, u)
    yield (entry, u)

  private[this] def parseURL(s: String, forEntry: File, log: Logger): Option[URL] =
  private[this] def parseURL(s: String, forEntry: HashedVirtualFileRef, log: Logger): Option[URL] =
    try Some(new URL(s))
    catch {
      case e: MalformedURLException =>

@ -44,8 +55,8 @@ private[sbt] object APIMappings {
        None
    }

  def store[T](attr: Attributed[T], entryAPI: Option[URL]): Attributed[T] = entryAPI match {
    case None => attr
    case Some(u) => attr.put(Keys.entryApiURL, u)
  }
  def store[A](attr: Attributed[A], entryAPI: Option[URL]): Attributed[A] =
    entryAPI match
      case None => attr
      case Some(u) => attr.put(Keys.entryApiURL, u.toString)
}
@ -15,6 +15,7 @@ import Def.Setting
import sbt.io.Hash
import sbt.internal.util.Attributed
import sbt.internal.inc.ReflectUtilities
import xsbti.HashedVirtualFileRef

trait BuildDef {
  def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects

@ -73,6 +74,9 @@ private[sbt] object BuildDef {
    )

  def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] =
    in.flatMap { _.metadata.get(Keys.analysis) }
    in.flatMap: a =>
      a.metadata
        .get(Keys.analysis)
        .map: str =>
          RemoteCache.getCachedAnalysis(str)
}
@ -23,6 +23,7 @@ import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attribute
import sbt.internal.util.Attributed.data
import sbt.util.Logger
import scala.annotation.nowarn
import xsbti.FileConverter

final class BuildStructure(
    val units: Map[URI, LoadedBuildUnit],

@ -109,9 +110,10 @@ final class LoadedBuildUnit(
   * It includes build definition and plugin classes and classes for .sbt file statements and expressions.
   */
  def classpath: Seq[Path] =
    val converter = unit.converter
    unit.definitions.target.map(
      _.toPath()
    ) ++ unit.plugins.classpath.map(_.toPath()) ++ unit.definitions.dslDefinitions.classpath
    ) ++ unit.plugins.classpath.map(converter.toPath) ++ unit.definitions.dslDefinitions.classpath

  /**
   * The class loader to use for this build unit's publicly visible code.

@ -239,7 +241,7 @@ final class LoadedPlugins(
    val loader: ClassLoader,
    val detected: DetectedPlugins
) {
  def fullClasspath: Seq[Attributed[File]] = pluginData.classpath
  def fullClasspath: Def.Classpath = pluginData.classpath
  def classpath = data(fullClasspath)
}

@ -253,7 +255,8 @@ final class BuildUnit(
    val uri: URI,
    val localBase: File,
    val definitions: LoadedDefinitions,
    val plugins: LoadedPlugins
    val plugins: LoadedPlugins,
    val converter: FileConverter,
) {
  override def toString =
    if (uri.getScheme == "file") localBase.toString
@ -25,6 +25,7 @@ import sbt.nio.FileStamp
import sbt.nio.FileStamp.LastModified
import sbt.nio.Keys._
import sbt.util.Logger
import xsbti.HashedVirtualFileRef

private[sbt] object ClassLoaders {
  private implicit class SeqFileOps(val files: Seq[File]) extends AnyVal {

@ -36,14 +37,21 @@ private[sbt] object ClassLoaders {
   */
  private[sbt] def testTask: Def.Initialize[Task[ClassLoader]] = Def.task {
    val si = scalaInstance.value
    val cp = fullClasspath.value.map(_.data)
    val converter = fileConverter.value
    val cp = fullClasspath.value
      .map(_.data)
      .map(converter.toPath)
      .map(_.toFile)
    val dependencyStamps = modifiedTimes((dependencyClasspathFiles / outputFileStamps).value).toMap
    def getLm(f: File): Long = dependencyStamps.getOrElse(f, IO.getModifiedTimeOrZero(f))
    val rawCP = cp.map(f => f -> getLm(f))
    val fullCP =
      if (si.isManagedVersion) rawCP
      if si.isManagedVersion then rawCP
      else si.libraryJars.map(j => j -> IO.getModifiedTimeOrZero(j)).toSeq ++ rawCP
    val exclude = dependencyJars(exportedProducts).value.toSet ++ si.libraryJars
    val exclude: Set[File] = dependencyJars(exportedProducts).value
      .map(converter.toPath)
      .map(_.toFile)
      .toSet ++ si.libraryJars
    val logger = state.value.globalLogging.full
    val close = closeClassLoaders.value
    val allowZombies = allowZombieClassLoaders.value

@ -51,7 +59,11 @@ private[sbt] object ClassLoaders {
      strategy = classLoaderLayeringStrategy.value,
      si = si,
      fullCP = fullCP,
      allDependenciesSet = dependencyJars(dependencyClasspath).value.filterNot(exclude).toSet,
      allDependenciesSet = dependencyJars(dependencyClasspath).value
        .map(converter.toPath)
        .map(_.toFile)
        .filterNot(exclude)
        .toSet,
      cache = extendedClassLoaderCache.value,
      resources = ClasspathUtil.createClasspathResources(fullCP.map(_._1.toPath), si),
      tmp = IO.createUniqueDirectory(taskTemporaryDirectory.value),

@ -71,6 +83,7 @@ private[sbt] object ClassLoaders {
      val opts = forkOptions.value
      new ForkRun(opts)
    else {
      val converter = fileConverter.value
      val resolvedScope = resolvedScoped.value.scope
      val instance = scalaInstance.value
      val s = streams.value

@ -91,20 +104,25 @@ private[sbt] object ClassLoaders {
      )
      s.log.warn(s"$showJavaOptions will be ignored, $showFork is set to false")

      val exclude = dependencyJars(exportedProducts).value.toSet ++ instance.libraryJars
      val allDeps = dependencyJars(dependencyClasspath).value.filterNot(exclude)
      val exclude = dependencyJars(exportedProducts).value
        .map(converter.toPath)
        .map(_.toFile)
        .toSet ++ instance.libraryJars
      val allDeps = dependencyJars(dependencyClasspath).value
        .map(converter.toPath)
        .map(_.toFile)
        .filterNot(exclude)
      val logger = state.value.globalLogging.full
      val allowZombies = allowZombieClassLoaders.value
      val close = closeClassLoaders.value
      val newLoader =
        (classpath: Seq[File]) => {
          val mappings = classpath.map(f => f.getName -> f).toMap
          val cp = classpath.map(_.toPath)
        (cp: Seq[Path]) => {
          val mappings = cp.map(_.toFile()).map(f => f.getName -> f).toMap
          val transformedDependencies = allDeps.map(f => mappings.getOrElse(f.getName, f))
          buildLayers(
            strategy = classLoaderLayeringStrategy.value: @sbtUnchecked,
            si = instance,
            fullCP = classpath.map(f => f -> IO.getModifiedTimeOrZero(f)),
            fullCP = cp.map(_.toFile()).map(f => f -> IO.getModifiedTimeOrZero(f)),
            allDependenciesSet = transformedDependencies.toSet,
            cache = extendedClassLoaderCache.value: @sbtUnchecked,
            resources = ClasspathUtil.createClasspathResources(cp, instance),

@ -218,8 +236,9 @@ private[sbt] object ClassLoaders {
  }

  private def dependencyJars(
      key: sbt.TaskKey[Seq[Attributed[File]]]
  ): Def.Initialize[Task[Seq[File]]] = Def.task(data(key.value).filter(_.getName.endsWith(".jar")))
      key: sbt.TaskKey[Seq[Attributed[HashedVirtualFileRef]]]
  ): Def.Initialize[Task[Seq[HashedVirtualFileRef]]] =
    Def.task(data(key.value).filter(_.id.endsWith(".jar")))

  private[this] def modifiedTimes(stamps: Seq[(Path, FileStamp)]): Seq[(File, Long)] = stamps.map {
    case (p, LastModified(lm)) => p.toFile -> lm
@ -23,30 +23,28 @@ import sbt.librarymanagement.Configurations.names
import sbt.std.TaskExtra._
import sbt.util._
import scala.collection.JavaConverters._
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
import xsbti.compile.CompileAnalysis

private[sbt] object ClasspathImpl {

  // Since we can't predict the path for pickleProduct,
  // we can't reduce the track level.
  def exportedPicklesTask: Initialize[Task[VirtualClasspath]] =
  def exportedPicklesTask: Initialize[Task[Classpath]] =
    Def.task {
      // conditional task: do not refactor
      if (exportPipelining.value) {
      if exportPipelining.value then
        val module = projectID.value
        val config = configuration.value
        val products = pickleProducts.value
        val analysis = compileEarly.value
        val xs = products map { _ -> analysis }
        for { (f, analysis) <- xs } yield APIMappings
        for (f, analysis) <- xs
        yield APIMappings
          .store(analyzed(f, analysis), apiURL.value)
          .put(moduleID.key, module)
          .put(configuration.key, config)
      } else {
        val c = fileConverter.value
        val ps = exportedProducts.value
        ps.map(attr => attr.map(x => c.toVirtualFile(x.toPath)))
      }
          .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
          .put(Keys.configurationStr, config.name)
      else exportedProducts.value
    }

  def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] =

@ -55,11 +53,12 @@ private[sbt] object ClasspathImpl {
      val art = (packageBin / artifact).value
      val module = projectID.value
      val config = configuration.value
      for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings
        .store(analyzed(f, analysis), apiURL.value)
        .put(artifact.key, art)
        .put(moduleID.key, module)
        .put(configuration.key, config)
      for (f, analysis) <- trackedExportedProductsImplTask(track).value
      yield APIMappings
        .store(analyzed[HashedVirtualFileRef](f, analysis), apiURL.value)
        .put(Keys.artifactStr, RemoteCache.artifactToStr(art))
        .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
        .put(Keys.configurationStr, config.name)
    }

  def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] =

@ -68,16 +67,18 @@ private[sbt] object ClasspathImpl {
      val art = (packageBin / artifact).value
      val module = projectID.value
      val config = configuration.value
      for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings
      val converter = fileConverter.value
      for (f, analysis) <- trackedJarProductsImplTask(track).value
      yield APIMappings
        .store(analyzed(f, analysis), apiURL.value)
        .put(artifact.key, art)
        .put(moduleID.key, module)
        .put(configuration.key, config)
        .put(Keys.artifactStr, RemoteCache.artifactToStr(art))
        .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module))
        .put(Keys.configurationStr, config.name)
    }

  private[this] def trackedExportedProductsImplTask(
      track: TrackLevel
  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
    Def.taskIf {
      if {
        val _ = (packageBin / dynamicDependency).value

@ -88,7 +89,7 @@ private[sbt] object ClasspathImpl {

  private[this] def trackedNonJarProductsImplTask(
      track: TrackLevel
  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
    (Def
      .task {
        val dirs = productDirectories.value

@ -98,41 +99,55 @@ private[sbt] object ClasspathImpl {
      .flatMapTask {
        case (TrackLevel.TrackAlways, _, _) =>
          Def.task {
            products.value map { (_, compile.value) }
            val converter = fileConverter.value
            val a = compile.value
            products.value
              .map { x => converter.toVirtualFile(x.toPath()) }
              .map { (_, a) }
          }
        case (TrackLevel.TrackIfMissing, dirs, view)
            if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty =>
          Def.task {
            products.value map { (_, compile.value) }
            val converter = fileConverter.value
            val a = compile.value
            products.value
              .map { x => converter.toVirtualFile(x.toPath()) }
              .map { (_, a) }
          }
        case (_, dirs, _) =>
          Def.task {
            val converter = fileConverter.value
            val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty)
            dirs.map(_ -> analysis)
            dirs
              .map { x => converter.toVirtualFile(x.toPath()) }
              .map(_ -> analysis)
          }
      }

  private[this] def trackedJarProductsImplTask(
      track: TrackLevel
  ): Initialize[Task[Seq[(File, CompileAnalysis)]]] =
  ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] =
    (Def
      .task {
        val jar = (packageBin / artifactPath).value
        (TrackLevel.intersection(track, exportToInternal.value), jar)
        val converter = fileConverter.value
        val vf = (packageBin / artifactPath).value
        val jar = converter.toPath(vf)
        (TrackLevel.intersection(track, exportToInternal.value), vf, jar)
      })
      .flatMapTask {
        case (TrackLevel.TrackAlways, _) =>
        case (TrackLevel.TrackAlways, _, _) =>
          Def.task {
            Seq((packageBin.value, compile.value))
          }
        case (TrackLevel.TrackIfMissing, jar) if !jar.exists =>
        case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists =>
          Def.task {
            Seq((packageBin.value, compile.value))
          }
        case (_, jar) =>
        case (_, vf, _) =>
          Def.task {
            val converter = fileConverter.value
            val analysisOpt = previousCompile.value.analysis.toOption
            Seq(jar) map { x =>
            Seq(vf).map(converter.toPath).map(converter.toVirtualFile).map { x =>
              (
                x,
                if (analysisOpt.isDefined) analysisOpt.get

@ -186,7 +201,7 @@ private[sbt] object ClasspathImpl {
    )
  }

  def internalDependencyPicklePathTask: Initialize[Task[VirtualClasspath]] = {
  def internalDependencyPicklePathTask: Initialize[Task[Classpath]] = {
    def implTask(
        projectRef: ProjectRef,
        conf: Configuration,

@ -195,8 +210,8 @@ private[sbt] object ClasspathImpl {
        deps: BuildDependencies,
        track: TrackLevel,
        log: Logger
    ): Initialize[Task[VirtualClasspath]] =
      Def.value[Task[VirtualClasspath]] {
    ): Initialize[Task[Classpath]] =
      Def.value[Task[Classpath]] {
        interDependencies(projectRef, deps, conf, self, data, track, false, log)(
          exportedPickles,
          exportedPickles,

@ -243,11 +258,20 @@ private[sbt] object ClasspathImpl {
      log: Logger
  ): Initialize[Task[Classpath]] =
    Def.value[Task[Classpath]] {
      interDependencies(projectRef, deps, conf, self, data, track, false, log)(
      interDependencies[Attributed[HashedVirtualFileRef]](
        projectRef,
        deps,
        conf,
        self,
        data,
        track,
        false,
        log,
      )(
        exportedProductJarsNoTracking,
        exportedProductJarsIfMissing,
        exportedProductJars
      )
      ): Task[Classpath]
    }

  def unmanagedDependenciesTask: Initialize[Task[Classpath]] =

@ -328,7 +352,12 @@ private[sbt] object ClasspathImpl {
  }

  def analyzed[A](data: A, analysis: CompileAnalysis) =
    Attributed.blank(data).put(Keys.analysis, analysis)
    RemoteCache.postAnalysis(analysis) match
      case Some(ref) =>
        Attributed
          .blank(data)
          .put(Keys.analysis, CacheImplicits.hashedVirtualFileRefToStr(ref))
      case None => Attributed.blank(data)

  def interSort(
      projectRef: ProjectRef,
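The rewritten `analyzed` above is the writing side of the analysis round-trip: `postAnalysis` pushes the `CompileAnalysis` blob into the cache store, and the classpath entry keeps only a string key. The matching read side already appears in `BuildDef.analyzed` earlier in this commit; a condensed sketch of that direction (the helper name `readAnalyses` is ours):

```scala
import sbt.internal.util.Attributed
import xsbti.compile.CompileAnalysis

// hypothetical helper mirroring BuildDef.analyzed: resolve the string keys
// written by `analyzed` back into CompileAnalysis values via the cache
def readAnalyses(cp: Seq[Attributed[_]]): Seq[CompileAnalysis] =
  cp.flatMap(_.metadata.get(Keys.analysis))
    .map(str => RemoteCache.getCachedAnalysis(str))
```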
@ -0,0 +1,38 @@
package sbt.internal

import scala.reflect.ClassTag
import sjsonnew.*
import xsbti.HashedVirtualFileRef

// CompileOption has the list of sources etc
case class CompileInputs2(
    classpath: Vector[HashedVirtualFileRef],
    sources: Vector[HashedVirtualFileRef],
    scalacOptions: Vector[String],
    javacOptions: Vector[String],
)

object CompileInputs2:
  import sbt.util.CacheImplicits.given

  given IsoLList.Aux[
    CompileInputs2,
    Vector[HashedVirtualFileRef] :*: Vector[HashedVirtualFileRef] :*: Vector[String] :*:
      Vector[String] :*: LNil
  ] =
    LList.iso(
      { (v: CompileInputs2) =>
        ("classpath", v.classpath) :*:
          ("sources", v.sources) :*:
          ("scalacOptions", v.scalacOptions) :*:
          ("javacOptions", v.javacOptions) :*:
          LNil
      },
      {
        (in: Vector[HashedVirtualFileRef] :*: Vector[HashedVirtualFileRef] :*: Vector[String] :*:
          Vector[String] :*: LNil) =>
          CompileInputs2(in.head, in.tail.head, in.tail.tail.head, in.tail.tail.tail.head)
      }
    )
  given JsonFormat[CompileInputs2] = summon
end CompileInputs2
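`CompileInputs2` above gets its `JsonFormat` from the `IsoLList` instance, so it round-trips through sjson-new like any other cached value. A hedged sketch using the same support module this commit already uses in `artifactToStr`; the sample values are ours:

```scala
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
import sbt.internal.CompileInputs2

// serialize an input summary and read it back; both directions use the
// JsonFormat derived from the IsoLList given above
val inputs = CompileInputs2(Vector.empty, Vector.empty, Vector("-deprecation"), Vector.empty)
val json = Converter.toJsonUnsafe(inputs)
val parsed = Converter.fromJsonUnsafe[CompileInputs2](json)
assert(parsed == inputs)
println(CompactPrinter(json))
```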
@ -30,6 +30,8 @@ import scala.concurrent.duration._
import scala.util.Try
import sbt.util.LoggerContext
import java.util.concurrent.TimeoutException
import xsbti.FileConverter
import xsbti.HashedVirtualFileRef

/**
 * Interface between sbt and a thing running in the background.

@ -232,18 +234,21 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
      products: Classpath,
      full: Classpath,
      workingDirectory: File,
      hashFileContents: Boolean
      hashFileContents: Boolean,
      converter: FileConverter,
  ): Classpath = {
    def syncTo(dir: File)(source0: Attributed[File]): Attributed[File] = {
      val source = source0.data
    def syncTo(
        dir: File
    )(source0: Attributed[HashedVirtualFileRef]): Attributed[HashedVirtualFileRef] = {
      val source1 = source0.data
      val source = converter.toPath(source1).toFile()
      val hash8 = Hash.toHex(Hash(source.toString)).take(8)
      val id: File => String = if (hashFileContents) hash else lastModified
      val dest = dir / hash8 / id(source) / source.getName
      if (!dest.exists) {
      if !dest.exists then
        if (source.isDirectory) IO.copyDirectory(source, dest)
        else IO.copyFile(source, dest)
      }
      Attributed.blank(dest)
      Attributed.blank(converter.toVirtualFile(dest.toPath))
    }
    val xs = (products.toVector map { syncTo(workingDirectory / "target") }) ++
      ((full diff products) map { syncTo(serviceTempDir / "target") })

@ -298,9 +303,10 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
  override def copyClasspath(
      products: Classpath,
      full: Classpath,
      workingDirectory: File
      workingDirectory: File,
      converter: FileConverter,
  ): Classpath =
    copyClasspath(products, full, workingDirectory, hashFileContents = true)
    copyClasspath(products, full, workingDirectory, hashFileContents = true, converter)
}

private[sbt] object BackgroundThreadPool {
@ -46,7 +46,7 @@ object GlobalPlugin {
  )
  private[this] def injectInternalClasspath(
      config: Configuration,
      cp: Seq[Attributed[File]]
      cp: Def.Classpath,
  ): Setting[_] =
    (config / internalDependencyClasspath) ~= { prev =>
      (prev ++ cp).distinct
@ -51,7 +51,11 @@ object IvyConsole {
    val depSettings: Seq[Setting[_]] = Seq(
      libraryDependencies ++= managed.reverse,
      resolvers ++= repos.reverse.toVector,
      Compile / unmanagedJars ++= Attributed blankSeq unmanaged.reverse,
      Compile / unmanagedJars ++= {
        val converter = fileConverter.value
        val u = unmanaged.reverse.map(_.toPath).map(converter.toVirtualFile)
        Attributed.blankSeq(u)
      },
      Global / logLevel := Level.Warn,
      Global / showSuccess := false
    )
@ -28,7 +28,7 @@ import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResoluti
import sbt.librarymanagement.{ Configuration, Configurations, Resolver }
import sbt.nio.Settings
import sbt.util.{ Logger, Show }
import xsbti.VirtualFile
import xsbti.{ HashedVirtualFileRef, VirtualFile }
import xsbti.compile.{ ClasspathOptionsUtil, Compilers }
import java.io.File
import java.net.URI
@ -71,15 +71,20 @@ private[sbt] object Load {
    val launcher = scalaProvider.launcher
    val stagingDirectory = getStagingDirectory(state, globalBase).getCanonicalFile
    val javaHome = Paths.get(sys.props("java.home"))
    val out = baseDirectory.toPath.resolve("target").resolve("out")
    val rootPaths = Map(
      "OUT" -> out,
      "BASE" -> baseDirectory.toPath,
      "SBT_BOOT" -> launcher.bootDirectory.toPath,
      "IVY_HOME" -> launcher.ivyHome.toPath,
      "JAVA_HOME" -> javaHome,
    )
    val loader = getClass.getClassLoader
    val classpath =
      Attributed.blankSeq(provider.mainClasspath.toIndexedSeq ++ scalaProvider.jars.toIndexedSeq)
    val converter = MappedFileConverter(rootPaths, false)
    val cp0 = provider.mainClasspath.toIndexedSeq ++ scalaProvider.jars.toIndexedSeq
    val classpath = Attributed.blankSeq(
      cp0.map(_.toPath).map(p => converter.toVirtualFile(p): HashedVirtualFileRef)
    )
    val ivyConfiguration =
      InlineIvyConfiguration()
        .withPaths(
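The `rootPaths` map above is what makes cache keys machine-independent: `MappedFileConverter` rewrites absolute paths under each registered root into ids anchored at the symbolic keys. A hedged illustration; the exact id syntax is our assumption based on the `${OUT}`-style placeholders sbt uses for virtualized paths:

```scala
import java.nio.file.Paths
import sbt.internal.inc.MappedFileConverter

// hypothetical: two machines with different checkout locations should
// produce the same virtual id, hence the same cache key
val roots = Map("BASE" -> Paths.get("/home/alice/proj"))
val mapped = MappedFileConverter(roots, false)
val vf = mapped.toVirtualFile(Paths.get("/home/alice/proj/src/main/scala/A.scala"))
println(vf.id) // expected: something like ${BASE}/src/main/scala/A.scala
```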
@ -127,7 +132,7 @@ private[sbt] object Load {
      inject,
      None,
      Nil,
      converter = MappedFileConverter(rootPaths, false),
      converter = converter,
      log
    )
  }

@ -172,10 +177,11 @@ private[sbt] object Load {
  def buildGlobalSettings(
      base: File,
      files: Seq[VirtualFile],
      config: LoadBuildConfiguration
      config: LoadBuildConfiguration,
  ): ClassLoader => Seq[Setting[_]] = {
    val converter = config.converter
    val eval = mkEval(
      classpath = data(config.globalPluginClasspath).map(_.toPath()),
      classpath = data(config.globalPluginClasspath).map(converter.toPath),
      base = base,
      options = defaultEvalOptions,
    )

@ -443,9 +449,10 @@ private[sbt] object Load {
  }

  def mkEval(unit: BuildUnit): Eval = {
    val converter = unit.converter
    val defs = unit.definitions
    mkEval(
      (defs.target ++ unit.plugins.classpath).map(_.toPath()),
      (defs.target).map(_.toPath) ++ unit.plugins.classpath.map(converter.toPath),
      defs.base,
      unit.plugins.pluginData.scalacOptions,
    )

@ -541,7 +548,7 @@ private[sbt] object Load {
  }

  def addOverrides(unit: BuildUnit, loaders: BuildLoader): BuildLoader =
    loaders updatePluginManagement PluginManagement.extractOverrides(unit.plugins.fullClasspath)
    loaders.updatePluginManagement(PluginManagement.extractOverrides(unit.plugins.fullClasspath))

  def addResolvers(unit: BuildUnit, isRoot: Boolean, loaders: BuildLoader): BuildLoader =
    unit.definitions.builds.flatMap(_.buildLoaders).toList match {

@ -740,6 +747,7 @@ private[sbt] object Load {
    val buildLevelExtraProjects = plugs.detected.autoPlugins flatMap { d =>
      d.value.extraProjects map { _.setProjectOrigin(ProjectOrigin.ExtraProject) }
    }
    val converter = config.converter

    // NOTE - because we create an eval here, we need a clean-eval later for this URI.
    lazy val eval = timed("Load.loadUnit: mkEval", log) {

@ -752,7 +760,7 @@ private[sbt] object Load {
      //   new BuildServerEvalReporter(buildTarget, new ConsoleReporter(settings))
      // }
      mkEval(
        classpath = plugs.classpath.map(_.toPath()),
        classpath = plugs.classpath.map(converter.toPath),
        defDir,
        plugs.pluginData.scalacOptions,
        mkReporter,

@ -834,7 +842,7 @@ private[sbt] object Load {
        plugs.detected.builds.names,
        valDefinitions
      )
      new BuildUnit(uri, normBase, loadedDefs, plugs)
      new BuildUnit(uri, normBase, loadedDefs, plugs, converter)
    }

  private[this] def autoID(

@ -1266,11 +1274,10 @@ private[sbt] object Load {
    )
  }

  def globalPluginClasspath(globalPlugin: Option[GlobalPlugin]): Seq[Attributed[File]] =
    globalPlugin match {
  def globalPluginClasspath(globalPlugin: Option[GlobalPlugin]): Def.Classpath =
    globalPlugin match
      case Some(cp) => cp.data.fullClasspath
      case None => Nil
    }

  /** These are the settings defined when loading a project "meta" build. */
  @nowarn

@ -1287,6 +1294,7 @@ private[sbt] object Load {
      val managedSrcDirs = (Configurations.Compile / managedSourceDirectories).value
      val managedSrcs = (Configurations.Compile / managedSources).value
      val buildTarget = (Configurations.Compile / bspTargetIdentifier).value
      val converter = fileConverter.value
      PluginData(
        removeEntries(cp, prod),
        prod,

@ -1297,7 +1305,8 @@ private[sbt] object Load {
        unmanagedSrcs,
        managedSrcDirs,
        managedSrcs,
        Some(buildTarget)
        Some(buildTarget),
        converter,
      )
    },
    scalacOptions += "-Wconf:cat=unused-nowarn:s",

@ -1306,14 +1315,13 @@ private[sbt] object Load {
  )

  private[this] def removeEntries(
      cp: Seq[Attributed[File]],
      remove: Seq[Attributed[File]]
  ): Seq[Attributed[File]] = {
      cp: Def.Classpath,
      remove: Def.Classpath
  ): Def.Classpath =
    val files = data(remove).toSet
    cp filter { f =>
      !files.contains(f.data)
    }
  }

  def enableSbtPlugin(config: LoadBuildConfiguration): LoadBuildConfiguration =
    config.copy(

@ -1353,7 +1361,19 @@ private[sbt] object Load {
    loadPluginDefinition(
      dir,
      config,
      PluginData(config.globalPluginClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None)
      PluginData(
        config.globalPluginClasspath,
        Nil,
        None,
        None,
        Nil,
        Nil,
        Nil,
        Nil,
        Nil,
        None,
        config.converter,
      )
    )

  def buildPlugins(dir: File, s: State, config: LoadBuildConfiguration): LoadedPlugins =

@ -1392,11 +1412,11 @@ private[sbt] object Load {
   */
  def buildPluginClasspath(
      config: LoadBuildConfiguration,
      depcp: Seq[Attributed[File]]
  ): Def.Classpath = {
    if (depcp.isEmpty) config.classpath
      depcp: Def.Classpath,
  ): Def.Classpath =
    if depcp.isEmpty
    then config.classpath
    else (depcp ++ config.classpath).distinct
  }

  /**
   * Creates a classloader with a hierarchical structure, where the parent

@ -1412,22 +1432,27 @@ private[sbt] object Load {
      config: LoadBuildConfiguration,
      dependencyClasspath: Def.Classpath,
      definitionClasspath: Def.Classpath
  ): ClassLoader = {
  ): ClassLoader =
    val manager = config.pluginManagement
    val parentLoader: ClassLoader = {
      if (dependencyClasspath.isEmpty) manager.initialLoader
      else {
    val converter = config.converter
    val parentLoader: ClassLoader =
      if dependencyClasspath.isEmpty then manager.initialLoader
      else
        // Load only the dependency classpath for the common plugin classloader
        val loader = manager.loader
        loader.add(sbt.io.Path.toURLs(data(dependencyClasspath)))
        loader.add(
          sbt.io.Path.toURLs(
            data(dependencyClasspath)
              .map(converter.toPath)
              .map(_.toFile())
          )
        )
        loader
      }
    }

    // Load the definition classpath separately to avoid conflicts, see #511.
    if (definitionClasspath.isEmpty) parentLoader
    else ClasspathUtil.toLoader(data(definitionClasspath).map(_.toPath), parentLoader)
  }
    if definitionClasspath.isEmpty then parentLoader
    else
      val cp = data(definitionClasspath).map(converter.toPath)
      ClasspathUtil.toLoader(cp, parentLoader)

  def buildPluginDefinition(dir: File, s: State, config: LoadBuildConfiguration): PluginData = {
    val (eval, pluginDef) = apply(dir, s, config)

@ -1554,9 +1579,10 @@ final case class LoadBuildConfiguration(
        Nil,
        Nil,
        Nil,
        None
        None,
        converter,
      )
    case None => PluginData(globalPluginClasspath)
    case None => PluginData(globalPluginClasspath, converter)
  }
  val baseDir = globalPlugin match {
    case Some(x) => x.base
@ -13,13 +13,15 @@ import java.io.File
import java.net.URL
import Attributed.data
import sbt.internal.BuildDef.analyzed
import xsbti.FileConverter
import xsbt.api.{ Discovered, Discovery }
import xsbti.compile.CompileAnalysis
import sbt.internal.inc.ModuleUtilities

import sbt.io.IO
import scala.reflect.ClassTag

object PluginDiscovery {
object PluginDiscovery:

  /**
   * Relative paths of resources that list top-level modules that are available.

@ -39,8 +41,8 @@ object PluginDiscovery {

  /** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */
  def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = {
    def discover[T](resource: String)(implicit manifest: Manifest[T]) =
      binarySourceModules[T](data, loader, resource)
    def discover[A1: ClassTag](resource: String) =
      binarySourceModules[A1](data, loader, resource)
    import Paths._
    // TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself.
    val defaultAutoPlugins = Seq(

@ -97,13 +99,14 @@ object PluginDiscovery {
   * available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]].
   */
  def binarySourceModuleNames(
      classpath: Seq[Attributed[File]],
      classpath: Def.Classpath,
      converter: FileConverter,
      loader: ClassLoader,
      resourceName: String,
      subclasses: String*
  ): Seq[String] =
    (
      binaryModuleNames(data(classpath), loader, resourceName) ++
      binaryModuleNames(classpath, converter, loader, resourceName) ++
        (analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*)))
    ).distinct

@ -125,42 +128,52 @@ object PluginDiscovery {
   * doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders.
   */
  def binaryModuleNames(
      classpath: Seq[File],
      classpath: Def.Classpath,
      converter: FileConverter,
      loader: ClassLoader,
      resourceName: String
  ): Seq[String] = {
  ): Seq[String] =
    import collection.JavaConverters._
    loader.getResources(resourceName).asScala.toSeq.filter(onClasspath(classpath)) flatMap { u =>
    loader
      .getResources(resourceName)
      .asScala
      .toSeq
      .filter(onClasspath(classpath, converter)) flatMap { u =>
      IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty)
    }
  }

  /** Returns `true` if `url` is an entry in `classpath`. */
  def onClasspath(classpath: Seq[File])(url: URL): Boolean =
    IO.urlAsFile(url) exists (classpath.contains _)
  def onClasspath(classpath: Def.Classpath, converter: FileConverter)(url: URL): Boolean =
    val cpFiles = classpath.map(_.data).map(converter.toPath).map(_.toFile)
    IO.urlAsFile(url) exists (cpFiles.contains _)

  private[sbt] def binarySourceModules[T](
  private[sbt] def binarySourceModules[A: ClassTag](
      data: PluginData,
      loader: ClassLoader,
      resourceName: String
  )(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = {
  ): DetectedModules[A] =
    val classpath = data.classpath
    val classTag = summon[ClassTag[A]]
    val namesAndValues =
      if (classpath.isEmpty) Nil
      else {
      if classpath.isEmpty then Nil
      else
        val names =
          binarySourceModuleNames(classpath, loader, resourceName, classTag.runtimeClass.getName)
        loadModules[T](data, names, loader)
      }
    new DetectedModules(namesAndValues)
  }
          binarySourceModuleNames(
            classpath,
            data.converter,
            loader,
            resourceName,
            classTag.runtimeClass.getName
          )
        loadModules[A](data, names, loader)
    DetectedModules(namesAndValues)

  private[this] def loadModules[T: reflect.ClassTag](
  private[this] def loadModules[A: reflect.ClassTag](
      data: PluginData,
      names: Seq[String],
      loader: ClassLoader
  ): Seq[(String, T)] =
    try ModuleUtilities.getCheckedObjects[T](names, loader)
  ): Seq[(String, A)] =
    try ModuleUtilities.getCheckedObjects[A](names, loader)
    catch {
      case e: ExceptionInInitializerError =>
        val cause = e.getCause

@ -183,4 +196,4 @@ object PluginDiscovery {
    )
    throw new IncompatiblePluginsException(msgBase + msgExtra, t)
  }
}
end PluginDiscovery
@ -62,7 +62,13 @@ object PluginManagement {
  )

  def extractOverrides(classpath: Classpath): Set[ModuleID] =
    classpath flatMap { _.metadata get Keys.moduleID.key map keepOverrideInfo } toSet;
    (classpath
      .flatMap: cp =>
        cp.metadata
          .get(Keys.moduleIDStr)
          .map: str =>
            keepOverrideInfo(Classpaths.moduleIdJsonKeyFormat.read(str)))
      .toSet

  def keepOverrideInfo(m: ModuleID): ModuleID =
    ModuleID(m.organization, m.name, m.revision).withCrossVersion(m.crossVersion)
@ -218,7 +218,7 @@ object SysProp {
      .orElse(windowsCacheDir)
      .orElse(macCacheDir)
      .getOrElse(linuxCache)
    baseCache.getAbsoluteFile / "v1"
    baseCache.getAbsoluteFile / "v2"
  }

  lazy val sbtCredentialsEnv: Option[Credentials] =
@ -249,8 +249,11 @@ object BuildServerProtocol {
      val buildItems = builds.map { build =>
        val plugins: LoadedPlugins = build._2.unit.plugins
        val scalacOptions = plugins.pluginData.scalacOptions
        val pluginClassPath = plugins.classpath
        val classpath = (pluginClassPath ++ sbtJars).map(_.toURI).toVector
        val pluginClasspath = plugins.classpath
        val converter = plugins.pluginData.converter
        val classpath =
          pluginClasspath.map(converter.toPath).map(_.toFile).map(_.toURI).toVector ++
            (sbtJars).map(_.toURI).toVector
        val item = ScalacOptionsItem(
          build._1,
          scalacOptions.toVector,

@ -772,7 +775,12 @@ object BuildServerProtocol {

  private def jvmEnvironmentItem(): Initialize[Task[JvmEnvironmentItem]] = Def.task {
    val target = Keys.bspTargetIdentifier.value
    val classpath = Keys.fullClasspath.value.map(_.data.toURI).toVector
    val converter = fileConverter.value
    val classpath = Keys.fullClasspath.value
      .map(_.data)
      .map(converter.toPath)
      .map(_.toFile.toURI)
      .toVector
    val jvmOptions = Keys.javaOptions.value.toVector
    val baseDir = Keys.baseDirectory.value.getAbsolutePath
    val env = envVars.value

@ -796,7 +804,7 @@ object BuildServerProtocol {
    val internalDependencyClasspath = for {
      (ref, configs) <- bspInternalDependencyConfigurations.value
      config <- configs
    } yield ref / config / Keys.classDirectory
    } yield ref / config / Keys.packageBin
    (
      target,
      scalacOptions,

@ -814,12 +822,17 @@ object BuildServerProtocol {
          internalDependencyClasspath
        ) =>
      Def.task {
        val classpath = internalDependencyClasspath.join.value.distinct ++
        val converter = fileConverter.value
        val cp0 = internalDependencyClasspath.join.value.distinct ++
          externalDependencyClasspath.map(_.data)
        val classpath = cp0
          .map(converter.toPath)
          .map(_.toFile.toURI)
          .toVector
        ScalacOptionsItem(
          target,
          scalacOptions.toVector,
          classpath.map(_.toURI).toVector,
          classpath,
          classDirectory.toURI
        )
      }

@ -964,8 +977,10 @@ object BuildServerProtocol {
          .toMap
      )
      val runner = new ForkRun(forkOpts)
      val converter = fileConverter.value
      val cp = classpath.map(converter.toPath)
      val statusCode = runner
        .run(mainClass.`class`, classpath, mainClass.arguments, logger)
        .run(mainClass.`class`, cp, mainClass.arguments, logger)
        .fold(
          _ => StatusCode.Error,
          _ => StatusCode.Success
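The BSP hunks above all repeat the same conversion chain: a `HashedVirtualFileRef` classpath entry is resolved to a concrete path through the `FileConverter`, then turned into the `file:` URI the protocol expects. A hedged helper capturing the pattern (the name `toUris` is ours):

```scala
import java.net.URI
import xsbti.{ FileConverter, HashedVirtualFileRef }

// hypothetical helper for the recurring ref -> Path -> File -> URI chain
def toUris(refs: Seq[HashedVirtualFileRef], converter: FileConverter): Vector[URI] =
  refs.map(converter.toPath).map(_.toFile.toURI).toVector
```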
@ -0,0 +1,8 @@

## files extension on Classpath

```scala
+ given FileConverter = fileConverter.value
val cp = (Compile / classpath).value.files
```
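The note above is terse, so a slightly fuller sketch may help. The `classpath` key and the `.files` extension are taken from the note as-is; everything else (the task key and the printing loop) is our assumption:

```scala
// hypothetical task built on the note above: with a FileConverter in scope,
// `.files` resolves HashedVirtualFileRef classpath entries back to java.io.File
val printClasspath = taskKey[Unit]("print the compile classpath as files")
printClasspath := {
  given FileConverter = fileConverter.value
  val cp = (Compile / classpath).value.files
  cp.foreach(f => println(f.getAbsolutePath))
}
```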
@ -4,8 +4,8 @@ import sbt.contraband.ContrabandPlugin.autoImport._

object Dependencies {
  // WARNING: Please update Scala versions in PluginCross.scala too
  val scala212 = "2.12.17"
  val scala213 = "2.13.8"
  val scala212 = "2.12.18"
  val scala213 = "2.13.12"
  val scala3 = "3.3.1"
  val checkPluginCross = settingKey[Unit]("Make sure scalaVersion match up")
  val baseScalaVersion = scala3

@ -16,7 +16,7 @@ object Dependencies {
  private val ioVersion = nightlyVersion.getOrElse("1.8.0")
  private val lmVersion =
    sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("2.0.0-alpha13")
  val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha8")
  val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha10")

  private val sbtIO = "org.scala-sbt" %% "io" % ioVersion

@ -147,4 +147,5 @@ object Dependencies {
  val hedgehog = "qa.hedgehog" %% "hedgehog-sbt" % "0.7.0"
  val disruptor = "com.lmax" % "disruptor" % "3.4.2"
  val kindProjector = ("org.typelevel" % "kind-projector" % "0.13.2").cross(CrossVersion.full)
  val zeroAllocationHashing = "net.openhft" % "zero-allocation-hashing" % "0.10.1"
}
@ -14,15 +14,15 @@ object HouseRulesPlugin {
    scalacOptions += "-language:implicitConversions",
    scalacOptions ++= "-Xfuture".ifScala213OrMinus.value.toList,
    scalacOptions += "-Xlint",
    scalacOptions ++= "-Xfatal-warnings"
      .ifScala(v => {
        sys.props.get("sbt.build.fatal") match {
          case Some(_) => java.lang.Boolean.getBoolean("sbt.build.fatal")
          case _ => v == 12
        }
      })
      .value
      .toList,
    // scalacOptions ++= "-Xfatal-warnings"
    //   .ifScala(v => {
    //     sys.props.get("sbt.build.fatal") match {
    //       case Some(_) => java.lang.Boolean.getBoolean("sbt.build.fatal")
    //       case _ => v == 12
    //     }
    //   })
    //   .value
    //   .toList,
    scalacOptions ++= "-Ykind-projector".ifScala3.value.toList,
    scalacOptions ++= "-Ysemanticdb".ifScala3.value.toList,
    scalacOptions ++= "-Yinline-warnings".ifScala211OrMinus.value.toList,
@ -8,6 +8,7 @@
package sbt

import java.io.File
import java.nio.file.{ Path => NioPath }
import java.lang.reflect.Method
import java.lang.reflect.Modifier.{ isPublic, isStatic }
import sbt.internal.inc.ScalaInstance

@ -20,11 +21,16 @@ import scala.sys.process.Process
import scala.util.control.NonFatal
import scala.util.{ Failure, Success, Try }

sealed trait ScalaRun {
  def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit]
}
sealed trait ScalaRun:
  def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger): Try[Unit]

class ForkRun(config: ForkOptions) extends ScalaRun {
  def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit] = {
  def run(
      mainClass: String,
      classpath: Seq[NioPath],
      options: Seq[String],
      log: Logger
  ): Try[Unit] = {
    def processExitCode(exitCode: Int, label: String): Try[Unit] =
      if (exitCode == 0) Success(())
      else

@ -47,7 +53,12 @@ class ForkRun(config: ForkOptions) extends ScalaRun {
    processExitCode(exitCode, "runner")
  }

  def fork(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Process = {
  def fork(
      mainClass: String,
      classpath: Seq[NioPath],
      options: Seq[String],
      log: Logger
  ): Process = {
    log.info(s"running (fork) $mainClass ${Run.runOptionsStr(options)}")

    val c = configLogged(log)

@ -64,23 +75,23 @@ class ForkRun(config: ForkOptions) extends ScalaRun {

  private def scalaOptions(
      mainClass: String,
      classpath: Seq[File],
      options: Seq[String]
      classpath: Seq[NioPath],
      options: Seq[String],
  ): Seq[String] =
    "-classpath" :: Path.makeString(classpath) :: mainClass :: options.toList
    "-classpath" :: Path.makeString(classpath.map(_.toFile())) :: mainClass :: options.toList
}

class Run(private[sbt] val newLoader: Seq[File] => ClassLoader, trapExit: Boolean)
class Run(private[sbt] val newLoader: Seq[NioPath] => ClassLoader, trapExit: Boolean)
    extends ScalaRun {
  def this(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) =
    this(
      (cp: Seq[File]) => ClasspathUtil.makeLoader(cp.map(_.toPath), instance, nativeTmp.toPath),
      (cp: Seq[NioPath]) => ClasspathUtil.makeLoader(cp, instance, nativeTmp.toPath),
      trapExit
    )

  private[sbt] def runWithLoader(
      loader: ClassLoader,
      classpath: Seq[File],
      classpath: Seq[NioPath],
      mainClass: String,
      options: Seq[String],
      log: Logger

@ -122,15 +133,19 @@ class Run(private[sbt] val newLoader: Seq[File] => ClassLoader
  }

  /** Runs the class 'mainClass' using the given classpath and options using the scala runner. */
  def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit] = {
  def run(
      mainClass: String,
      classpath: Seq[NioPath],
      options: Seq[String],
      log: Logger
  ): Try[Unit] = {
    val loader = newLoader(classpath)
    try runWithLoader(loader, classpath, mainClass, options, log)
    finally
      loader match {
      loader match
        case ac: AutoCloseable => ac.close()
        case c: ClasspathFilter => c.close()
        case _ =>
      }
  }
  private def invokeMain(
      loader: ClassLoader,

@ -173,8 +188,8 @@ class Run(private[sbt] val newLoader: Seq[File] => ClassLoader
  }

  /** This module is an interface to starting the scala interpreter or runner. */
object Run {
  def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger)(implicit
object Run:
  def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger)(implicit
      runner: ScalaRun
  ) =
    runner.run(mainClass, classpath, options, log)

@ -195,4 +210,4 @@ object Run {
      case str if str.contains(" ") => "\"" + str + "\""
      case str => str
    }).mkString(" ")
}
end Run
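With `ScalaRun.run` now taking `Seq[NioPath]` rather than `Seq[File]`, direct callers pass NIO paths. A hedged sketch of invoking the forked runner under the new signature; the main class, classpath entry, and arguments are placeholders:

```scala
import java.nio.file.Paths
import sbt.util.Logger

// hypothetical direct invocation with the NIO-based signature above
val runner: ScalaRun = new ForkRun(ForkOptions())
val result: scala.util.Try[Unit] = runner.run(
  "example.Main",                   // main class to launch in the forked JVM
  Seq(Paths.get("target/classes")), // classpath entries as java.nio.file.Path
  Seq("--verbose"),                 // program arguments
  Logger.Null
)
```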
@ -68,6 +68,7 @@ package object sbt
  final val ThisScope = Scope.ThisScope
  final val Global = Scope.Global
  final val GlobalScope = Scope.GlobalScope
  val `Package` = Pkg

  inline def config(name: String): Configuration = ${
    ConfigurationMacro.configMacroImpl('{ name })
@ -366,5 +366,7 @@ trait Import {
  type IvyPaths = sbt.librarymanagement.ivy.IvyPaths
  val IvyPaths = sbt.librarymanagement.ivy.IvyPaths

  type FileConverter = xsbti.FileConverter
  type HashedVirtualFileRef = xsbti.HashedVirtualFileRef
  type IncOptions = xsbti.compile.IncOptions
}
@ -23,9 +23,8 @@ $ exists ran
$ delete ran

# switch to multi-project, no aggregation yet. 'reload' will drop session settings
$ touch multi
$ copy-file changes/build.sbt build.sbt
$ mkdir sub sub/sub
$ copy-file changes/build.sbt build.sbt
> reload

# define in root project only
@ -1,8 +1,12 @@
import sbt.nio.file.Glob

scalaVersion := "2.12.18"

target := baseDirectory.value / "target"

cleanKeepFiles ++= Seq(
  target.value / "keep",
  target.value / "keepfile"
  target.value / "keep",
  target.value / "keepfile"
)

cleanKeepGlobs += target.value.toGlob / "keepdir" / **
@ -1,7 +1,7 @@
lazy val scala3 = "3.2.1"
lazy val scala213 = "2.13.1"

ThisBuild / crossScalaVersions := Seq(scala3, scala213)
crossScalaVersions := Seq(scala3, scala213)
ThisBuild / scalaVersion := scala3

lazy val rootProj = (project in file("."))
@ -1,67 +1,44 @@
> show rootProj/projectID
> + compile

$ exists lib/target/scala-3.2.1
$ exists lib/target/scala-2.13
$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13
$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-2.13.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar

> clean
> + libProj/compile
$ exists lib/target/scala-3.2.1
$ exists lib/target/scala-2.13
-$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13

# test safe switching
> clean
> ++ 3.2.1 -v compile
$ exists lib/target/scala-3.2.1
-$ exists lib/target/scala-2.13
$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13

# Test legacy cross build with command support
# > clean
# > + build
# $ exists lib/target/scala-3.2.1
# $ exists lib/target/scala-2.13
# $ exists sbt-foo/target/scala-3.2.1
# -$ exists sbt-foo/target/scala-2.13
$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar

# Test ++ leaves crossScalaVersions unchanged
> clean
> ++3.2.1
> +extrasProj/compile
$ exists extras/target/scala-2.13
$ exists extras/target/scala-3.2.1
$ exists target/out/jvm/scala-2.13.1/foo-extras/foo-extras_2.13-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-3.2.1/foo-extras/foo-extras_3-0.1.0-SNAPSHOT-noresources.jar

# test safe switching
> clean
> ++ 2.13.1 -v compile
$ exists lib/target/scala-2.13
-$ exists lib/target/scala-3.2.1
# -$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13
-$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar

# test wildcard switching (3.2.1)
> clean
> ++ 3.* -v compile
$ exists lib/target/scala-3.2.1
-$ exists lib/target/scala-2.13
$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13
$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar

# test wildcard switching (2.13)
> clean
> ++ 2.13.x -v compile
$ exists lib/target/scala-2.13
-$ exists lib/target/scala-3.2.1
# -$ exists sbt-foo/target/scala-3.2.1
-$ exists sbt-foo/target/scala-2.13
-$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar

# test wildcard switching (no matches)
-> ++ 4.*
# test wildcard switching (multiple matches)
> ++ 2.*
@ -1,19 +1,19 @@
> ++3.0.2 compile

$ exists core/target/scala-3.0.2
-$ exists core/target/scala-3.1.2
-$ exists subproj/target/scala-3.0.2
-$ exists subproj/target/scala-3.1.2
$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar

> clean
-$ exists core/target/scala-3.0.2
-$ exists core/target/scala-3.1.2
-$ exists subproj/target/scala-3.0.2
-$ exists subproj/target/scala-3.1.2
-$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar

> ++3.1.2 compile

-$ exists core/target/scala-3.0.2
$ exists core/target/scala-3.1.2
-$ exists subproj/target/scala-3.0.2
$ exists subproj/target/scala-3.1.2
-$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar
-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar
$ exists target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar
@ -4,6 +4,6 @@
|
|||
|
||||
> ++2.13.1 compile
|
||||
|
||||
$ exists core/target/scala-2.13
|
||||
-$ exists module/target/scala-2.13
|
||||
-$ exists module/target/scala-2.12
|
||||
$ exists target/out/jvm/scala-2.13.1/core/core_2.13-0.1.0-SNAPSHOT-noresources.jar
|
||||
-$ exists target/out/jvm/scala-2.13.1/module
|
||||
-$ exists target/out/jvm/scala-2.12.18/module
|
||||
|
|
|
|||
|
|
@ -1,3 +1,2 @@
|
|||
object Main{
|
||||
|
||||
object Main {
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ val newContents = "bbbbbbbbb"
|
|||
|
||||
val rootContentFile = "root.txt"
|
||||
|
||||
ThisBuild / scalaVersion := "2.12.12"
|
||||
ThisBuild / scalaVersion := "2.13.12"
|
||||
|
||||
lazy val root = (project in file("."))
|
||||
.settings(
|
||||
|
|
|
|||
|
|
@ -2,14 +2,14 @@
|
|||
|
||||
# there shouldn't be two api/ directories
|
||||
# see https://github.com/lampepfl/dotty/issues/11412
|
||||
$ exists rc1/target/scala-3.0.0-RC1/api/api/index.html
|
||||
$ exists rc1/target/scala-3.0.0-RC1/api/api/foo/A$.html
|
||||
$ exists rc1/target/scala-3.0.0-RC1/api/api/foo.html
|
||||
$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/index.html
|
||||
$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/foo/A$.html
|
||||
$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/foo.html
|
||||
|
||||
> m3 / doc
|
||||
|
||||
# there shouldn't be two api/ directories
|
||||
# see https://github.com/lampepfl/dotty/issues/11412
|
||||
$ exists m3/target/scala-3.0.0-M3/api/index.html
|
||||
$ exists m3/target/scala-3.0.0-M3/api/api/foo/A$.html
|
||||
$ exists m3/target/scala-3.0.0-M3/api/api/foo.html
|
||||
$ exists target/out/jvm/scala-3.0.0-M3/m3/api/index.html
|
||||
$ exists target/out/jvm/scala-3.0.0-M3/m3/api/api/foo/A$.html
|
||||
$ exists target/out/jvm/scala-3.0.0-M3/m3/api/api/foo.html
|
||||
|
|
|
|||
|
|
@ -5,8 +5,8 @@ import Parsers._
|
|||
lazy val root = (project in file("."))
|
||||
.settings(
|
||||
crossPaths := false,
|
||||
crossScalaVersions := Seq("2.12.12", "2.13.3"),
|
||||
scalaVersion := "2.12.12",
|
||||
crossScalaVersions := Seq("2.12.18", "2.13.3"),
|
||||
scalaVersion := "2.12.18",
|
||||
Compile / doc / scalacOptions += "-Xfatal-warnings",
|
||||
commands += Command.command("excludeB") { s =>
|
||||
val impl = """val src = (Compile / sources).value; src.filterNot(_.getName.contains("B"))"""
|
||||
|
|
|
|||
|
|
@ -1,47 +0,0 @@
|
|||
-> doc
|
||||
|
||||
> excludeB
|
||||
|
||||
# hybrid project, only scaladoc run
|
||||
> doc
|
||||
$ exists target/api/index.js
|
||||
$ exists target/api/A$.html
|
||||
$ absent target/api/scala
|
||||
$ absent target/api/java
|
||||
|
||||
> setDocExtension scala
|
||||
|
||||
# The original B.scala fails scaladoc
|
||||
$ copy-file changes/B.scala B.scala
|
||||
# The compile task is superfluous, but doc preceded by compile has been problematic due to the Scala
|
||||
# compiler's way of handling an empty classpath. We keep it here to test that our workaround works.
|
||||
> clean ; compile ; doc
|
||||
|
||||
# pure scala project, only scaladoc at top level
|
||||
$ exists target/api/index.js
|
||||
$ exists target/api/A$.html
|
||||
$ exists target/api/B$.html
|
||||
$ absent target/api/package-list
|
||||
$ absent target/api/scala
|
||||
$ absent target/api/java
|
||||
|
||||
> setDocExtension java
|
||||
|
||||
> clean ; doc
|
||||
|
||||
# pure java project, only javadoc at top level
|
||||
$ exists target/api/index.html
|
||||
$ exists target/api/pkg/J.html
|
||||
$ absent target/api/index.js
|
||||
|
||||
> setDocExtension scala
|
||||
> ++2.13.3
|
||||
> clean
|
||||
$ absent target/api/A$.html
|
||||
> doc
|
||||
$ exists target/api/A$.html
|
||||
$ exists target/api/B$.html
|
||||
|
||||
# pending
|
||||
# $ absent target/api/scala
|
||||
# $ absent target/api/java
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
-> doc
|
||||
|
||||
> excludeB
|
||||
|
||||
# hybrid project, only scaladoc run
|
||||
> doc
|
||||
> packageBin
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/index.js
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/A$.html
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/scala
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/java
|
||||
|
||||
> setDocExtension scala
|
||||
|
||||
# The original B.scala fails scaladoc
|
||||
$ copy-file changes/B.scala B.scala
|
||||
# The compile task is superfluous, but doc preceded by compile has been problematic due to the Scala
|
||||
# compiler's way of handling an empty classpath. We keep it here to test that our workaround works.
|
||||
> clean ; compile ; doc
|
||||
|
||||
# pure scala project, only scaladoc at top level
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/index.js
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/A$.html
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/B$.html
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/package-list
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/scala
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/java
|
||||
|
||||
> setDocExtension java
|
||||
|
||||
> clean
|
||||
|
||||
# pending because Javadoc doesn't work
|
||||
> doc
|
||||
|
||||
# pure java project, only javadoc at top level
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/index.html
|
||||
$ exists target/out/jvm/scala-2.12.18/root/api/pkg/J.html
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/index.js
|
||||
|
||||
> setDocExtension scala
|
||||
> ++2.13.x
|
||||
> clean
|
||||
$ absent target/out/jvm/scala-2.12.18/root/api/A$.html
|
||||
> doc
|
||||
$ exists target/out/jvm/scala-2.13.3/root/api/A$.html
|
||||
$ exists target/out/jvm/scala-2.13.3/root/api/B$.html
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
import xsbti.HashedVirtualFileRef
|
||||
|
||||
// https://github.com/coursier/coursier/issues/1123
|
||||
ThisBuild / useCoursier := false
|
||||
|
||||
|
|
@ -34,17 +36,18 @@ val checkApiMappings = taskKey[Unit]("Verifies that the API mappings are collect
|
|||
def expectedMappings = Def.task {
|
||||
val stdLibVersion = "2.13.10"
|
||||
val binVersion = scalaBinaryVersion.value
|
||||
val converter = fileConverter.value
|
||||
val ms = update.value.configuration(Compile).get.modules.flatMap { mod =>
|
||||
mod.artifacts.flatMap { case (a, f) =>
|
||||
val n = a.name.stripSuffix("_" + binVersion)
|
||||
n match {
|
||||
case "a" | "b" | "c" => (f, apiBase(n)) :: Nil
|
||||
case "scala-library" => (f, scalaLibraryBase(stdLibVersion)) :: Nil
|
||||
case "a" | "b" | "c" => (converter.toVirtualFile(f.toPath()): HashedVirtualFileRef, apiBase(n)) :: Nil
|
||||
case "scala-library" => (converter.toVirtualFile(f.toPath()): HashedVirtualFileRef, scalaLibraryBase(stdLibVersion)) :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
}
|
||||
}
|
||||
val mc = (c / Compile / classDirectory).value -> apiBase("c")
|
||||
val mc = (c / Compile / packageBin).value -> apiBase("c")
|
||||
(mc +: ms).toMap
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
lazy val root = (project in file("."))
|
||||
.settings(
|
||||
name := "delete-target",
|
||||
scalaVersion := "2.12.1"
|
||||
scalaVersion := "2.12.18"
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
$ mkdir target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar
|
||||
$ touch target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello
|
||||
$ mkdir target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar
|
||||
$ touch target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello
|
||||
-> package
|
||||
$ exists target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello
|
||||
$ exists target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello
|
||||
|
|
|
|||
|
|
@ -0,0 +1,31 @@
|
|||
import sbt.internal.util.StringVirtualFile1
|
||||
import sjsonnew.BasicJsonProtocol.*
|
||||
|
||||
val pure1 = taskKey[Unit]("")
|
||||
val map1 = taskKey[String]("")
|
||||
val mapN1 = taskKey[Unit]("")
|
||||
|
||||
Global / localCacheDirectory := new File("/tmp/sbt/diskcache/")
|
||||
|
||||
pure1 := (Def.cachedTask {
|
||||
val output = StringVirtualFile1("a.txt", "foo")
|
||||
Def.declareOutput(output)
|
||||
()
|
||||
}).value
|
||||
|
||||
map1 := (Def.cachedTask {
|
||||
pure1.value
|
||||
val output1 = StringVirtualFile1("b1.txt", "foo")
|
||||
val output2 = StringVirtualFile1("b2.txt", "foo")
|
||||
Def.declareOutput(output1)
|
||||
Def.declareOutput(output2)
|
||||
"something"
|
||||
}).value
|
||||
|
||||
mapN1 := (Def.cachedTask {
|
||||
pure1.value
|
||||
map1.value
|
||||
val output = StringVirtualFile1("c.txt", "foo")
|
||||
Def.declareOutput(output)
|
||||
()
|
||||
}).value
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
> startServer
|
||||
> pure1
|
||||
$ exists target/out/a.txt
|
||||
> clean
|
||||
> pure1
|
||||
$ exists target/out/a.txt
|
||||
|
||||
> clean
|
||||
> map1
|
||||
$ exists target/out/a.txt
|
||||
$ exists target/out/b1.txt
|
||||
$ exists target/out/b2.txt
|
||||
|
||||
> clean
|
||||
> mapN1
|
||||
$ exists target/out/a.txt
|
||||
$ exists target/out/b1.txt
|
||||
$ exists target/out/c.txt
|
||||
|
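The two new files above exercise the disk cache end to end: Def.cachedTask wraps a task body so that its result and any file registered via Def.declareOutput are stored under localCacheDirectory, and after a clean the declared outputs reappear under target/out from the cache instead of the task body re-running. A minimal sketch of the pattern, following the build definition above (the key name is hypothetical):

import sbt.internal.util.StringVirtualFile1
import sjsonnew.BasicJsonProtocol.*

val greet = taskKey[Unit]("")

Global / localCacheDirectory := new File("/tmp/sbt/diskcache/")

greet := (Def.cachedTask {
  // declared outputs are materialized under target/out and restored on a cache hit
  Def.declareOutput(StringVirtualFile1("greeting.txt", "hello"))
  ()
}).value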
|
@ -0,0 +1,25 @@
|
|||
import sbt.internal.util.StringVirtualFile1
|
||||
import sjsonnew.BasicJsonProtocol.*
|
||||
import CustomKeys.*
|
||||
|
||||
Global / localCacheDirectory := new File("/tmp/sbt/diskcache/")
|
||||
|
||||
aa := A()
|
||||
|
||||
// This tests that aa is opted out of caching
|
||||
map1 := (Def.cachedTask {
|
||||
aa.value
|
||||
val output1 = StringVirtualFile1("b1.txt", "foo")
|
||||
val output2 = StringVirtualFile1("b2.txt", "foo")
|
||||
Def.declareOutput(output1)
|
||||
Def.declareOutput(output2)
|
||||
"something"
|
||||
}).value
|
||||
|
||||
mapN1 := (Def.cachedTask {
|
||||
aa.value
|
||||
map1.value
|
||||
val output = StringVirtualFile1("c.txt", "foo")
|
||||
Def.declareOutput(output)
|
||||
()
|
||||
}).value
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
import sbt.*
|
||||
import sbt.util.cacheLevel
|
||||
|
||||
case class A()
|
||||
|
||||
object CustomKeys:
|
||||
@cacheLevel(include = Array.empty)
|
||||
val aa = taskKey[A]("")
|
||||
val map1 = taskKey[String]("")
|
||||
val mapN1 = taskKey[Unit]("")
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
> startServer
|
||||
> map1
|
||||
$ exists target/out/b1.txt
|
||||
$ exists target/out/b2.txt
|
||||
|
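The CustomKeys object above is what makes this variant interesting: annotating a key with @cacheLevel(include = Array.empty) opts it out of every cache level, so a type with no sjson-new codec (like A here) can still be consumed by cached downstream tasks. A minimal sketch of the opt-out, under the same assumptions as the test (names are hypothetical):

import sbt.*
import sbt.util.cacheLevel

// Session has no JSON codec, so it cannot participate in a cache key;
// excluding it from all cache levels lets cached tasks still depend on it.
final case class Session()

object BuildKeys:
  @cacheLevel(include = Array.empty)
  val session = taskKey[Session]("")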
|
@ -10,7 +10,7 @@ val wrappedTest = taskKey[Unit]("Test with modified java.library.path")
|
|||
def wrap(task: InputKey[Unit]): Def.Initialize[Task[Unit]] =
|
||||
Def.sequential(appendToLibraryPath, task.toTask(""), dropLibraryPath)
|
||||
|
||||
ThisBuild / turbo := true
|
||||
// ThisBuild / turbo := true
|
||||
|
||||
val root = (project in file(".")).settings(
|
||||
scalaVersion := "2.12.12",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,24 @@
|
|||
import sbt.io.Using
|
||||
import xsbti.compile.TastyFiles
|
||||
|
||||
ThisBuild / scalaVersion := "3.0.0-M3"
|
||||
ThisBuild / scalaVersion := "3.3.1"
|
||||
|
||||
TaskKey[Unit]("check") := {
|
||||
assert((Compile / auxiliaryClassFiles).value == Seq(TastyFiles.instance))
|
||||
assert((Test / auxiliaryClassFiles).value == Seq(TastyFiles.instance))
|
||||
}
|
||||
|
||||
TaskKey[Unit]("check2") := checkTastyFiles(true, true).value
|
||||
|
||||
TaskKey[Unit]("check3") := checkTastyFiles(true, false).value
|
||||
|
||||
def checkTastyFiles(aExists: Boolean, bExists: Boolean) = Def.task {
|
||||
val p = (Compile / packageBin).value
|
||||
val c = fileConverter.value
|
||||
Using.jarFile(false)(c.toPath(p).toFile()): jar =>
|
||||
if aExists then assert(jar.getJarEntry("A.tasty") ne null)
|
||||
else assert(jar.getJarEntry("A.tasty") eq null)
|
||||
|
||||
if bExists then assert(jar.getJarEntry("B.tasty") ne null)
|
||||
else assert(jar.getJarEntry("B.tasty") eq null)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,12 +1,7 @@
|
|||
> check
|
||||
> compile
|
||||
|
||||
$ exists target/scala-3.0.0-M3/classes/A.tasty
|
||||
$ exists target/scala-3.0.0-M3/classes/B.tasty
|
||||
> check2
|
||||
|
||||
$ delete src/main/scala/B.scala
|
||||
|
||||
> compile
|
||||
|
||||
$ exists target/scala-3.0.0-M3/classes/A.tasty
|
||||
-$ exists target/scala-3.0.0-M3/classes/B.tasty
|
||||
> check3
|
||||
|
|
|
|||
|
|
@ -1,3 +1,3 @@
|
|||
> + compile
|
||||
$ exists target/scala-2.12
|
||||
$ exists target/scala-2.13
|
||||
$ exists target/out/jvm/scala-2.12.12/foo
|
||||
$ exists target/out/jvm/scala-2.13.1/foo
|
||||
|
|
|
|||
|
|
@ -28,7 +28,11 @@ lazy val root = (project in file("."))
|
|||
Compile / packageBin / artifact := mainArtifact,
|
||||
libraryDependencies ++= (if (baseDirectory.value / "retrieve").exists then publishedID :: Nil else Nil),
|
||||
// needed to add a jar with a different type to the managed classpath
|
||||
Compile / unmanagedClasspath ++= scalaInstance.value.libraryJars.toSeq,
|
||||
Compile / unmanagedClasspath ++= {
|
||||
val converter = fileConverter.value
|
||||
val xs = scalaInstance.value.libraryJars.toSeq
|
||||
xs.map(x => converter.toVirtualFile(x.toPath()): HashedVirtualFileRef)
|
||||
},
|
||||
classpathTypes := Set(tpe),
|
||||
|
||||
// custom configuration artifacts
|
||||
|
|
@ -66,7 +70,8 @@ def retrieveID = org % "test-retrieve" % "2.0"
|
|||
def checkTask(classpath: TaskKey[Classpath]) =
|
||||
Def.task {
|
||||
val deps = libraryDependencies.value
|
||||
val cp = (Compile / classpath).value.files
|
||||
given FileConverter = fileConverter.value
|
||||
val cp = (Compile / classpath).value.files.map(_.toFile())
|
||||
val loader = ClasspathUtilities.toLoader(cp, scalaInstance.value.loader)
|
||||
try { Class.forName("test.Test", false, loader); () }
|
||||
catch { case _: ClassNotFoundException | _: NoClassDefFoundError => sys.error(s"Dependency not retrieved properly: $deps, $cp") }
|
||||
|
|
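From here on, most hunks apply the same mechanical migration: in sbt 2.x a Classpath entry's .data field is an xsbti.HashedVirtualFileRef rather than a java.io.File, so _.data.getName becomes _.data.name, raw files are lifted into virtual references through fileConverter, and a given FileConverter is needed to resolve entries back to paths on disk. A minimal build.sbt sketch of both directions (the task name is hypothetical):

import xsbti.{ FileConverter, HashedVirtualFileRef }

val showJars = taskKey[Unit]("")

showJars := {
  val converter = fileConverter.value
  // File -> HashedVirtualFileRef: lift raw jars into virtual references
  val libJars = scalaInstance.value.libraryJars.toSeq
    .map(f => converter.toVirtualFile(f.toPath()): HashedVirtualFileRef)
  // HashedVirtualFileRef -> file on disk: .files needs a FileConverter in scope
  given FileConverter = converter
  val cp = (Compile / externalDependencyClasspath).value.files.map(_.toFile())
  (libJars.map(_.name) ++ cp.map(_.getName)).foreach(println)
}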
|
|||
|
|
@ -7,7 +7,7 @@ val checkScalaLibrary = TaskKey[Unit]("checkScalaLibrary")
|
|||
checkScalaLibrary := {
|
||||
val scalaLibsJars = (Compile / managedClasspath)
|
||||
.value
|
||||
.map(_.data.getName)
|
||||
.map(_.data.name)
|
||||
.filter(_.startsWith("scala-library"))
|
||||
.sorted
|
||||
val expectedScalaLibsJars = Seq(
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ lazy val root = (project in file(".")).
|
|||
settings(commonSettings: _*).
|
||||
settings(
|
||||
check := {
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted
|
||||
if (!(acp contains "netty-3.2.0.Final.jar")) {
|
||||
sys.error("netty-3.2.0.Final not found when it should be included: " + acp.toString)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -70,9 +70,9 @@ lazy val root = (project in file(".")).
|
|||
(ThisBuild / organization) := "org.example",
|
||||
(ThisBuild / version) := "1.0",
|
||||
check := {
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted
|
||||
val ccp = (c / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "demo_2.10.jar"}
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted
|
||||
val ccp = (c / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "demo_2.10.jar"}
|
||||
if (!(acp contains "commons-io-1.4-sources.jar")) {
|
||||
sys.error("commons-io-1.4-sources not found when it should be included: " + acp.toString)
|
||||
}
|
||||
|
|
@ -90,9 +90,9 @@ lazy val root = (project in file(".")).
|
|||
"\n - b (plain) " + bcpWithoutSource.toString +
|
||||
"\n - c (inter-project) " + ccpWithoutSource.toString)
|
||||
|
||||
val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "commons-io-1.4.jar"}
|
||||
val btestcp = (b / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "commons-io-1.4.jar"}
|
||||
val ctestcp = (c / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "demo_2.10.jar"} filterNot { _ == "commons-io-1.4.jar"}
|
||||
val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "commons-io-1.4.jar"}
|
||||
val btestcp = (b / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "commons-io-1.4.jar"}
|
||||
val ctestcp = (c / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "demo_2.10.jar"} filterNot { _ == "commons-io-1.4.jar"}
|
||||
if (ctestcp contains "junit-4.13.1.jar") {
|
||||
sys.error("junit found when it should be excluded: " + ctestcp.toString)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ val y2 = project.settings(
|
|||
)
|
||||
|
||||
TaskKey[Unit]("check") := {
|
||||
val x1cp = (x1 / Compile / externalDependencyClasspath).value.map(_.data.getName).sorted
|
||||
val x1cp = (x1 / Compile / externalDependencyClasspath).value.map(_.data.name).sorted
|
||||
def x1cpStr = x1cp.mkString("\n* ", "\n* ", "")
|
||||
|
||||
// if (!(x1cp contains "slf4j-api-1.6.6.jar"))
|
||||
|
|
|
|||
|
|
@ -38,16 +38,16 @@ lazy val root = (project in file(".")).
|
|||
version := "1.0",
|
||||
updateOptions := updateOptions.value.withCachedResolution(true),
|
||||
check := {
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
if (acp exists { _.data.getName contains "commons-io" }) {
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
if (acp exists { _.data.name contains "commons-io" }) {
|
||||
sys.error("commons-io found when it should be excluded")
|
||||
}
|
||||
if (acp exists { _.data.getName contains "commons-codec" }) {
|
||||
if (acp exists { _.data.name contains "commons-codec" }) {
|
||||
sys.error("commons-codec found when it should be excluded")
|
||||
}
|
||||
// This is checking to make sure excluded graph is not getting picked up
|
||||
if (!(bcp exists { _.data.getName contains "commons-io" })) {
|
||||
if (!(bcp exists { _.data.name contains "commons-io" })) {
|
||||
sys.error("commons-io NOT found when it should NOT be excluded")
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -67,33 +67,33 @@ lazy val root = (project in file(".")).
|
|||
ThisBuild / version := "1.0",
|
||||
check := {
|
||||
// sys.error(dependencyCacheDirectory.value.toString)
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val ccp = (c / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val dcp = (d / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val ccp = (c / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val dcp = (d / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
|
||||
if (!(acp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) {
|
||||
if (!(acp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) {
|
||||
sys.error("spring-core-3.2.2 is not found on a")
|
||||
}
|
||||
if (!(bcp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) {
|
||||
if (!(bcp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) {
|
||||
sys.error("spring-core-3.2.2 is not found on b")
|
||||
}
|
||||
if (!(ccp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) {
|
||||
if (!(ccp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) {
|
||||
sys.error("spring-core-3.2.2 is not found on c")
|
||||
}
|
||||
if (!(dcp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) {
|
||||
if (!(dcp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) {
|
||||
sys.error("spring-core-3.2.2 is not found on d\n" + dcp.toString)
|
||||
}
|
||||
if (!(acp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
if (!(acp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
sys.error("spring-tx-3.1.2 is not found on a")
|
||||
}
|
||||
if (!(bcp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
if (!(bcp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
sys.error("spring-tx-3.1.2 is not found on b")
|
||||
}
|
||||
if (!(ccp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
if (!(ccp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
sys.error("spring-tx-3.1.2 is not found on c")
|
||||
}
|
||||
if (!(dcp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
if (!(dcp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) {
|
||||
sys.error("spring-tx-3.1.2 is not found on d")
|
||||
}
|
||||
if (acp == bcp) ()
|
||||
|
|
|
|||
|
|
@ -40,9 +40,9 @@ lazy val root = (project in file(".")).
|
|||
updateOptions := updateOptions.value.withCachedResolution(true),
|
||||
check := {
|
||||
val ur = (a / update).value
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName}
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name}
|
||||
val atestcp0 = (a / Test / fullClasspath).value
|
||||
val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.getName}
|
||||
val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.name}
|
||||
// This is checking to make sure interproject dependency works
|
||||
if (acp exists { _ contains "scalatest" }) {
|
||||
sys.error("scalatest found when it should NOT be included: " + acp.toString)
|
||||
|
|
|
|||
|
|
@ -43,8 +43,8 @@ lazy val b = project.
|
|||
lazy val root = (project in file(".")).
|
||||
settings(
|
||||
check := {
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
if (acp == bcp) ()
|
||||
else sys.error("Different classpaths are found:" +
|
||||
"\n - a (overrides + cached) " + acp.toString +
|
||||
|
|
|
|||
|
|
@ -12,6 +12,6 @@ TaskKey[Unit]("check") := {
|
|||
def isTestJar(n: String): Boolean =
|
||||
(n contains "scalacheck") ||
|
||||
(n contains "specs2")
|
||||
val testLibs = cp map (_.data.getName) filter isTestJar
|
||||
val testLibs = cp map (_.data.name) filter isTestJar
|
||||
assert(testLibs.isEmpty, s"Compile Classpath has test libs:\n * ${testLibs.mkString("\n * ")}")
|
||||
}
|
||||
|
|
@ -26,13 +26,13 @@ lazy val root = (project in file(".")).
|
|||
check := {
|
||||
(a / update).value
|
||||
(b / update).value
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName}
|
||||
val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name}
|
||||
|
||||
if (acp exists { _.data.getName contains "slf4j-api-1.7.5.jar" }) {
|
||||
if (acp exists { _.data.name contains "slf4j-api-1.7.5.jar" }) {
|
||||
sys.error("slf4j-api-1.7.5.jar found when it should NOT be included: " + acp.toString)
|
||||
}
|
||||
if (bcp exists { _.data.getName contains "dispatch-core_2.11-0.11.1.jar" }) {
|
||||
if (bcp exists { _.data.name contains "dispatch-core_2.11-0.11.1.jar" }) {
|
||||
sys.error("dispatch-core_2.11-0.11.1.jar found when it should NOT be included: " + bcp.toString)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -11,13 +11,16 @@ lazy val root = (project in file(".")).
|
|||
scalaOverride := check("scala.App").value
|
||||
)
|
||||
|
||||
def check(className: String): Def.Initialize[Task[Unit]] = (Compile / fullClasspath) map { cp =>
|
||||
val existing = cp.files.filter(_.getName contains "scala-library")
|
||||
println("Full classpath: " + cp.mkString("\n\t", "\n\t", ""))
|
||||
println("scala-library.jar: " + existing.mkString("\n\t", "\n\t", ""))
|
||||
val loader = ClasspathUtilities.toLoader(existing)
|
||||
Class.forName(className, false, loader)
|
||||
}
|
||||
def check(className: String): Def.Initialize[Task[Unit]] =
|
||||
import sbt.TupleSyntax.*
|
||||
(Compile / fullClasspath, fileConverter.toTaskable) mapN { (cp, c) =>
|
||||
given FileConverter = c
|
||||
val existing = cp.files.filter(_.toFile.getName contains "scala-library")
|
||||
println("Full classpath: " + cp.mkString("\n\t", "\n\t", ""))
|
||||
println("scala-library.jar: " + existing.mkString("\n\t", "\n\t", ""))
|
||||
val loader = ClasspathUtilities.toLoader(existing.map(_.toFile()))
|
||||
Class.forName(className, false, loader)
|
||||
}
|
||||
|
||||
def dependencies(base: File) =
|
||||
if( ( base / "stm").exists ) ("org.scala-tools" % "scala-stm_2.8.2" % "0.6") :: Nil
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ def libraryDeps(base: File) = {
|
|||
def check(ver: String) =
|
||||
(Compile / dependencyClasspath) map { jars =>
|
||||
val log4j = jars map (_.data) collect {
|
||||
case f if f.getName contains "log4j-" => f.getName
|
||||
case f if f.name contains "log4j-" => f.name
|
||||
}
|
||||
if (log4j.size != 1 || !log4j.head.contains(ver))
|
||||
sys.error("Did not download the correct jar.")
|
||||
|
|
|
|||
|
|
@ -3,7 +3,11 @@ ThisBuild / useCoursier := false
|
|||
lazy val commonSettings = Seq(
|
||||
autoScalaLibrary := false,
|
||||
scalaModuleInfo := None,
|
||||
(Compile / unmanagedJars) ++= (scalaInstance map (_.allJars.toSeq)).value,
|
||||
(Compile / unmanagedJars) ++= {
|
||||
val converter = fileConverter.value
|
||||
val xs = scalaInstance.value.allJars.toSeq
|
||||
xs.map(_.toPath).map(x => converter.toVirtualFile(x): HashedVirtualFileRef)
|
||||
},
|
||||
(packageSrc / publishArtifact) := false,
|
||||
(packageDoc / publishArtifact) := false,
|
||||
publishMavenStyle := false
|
||||
|
|
|
|||
|
|
@ -32,8 +32,9 @@ lazy val expectedInter =
|
|||
</dependency>
|
||||
|
||||
def checkTask(expectedDep: xml.Elem) = TaskKey[Unit]("checkPom") := {
|
||||
val file = makePom.value
|
||||
val pom = xml.XML.loadFile(file)
|
||||
val vf = makePom.value
|
||||
val converter = fileConverter.value
|
||||
val pom = xml.XML.loadFile(converter.toPath(vf).toFile)
|
||||
val actual = pom \\ "dependencies"
|
||||
val expected = <d>
|
||||
{expectedDep}
|
||||
|
|
|
|||