diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 09186d676..481a7b642 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,10 +52,7 @@ jobs: env: JAVA_OPTS: -Xms800M -Xmx2G -Xss6M -XX:ReservedCodeCacheSize=128M -server -Dsbt.io.virtual=false -Dfile.encoding=UTF-8 JVM_OPTS: -Xms800M -Xmx2G -Xss6M -XX:ReservedCodeCacheSize=128M -server -Dsbt.io.virtual=false -Dfile.encoding=UTF-8 - SCALA_212: 2.12.17 - SCALA_213: 2.13.8 - SCALA_3: 3.1.0 - UTIL_TESTS: "utilCache/test utilControl/test utilInterface/test utilLogging/test utilPosition/test utilRelation/test utilScripted/test utilTracking/test" + UTIL_TESTS: "utilControl/test utilInterface/test utilLogging/test utilPosition/test utilRelation/test utilScripted/test" SBT_LOCAL: false TEST_SBT_VER: 1.5.0 SBT_ETC_FILE: $HOME/etc/sbt/sbtopts @@ -126,9 +123,9 @@ jobs: # ./sbt -v --client "serverTestProj/test" # ./sbt -v --client doc ./sbt -v --client "all $UTIL_TESTS" - ./sbt -v --client ++$SCALA_213 + ./sbt -v --client ++2.13.x ./sbt -v --client "all $UTIL_TESTS" - ./sbt -v --client ++$SCALA_212 + ./sbt -v --client ++2.12.x ./sbt -v --client "all $UTIL_TESTS" - name: Build and test (2) if: ${{ matrix.jobtype == 2 }} @@ -156,8 +153,8 @@ jobs: shell: bash run: | ./sbt -v --client test - ./sbt -v --client "++$SCALA_213; all $UTIL_TESTS" - ./sbt -v --client "++$SCALA_212; all $UTIL_TESTS" + ./sbt -v --client "++2.13.x; all $UTIL_TESTS" + ./sbt -v --client "++2.12.x; all $UTIL_TESTS" # - name: Build and test (6) # if: ${{ matrix.jobtype == 6 }} # shell: bash diff --git a/build.sbt b/build.sbt index 092379d9f..ffe185655 100644 --- a/build.sbt +++ b/build.sbt @@ -427,17 +427,17 @@ lazy val utilRelation = (project in file("internal") / "util-relation") lazy val utilCache = (project in file("util-cache")) .settings( utilCommonSettings, + testedBaseSettings, name := "Util Cache", libraryDependencies ++= Seq(sjsonNewScalaJson.value, sjsonNewMurmurhash.value, scalaReflect.value), - 
libraryDependencies ++= Seq(scalatest % "test"), utilMimaSettings, - mimaBinaryIssueFilters ++= Seq( - // Added a method to a sealed trait, technically not a problem for Scala - exclude[ReversedMissingMethodProblem]("sbt.util.HashFileInfo.hashArray"), - ) + Test / fork := true, + ) + .configure( + addSbtIO, + addSbtCompilerInterface, ) - .configure(addSbtIO) // Builds on cache to provide caching for filesystem-related operations lazy val utilTracking = (project in file("util-tracking")) @@ -660,6 +660,7 @@ lazy val actionsProj = (project in file("main-actions")) stdTaskProj, taskProj, testingProj, + utilCache, utilLogging, utilRelation, utilTracking, @@ -735,7 +736,7 @@ lazy val protocolProj = (project in file("protocol")) // General command support and core commands not specific to a build system lazy val commandProj = (project in file("main-command")) .enablePlugins(ContrabandPlugin, JsonCodecPlugin) - .dependsOn(protocolProj, completeProj, utilLogging) + .dependsOn(protocolProj, completeProj, utilLogging, utilCache) .settings( testedBaseSettings, name := "Command", @@ -801,7 +802,10 @@ lazy val commandProj = (project in file("main-command")) // The core macro project defines the main logic of the DSL, abstracted // away from several sbt implementors (tasks, settings, et cetera). 
lazy val coreMacrosProj = (project in file("core-macros")) - .dependsOn(collectionProj) + .dependsOn( + collectionProj, + utilCache, + ) .settings( testedBaseSettings, name := "Core Macros", @@ -1268,20 +1272,20 @@ def allProjects = bundledLauncherProj, sbtClientProj, buildFileProj, + utilCache, + utilTracking, ) ++ lowerUtilProjects // These need to be cross published to 2.12 and 2.13 for Zinc lazy val lowerUtilProjects = Seq( utilCore, - utilCache, utilControl, utilInterface, utilLogging, utilPosition, utilRelation, utilScripted, - utilTracking ) lazy val nonRoots = allProjects.map(p => LocalProject(p.id)) diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala index b8afeff4f..be2becf7a 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/Cont.scala @@ -4,9 +4,21 @@ package util package appmacro import scala.collection.mutable.ListBuffer +import scala.reflect.{ ClassTag, TypeTest } import scala.quoted.* -import sbt.util.Applicative -import sbt.util.Monad +import sjsonnew.{ BasicJsonProtocol, HashWriter, JsonFormat } +import sbt.util.{ + ActionCache, + ActionCacheStore, + Applicative, + BuildWideCacheConfiguration, + Cache, + CacheLevelTag, + Digest, + Monad, +} +import xsbti.VirtualFile +import Types.Id /** * Implementation of a macro that provides a direct syntax for applicative functors and monads. 
It @@ -22,12 +34,13 @@ trait Cont: */ def contMapN[A: Type, F[_], Effect[_]: Type]( tree: Expr[A], - instanceExpr: Expr[Applicative[F]] + applicativeExpr: Expr[Applicative[F]], + cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]], )(using iftpe: Type[F], eatpe: Type[Effect[A]], ): Expr[F[Effect[A]]] = - contMapN[A, F, Effect](tree, instanceExpr, conv.idTransform) + contMapN[A, F, Effect](tree, applicativeExpr, cacheConfigExpr, conv.idTransform) /** * Implementation of a macro that provides a direct syntax for applicative functors. It is @@ -35,13 +48,14 @@ trait Cont: */ def contMapN[A: Type, F[_], Effect[_]: Type]( tree: Expr[A], - instanceExpr: Expr[Applicative[F]], + applicativeExpr: Expr[Applicative[F]], + cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]], inner: conv.TermTransform[Effect] )(using iftpe: Type[F], eatpe: Type[Effect[A]], ): Expr[F[Effect[A]]] = - contImpl[A, F, Effect](Left(tree), instanceExpr, inner) + contImpl[A, F, Effect](Left(tree), applicativeExpr, cacheConfigExpr, inner) /** * Implementation of a macro that provides a direct syntax for applicative functors. It is @@ -49,12 +63,13 @@ trait Cont: */ def contFlatMap[A: Type, F[_], Effect[_]: Type]( tree: Expr[F[A]], - instanceExpr: Expr[Applicative[F]], + applicativeExpr: Expr[Applicative[F]], + cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]], )(using iftpe: Type[F], eatpe: Type[Effect[A]], ): Expr[F[Effect[A]]] = - contFlatMap[A, F, Effect](tree, instanceExpr, conv.idTransform) + contFlatMap[A, F, Effect](tree, applicativeExpr, cacheConfigExpr, conv.idTransform) /** * Implementation of a macro that provides a direct syntax for applicative functors. 
It is @@ -62,13 +77,14 @@ trait Cont: */ def contFlatMap[A: Type, F[_], Effect[_]: Type]( tree: Expr[F[A]], - instanceExpr: Expr[Applicative[F]], + applicativeExpr: Expr[Applicative[F]], + cacheConfigExpr: Option[Expr[BuildWideCacheConfiguration]], inner: conv.TermTransform[Effect] )(using iftpe: Type[F], eatpe: Type[Effect[A]], ): Expr[F[Effect[A]]] = - contImpl[A, F, Effect](Right(tree), instanceExpr, inner) + contImpl[A, F, Effect](Right(tree), applicativeExpr, cacheConfigExpr, inner) def summonAppExpr[F[_]: Type]: Expr[Applicative[F]] = import conv.qctx @@ -78,6 +94,30 @@ trait Cont: .summon[Applicative[F]] .getOrElse(sys.error(s"Applicative[F] not found for ${TypeRepr.of[F].typeSymbol}")) + def summonHashWriter[A: Type]: Expr[HashWriter[A]] = + import conv.qctx + import qctx.reflect.* + given qctx.type = qctx + Expr + .summon[HashWriter[A]] + .getOrElse(sys.error(s"HashWriter[A] not found for ${TypeRepr.of[A].show}")) + + def summonJsonFormat[A: Type]: Expr[JsonFormat[A]] = + import conv.qctx + import qctx.reflect.* + given qctx.type = qctx + Expr + .summon[JsonFormat[A]] + .getOrElse(sys.error(s"JsonFormat[A] not found for ${TypeRepr.of[A].show}")) + + def summonClassTag[A: Type]: Expr[ClassTag[A]] = + import conv.qctx + import qctx.reflect.* + given qctx.type = qctx + Expr + .summon[ClassTag[A]] + .getOrElse(sys.error(s"ClassTag[A] not found for ${TypeRepr.of[A].show}")) + /** * Implementation of a macro that provides a direct syntax for applicative functors and monads. * It is intended to bcke used in conjunction with another macro that conditions the inputs. 
@@ -117,7 +157,8 @@ trait Cont: */ def contImpl[A: Type, F[_], Effect[_]: Type]( eitherTree: Either[Expr[A], Expr[F[A]]], - instanceExpr: Expr[Applicative[F]], + applicativeExpr: Expr[Applicative[F]], + cacheConfigExprOpt: Option[Expr[BuildWideCacheConfiguration]], inner: conv.TermTransform[Effect] )(using iftpe: Type[F], @@ -134,18 +175,34 @@ trait Cont: case Right(r) => (r, faTpe) val inputBuf = ListBuffer[Input]() + val outputBuf = ListBuffer[Output]() def makeApp(body: Term, inputs: List[Input]): Expr[F[Effect[A]]] = inputs match case Nil => pure(body) case x :: Nil => genMap(body, x) case xs => genMapN(body, xs) + def unitExpr: Expr[Unit] = '{ () } // no inputs, so construct F[A] via Instance.pure or pure+flatten def pure(body: Term): Expr[F[Effect[A]]] = + val tags = CacheLevelTag.all.toList def pure0[A1: Type](body: Expr[A1]): Expr[F[A1]] = - '{ - $instanceExpr.pure[A1] { () => $body } - } + cacheConfigExprOpt match + case Some(cacheConfigExpr) => + '{ + $applicativeExpr.pure[A1] { () => + ${ + callActionCache[A1, Unit](outputBuf.toList, cacheConfigExpr, tags)( + body = body, + input = unitExpr, + ) + } + } + } + case None => + '{ + $applicativeExpr.pure[A1] { () => $body } + } eitherTree match case Left(_) => pure0[Effect[A]](inner(body).asExprOf[Effect[A]]) case Right(_) => @@ -156,7 +213,7 @@ trait Cont: def flatten(m: Expr[F[F[Effect[A]]]]): Expr[F[Effect[A]]] = '{ { - val i1 = $instanceExpr.asInstanceOf[Monad[F]] + val i1 = $applicativeExpr.asInstanceOf[Monad[F]] i1.flatten[Effect[A]]($m.asInstanceOf[F[F[Effect[A]]]]) } } @@ -183,13 +240,31 @@ trait Cont: convert[x](name, qual) transform { (tree: Term) => typed[x](Ref(param.symbol)) } - transformWrappers(body.asTerm.changeOwner(sym), substitute, sym) + val modifiedBody = + transformWrappers(body.asTerm.changeOwner(sym), substitute, sym).asExprOf[A1] + cacheConfigExprOpt match + case Some(cacheConfigExpr) => + if input.isCacheInput then + callActionCache(outputBuf.toList, cacheConfigExpr, input.tags)( + 
body = modifiedBody, + input = Ref(param.symbol).asExprOf[a], + ).asTerm.changeOwner(sym) + else + callActionCache[A1, Unit]( + outputBuf.toList, + cacheConfigExpr, + input.tags, + )( + body = modifiedBody, + input = unitExpr, + ).asTerm.changeOwner(sym) + case None => modifiedBody.asTerm } ).asExprOf[a => A1] val expr = input.term.asExprOf[F[a]] typed[F[A1]]( '{ - $instanceExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda) + $applicativeExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda) }.asTerm ).asExprOf[F[A1]] eitherTree match @@ -203,57 +278,137 @@ trait Cont: val br = makeTuple(inputs) val lambdaTpe = MethodType(List("$p0"))(_ => List(br.inputTupleTypeRepr), _ => TypeRepr.of[A1]) - val lambda = Lambda( - owner = Symbol.spliceOwner, - tpe = lambdaTpe, - rhsFn = (sym, params) => { - val p0 = params.head.asInstanceOf[Term] - // Called when transforming the tree to add an input. - // For `qual` of type F[A], and a `selection` qual.value, - // the call is addType(Type A, Tree qual) - // The result is a Tree representing a reference to - // the bound value of the input. - val substitute = [x] => - (name: String, tpe: Type[x], qual: Term, oldTree: Term) => - given Type[x] = tpe - convert[x](name, qual) transform { (replacement: Term) => - val idx = inputs.indexWhere(input => input.qual == qual) - Select - .unique(Ref(p0.symbol), "apply") - .appliedToTypes(List(br.inputTupleTypeRepr)) - .appliedToArgs(List(Literal(IntConstant(idx)))) + br.inputTupleTypeRepr.asType match + case '[inputTypeTpe] => + val lambda = Lambda( + owner = Symbol.spliceOwner, + tpe = lambdaTpe, + rhsFn = (sym, params) => { + val p0 = params.head.asInstanceOf[Term] + // Called when transforming the tree to add an input. + // For `qual` of type F[A], and a `selection` qual.value, + // the call is addType(Type A, Tree qual) + // The result is a Tree representing a reference to + // the bound value of the input. 
+ val substitute = [x] => + (name: String, tpe: Type[x], qual: Term, oldTree: Term) => + given Type[x] = tpe + convert[x](name, qual) transform { (replacement: Term) => + val idx = inputs.indexWhere(input => input.qual == qual) + applyTuple(p0, br.inputTupleTypeRepr, idx) + } + val modifiedBody = + transformWrappers(body.asTerm.changeOwner(sym), substitute, sym).asExprOf[A1] + cacheConfigExprOpt match + case Some(cacheConfigExpr) => + if inputs.exists(_.isCacheInput) then + val tags = inputs + .filter(_.isCacheInput) + .map(_.tags.toSet) + .reduce(_ & _) + .toList + require( + tags.nonEmpty, + s"""cacheLevelTag union must be non-empty: ${inputs.mkString("\n")}""" + ) + br.cacheInputTupleTypeRepr.asType match + case '[cacheInputTpe] => + callActionCache(outputBuf.toList, cacheConfigExpr, tags)( + body = modifiedBody, + input = br.cacheInputExpr(p0).asExprOf[cacheInputTpe], + ).asTerm.changeOwner(sym) + else + val tags = CacheLevelTag.all.toList + callActionCache[A1, Unit](outputBuf.toList, cacheConfigExpr, tags)( + body = modifiedBody, + input = unitExpr, + ).asTerm.changeOwner(sym) + case None => + modifiedBody.asTerm } - transformWrappers(body.asTerm.changeOwner(sym), substitute, sym) - } - ) - val tupleMapRepr = TypeRepr - .of[Tuple.Map] - .appliedTo(List(br.inputTupleTypeRepr, TypeRepr.of[F])) - tupleMapRepr.asType match - case '[tupleMap] => - br.inputTupleTypeRepr.asType match - case '[inputTypeTpe] => + ) + val tupleMapRepr = TypeRepr + .of[Tuple.Map] + .appliedTo(List(br.inputTupleTypeRepr, TypeRepr.of[F])) + tupleMapRepr.asType match + case '[tupleMap] => '{ - given Applicative[F] = $instanceExpr + given Applicative[F] = $applicativeExpr import TupleMapExtension.* ${ br.tupleExpr.asInstanceOf[Expr[Tuple.Map[inputTypeTpe & Tuple, F]]] } .mapN(${ lambda.asExprOf[inputTypeTpe & Tuple => A1] }) } - eitherTree match case Left(_) => genMapN0[Effect[A]](inner(body).asExprOf[Effect[A]]) case Right(_) => 
flatten(genMapN0[F[Effect[A]]](inner(body).asExprOf[F[Effect[A]]])) + // call `ActionCache.cache` + def callActionCache[A1: Type, A2: Type]( + outputs: List[Output], + cacheConfigExpr: Expr[BuildWideCacheConfiguration], + tags: List[CacheLevelTag], + )(body: Expr[A1], input: Expr[A2]): Expr[A1] = + val codeContentHash = Expr[Long](body.show.##) + val extraHash = Expr[Long](0L) + val aJsonFormat = summonJsonFormat[A1] + val aClassTag = summonClassTag[A1] + val inputHashWriter = + if TypeRepr.of[A2] =:= TypeRepr.of[Unit] then + '{ + import BasicJsonProtocol.* + summon[HashWriter[Unit]] + }.asExprOf[HashWriter[A2]] + else summonHashWriter[A2] + val tagsExpr = '{ List(${ Varargs(tags.map(Expr[CacheLevelTag](_))) }: _*) } + val block = letOutput(outputs)(body) + '{ + given HashWriter[A2] = $inputHashWriter + given JsonFormat[A1] = $aJsonFormat + given ClassTag[A1] = $aClassTag + ActionCache + .cache( + $input, + codeContentHash = Digest.dummy($codeContentHash), + extraHash = Digest.dummy($extraHash), + tags = $tagsExpr + )({ _ => + $block + })($cacheConfigExpr) + } + + // wrap body in between output var declarations and var references + def letOutput[A1: Type]( + outputs: List[Output] + )(body: Expr[A1]): Expr[(A1, Seq[VirtualFile])] = + Block( + outputs.map(_.toVarDef), + '{ + ( + $body, + List(${ Varargs[VirtualFile](outputs.map(_.toRef.asExprOf[VirtualFile])) }: _*) + ) + }.asTerm + ).asExprOf[(A1, Seq[VirtualFile])] + + val WrapOutputName = "wrapOutput_\u2603\u2603" // Called when transforming the tree to add an input. // For `qual` of type F[A], and a `selection` qual.value. 
val record = [a] => (name: String, tpe: Type[a], qual: Term, oldTree: Term) => given t: Type[a] = tpe convert[a](name, qual) transform { (replacement: Term) => - inputBuf += Input(TypeRepr.of[a], qual, replacement, freshName("q")) - oldTree + if name != WrapOutputName then + // todo cache opt-out attribute + inputBuf += Input(TypeRepr.of[a], qual, replacement, freshName("q")) + oldTree + else + val output = Output(TypeRepr.of[a], qual, freshName("o"), Symbol.spliceOwner) + outputBuf += output + if cacheConfigExprOpt.isDefined then output.toAssign + else oldTree + end if } val tx = transformWrappers(expr.asTerm, record, Symbol.spliceOwner) val tr = makeApp(tx, inputBuf.toList) diff --git a/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala b/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala index 89a07860c..1ed2fffb5 100644 --- a/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala +++ b/core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala @@ -5,6 +5,8 @@ import scala.compiletime.summonInline import scala.quoted.* import scala.reflect.TypeTest import scala.collection.mutable +import sbt.util.cacheLevel +import sbt.util.CacheLevelTag trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int): import qctx.reflect.* @@ -15,10 +17,6 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int) counter = counter + 1 s"$$${prefix}${counter}" - /** - * Constructs a new, synthetic, local var with type `tpe`, a unique name, initialized to - * zero-equivalent (Zero[A]), and owned by `parent`. 
- */ def freshValDef(parent: Symbol, tpe: TypeRepr, rhs: Term): ValDef = tpe.asType match case '[a] => @@ -37,14 +35,27 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int) def makeTuple(inputs: List[Input]): BuilderResult = new BuilderResult: - override def inputTupleTypeRepr: TypeRepr = + override lazy val inputTupleTypeRepr: TypeRepr = tupleTypeRepr(inputs.map(_.tpe)) override def tupleExpr: Expr[Tuple] = Expr.ofTupleFromSeq(inputs.map(_.term.asExpr)) + override def cacheInputTupleTypeRepr: TypeRepr = + tupleTypeRepr(inputs.filter(_.isCacheInput).map(_.tpe)) + override def cacheInputExpr(tupleTerm: Term): Expr[Tuple] = + Expr.ofTupleFromSeq(inputs.zipWithIndex.flatMap { case (input, idx) => + if input.tags.nonEmpty then + input.tpe.asType match + case '[a] => + Some(applyTuple(tupleTerm, inputTupleTypeRepr, idx).asExprOf[a]) + else None + }) trait BuilderResult: def inputTupleTypeRepr: TypeRepr def tupleExpr: Expr[Tuple] + def cacheInputTupleTypeRepr: TypeRepr + def cacheInputExpr(tupleTerm: Term): Expr[Tuple] + end BuilderResult def tupleTypeRepr(param: List[TypeRepr]): TypeRepr = @@ -52,14 +63,69 @@ trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int) case x :: xs => TypeRepr.of[scala.*:].appliedTo(List(x, tupleTypeRepr(xs))) case Nil => TypeRepr.of[EmptyTuple] + private val cacheLevelSym = Symbol.requiredClass("sbt.util.cacheLevel") final class Input( val tpe: TypeRepr, val qual: Term, val term: Term, - val name: String + val name: String, ): override def toString: String = - s"Input($tpe, $qual, $term, $name)" + s"Input($tpe, $qual, $term, $name, $tags)" + + def isCacheInput: Boolean = tags.nonEmpty + lazy val tags = extractTags(qual) + private def extractTags(tree: Term): List[CacheLevelTag] = + def getAnnotation(tree: Term) = + Option(tree.tpe.termSymbol) match + case Some(x) => x.getAnnotation(cacheLevelSym) + case None => tree.symbol.getAnnotation(cacheLevelSym) + def extractTags0(tree: Term) = + 
getAnnotation(tree) match + case Some(annot) => + annot.asExprOf[cacheLevel] match + case '{ cacheLevel(include = Array.empty[CacheLevelTag]($_)) } => Nil + case '{ cacheLevel(include = Array[CacheLevelTag]($include*)) } => + include.value.get.toList + case _ => sys.error(Printer.TreeStructure.show(annot) + " does not match") + case None => CacheLevelTag.all.toList + tree match + case Inlined(_, _, tree) => extractTags(tree) + case Apply(_, List(arg)) => extractTags(arg) + case _ => extractTags0(tree) + + /** + * Represents an output expression via Def.declareOutput + */ + final class Output( + val tpe: TypeRepr, + val term: Term, + val name: String, + val parent: Symbol, + ): + override def toString: String = + s"Output($tpe, $term, $name)" + val placeholder: Symbol = + tpe.asType match + case '[a] => + Symbol.newVal( + parent, + name, + tpe, + Flags.Mutable, + Symbol.noSymbol + ) + def toVarDef: ValDef = + ValDef(placeholder, rhs = Some('{ null }.asTerm)) + def toAssign: Term = Assign(toRef, term) + def toRef: Ref = Ref(placeholder) + end Output + + def applyTuple(tupleTerm: Term, tpe: TypeRepr, idx: Int): Term = + Select + .unique(Ref(tupleTerm.symbol), "apply") + .appliedToTypes(List(tpe)) + .appliedToArgs(List(Literal(IntConstant(idx)))) trait TermTransform[F[_]]: def apply(in: Term): Term diff --git a/core-macros/src/test/scala/sbt/internal/ContTest.scala b/core-macros/src/test/scala/sbt/internal/ContTest.scala index 357ae990c..a6991f0bf 100644 --- a/core-macros/src/test/scala/sbt/internal/ContTest.scala +++ b/core-macros/src/test/scala/sbt/internal/ContTest.scala @@ -4,32 +4,40 @@ import sbt.internal.util.appmacro.* import verify.* import ContTestMacro.* import sbt.util.Applicative +import sjsonnew.BasicJsonProtocol object ContTest extends BasicTestSuite: test("pure") { given Applicative[List] = sbt.util.ListInstances.listMonad - val actual = contMapNMacro[List, Int](12) + val actual = uncachedContMapNMacro[List, Int](12) assert(actual == List(12)) } 
test("getMap") { given Applicative[List] = sbt.util.ListInstances.listMonad - val actual = contMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2) + val actual = uncachedContMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2) assert(actual == List(3)) + val actual2 = uncachedContMapNMacro[List, Int](ContTest.wrapInit(List(1)) + 2) + assert(actual2 == List(3)) } test("getMapN") { given Applicative[List] = sbt.util.ListInstances.listMonad - val actual = contMapNMacro[List, Int]( + val actual = uncachedContMapNMacro[List, Int]( ContTest.wrapInit(List(1)) + ContTest.wrapInit(List(2)) + 3 ) assert(actual == List(6)) + val actual2 = uncachedContMapNMacro[List, Int]( + ContTest.wrapInit(List(1)) + + ContTest.wrapInit(List(2)) + 4 + ) + assert(actual2 == List(7)) } test("getMapN2") { given Applicative[List] = sbt.util.ListInstances.listMonad - val actual = contMapNMacro[List, Int]({ + val actual = uncachedContMapNMacro[List, Int]({ val x = ContTest.wrapInit(List(1)) val y = ContTest.wrapInit(List(2)) x + y + 3 diff --git a/core-macros/src/test/scala/sbt/internal/ContTestMacro.scala b/core-macros/src/test/scala/sbt/internal/ContTestMacro.scala index 0fb9e1c0b..06ac30b88 100644 --- a/core-macros/src/test/scala/sbt/internal/ContTestMacro.scala +++ b/core-macros/src/test/scala/sbt/internal/ContTestMacro.scala @@ -3,19 +3,23 @@ package sbt.internal import sbt.internal.util.Types.Id import sbt.internal.util.appmacro.* import sbt.util.Applicative +import sbt.util.{ ActionCacheStore, InMemoryActionCacheStore } import scala.quoted.* import ConvertTestMacro.InputInitConvert object ContTestMacro: - inline def contMapNMacro[F[_]: Applicative, A](inline expr: A): List[A] = - ${ contMapNMacroImpl[F, A]('expr) } + inline def uncachedContMapNMacro[F[_]: Applicative, A](inline expr: A): List[A] = + ${ uncachedContMapNMacroImpl[F, A]('expr) } - def contMapNMacroImpl[F[_]: Type, A: Type](expr: Expr[A])(using + def uncachedContMapNMacroImpl[F[_]: Type, A: Type](expr: Expr[A])(using qctx: 
Quotes ): Expr[List[A]] = object ContSyntax extends Cont import ContSyntax.* val convert1: Convert[qctx.type] = new InputInitConvert(qctx) - convert1.contMapN[A, List, Id](expr, convert1.summonAppExpr[List], convert1.idTransform) - + convert1.contMapN[A, List, Id]( + tree = expr, + applicativeExpr = convert1.summonAppExpr[List], + cacheConfigExpr = None, + ) end ContTestMacro diff --git a/main-actions/src/main/scala/sbt/Doc.scala b/main-actions/src/main/scala/sbt/Doc.scala deleted file mode 100644 index 798e0b8d4..000000000 --- a/main-actions/src/main/scala/sbt/Doc.scala +++ /dev/null @@ -1,85 +0,0 @@ -/* - * sbt - * Copyright 2011 - 2018, Lightbend, Inc. - * Copyright 2008 - 2010, Mark Harrah - * Licensed under Apache License 2.0 (see LICENSE) - */ - -package sbt - -import java.io.File -import sbt.internal.inc.{ AnalyzingCompiler, PlainVirtualFile } -import sbt.internal.util.ManagedLogger -import sbt.util.CacheStoreFactory -import sbt.util.Logger -import xsbti.Reporter -import xsbti.compile.JavaTools -import sbt.internal.inc.MappedFileConverter - -object Doc { - import RawCompileLike._ - - def scaladoc( - label: String, - cacheStoreFactory: CacheStoreFactory, - compiler: AnalyzingCompiler - ): Gen = - scaladoc(label, cacheStoreFactory, compiler, Seq()) - - def scaladoc( - label: String, - cacheStoreFactory: CacheStoreFactory, - compiler: AnalyzingCompiler, - fileInputOptions: Seq[String] - ): Gen = - cached( - cacheStoreFactory, - fileInputOptions, - prepare( - label + " Scala API documentation", - (sources, classpath, outputDirectory, options, maxErrors, log) => { - compiler.doc( - sources map { x => - PlainVirtualFile(x.toPath) - }, - classpath map { x => - PlainVirtualFile(x.toPath) - }, - MappedFileConverter.empty, - outputDirectory.toPath, - options, - maxErrors, - log - ) - } - ) - ) - - @deprecated("Going away", "1.1.1") - def javadoc( - label: String, - cacheStoreFactory: CacheStoreFactory, - doc: JavaTools, - log: Logger, - reporter: Reporter, - ): Gen = 
??? - - @deprecated("Going away", "1.1.1") - def javadoc( - label: String, - cacheStoreFactory: CacheStoreFactory, - doc: JavaTools, - log: Logger, - reporter: Reporter, - fileInputOptions: Seq[String], - ): Gen = ??? - - @deprecated("Going away", "1.1.1") - val javaSourcesOnly: File => Boolean = _.getName.endsWith(".java") -} - -@deprecated("Going away", "1.1.1") -sealed trait Doc { - @deprecated("Going away", "1.1.1") - type Gen = (Seq[File], Seq[File], File, Seq[String], Int, ManagedLogger) => Unit -} diff --git a/main-actions/src/main/scala/sbt/ForkTests.scala b/main-actions/src/main/scala/sbt/ForkTests.scala index 7994530f0..a464b3622 100755 --- a/main-actions/src/main/scala/sbt/ForkTests.scala +++ b/main-actions/src/main/scala/sbt/ForkTests.scala @@ -19,13 +19,15 @@ import sbt.ConcurrentRestrictions.Tag import sbt.protocol.testing._ import sbt.internal.util.Util.{ AnyOps, none } import sbt.internal.util.{ Terminal => UTerminal } +import xsbti.{ FileConverter, HashedVirtualFileRef } private[sbt] object ForkTests { def apply( runners: Map[TestFramework, Runner], opts: ProcessedOptions, config: Execution, - classpath: Seq[File], + classpath: Seq[HashedVirtualFileRef], + converter: FileConverter, fork: ForkOptions, log: Logger, tags: (Tag, Int)* @@ -36,10 +38,12 @@ private[sbt] object ForkTests { def all(work: Seq[ClassLoader => Unit]) = work.fork(f => f(dummyLoader)) val main = - if (opts.tests.isEmpty) + if opts.tests.isEmpty then constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty)) else - mainTestTask(runners, opts, classpath, fork, log, config.parallel).tagw(config.tags: _*) + mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel).tagw( + config.tags: _* + ) main.tagw(tags: _*).dependsOn(all(opts.setup): _*) flatMap { results => all(opts.cleanup).join.map(_ => results) } @@ -49,31 +53,34 @@ private[sbt] object ForkTests { runners: Map[TestFramework, Runner], tests: Vector[TestDefinition], config: 
Execution, - classpath: Seq[File], + classpath: Seq[HashedVirtualFileRef], + converter: FileConverter, fork: ForkOptions, log: Logger, tags: (Tag, Int)* ): Task[TestOutput] = { val opts = processOptions(config, tests, log) - apply(runners, opts, config, classpath, fork, log, tags: _*) + apply(runners, opts, config, classpath, converter, fork, log, tags: _*) } def apply( runners: Map[TestFramework, Runner], tests: Vector[TestDefinition], config: Execution, - classpath: Seq[File], + classpath: Seq[HashedVirtualFileRef], + converter: FileConverter, fork: ForkOptions, log: Logger, tag: Tag ): Task[TestOutput] = { - apply(runners, tests, config, classpath, fork, log, tag -> 1) + apply(runners, tests, config, classpath, converter, fork, log, tag -> 1) } private[this] def mainTestTask( runners: Map[TestFramework, Runner], opts: ProcessedOptions, - classpath: Seq[File], + classpath: Seq[HashedVirtualFileRef], + converter: FileConverter, fork: ForkOptions, log: Logger, parallel: Boolean @@ -148,8 +155,8 @@ private[sbt] object ForkTests { testListeners.foreach(_.doInit()) val acceptorThread = new Thread(Acceptor) acceptorThread.start() - - val fullCp = classpath ++ Seq( + val cpFiles = classpath.map(converter.toPath).map(_.toFile()) + val fullCp = cpFiles ++ Seq( IO.classLocationPath[ForkMain].toFile, IO.classLocationPath[Framework].toFile ) diff --git a/main-actions/src/main/scala/sbt/Package.scala b/main-actions/src/main/scala/sbt/Package.scala deleted file mode 100644 index 9555af808..000000000 --- a/main-actions/src/main/scala/sbt/Package.scala +++ /dev/null @@ -1,236 +0,0 @@ -/* - * sbt - * Copyright 2011 - 2018, Lightbend, Inc. 
- * Copyright 2008 - 2010, Mark Harrah - * Licensed under Apache License 2.0 (see LICENSE) - */ - -package sbt - -import java.io.File -import java.time.OffsetDateTime -import java.util.jar.{ Attributes, Manifest } -import scala.collection.JavaConverters._ -import sbt.io.IO - -import sjsonnew.JsonFormat - -import sbt.util.Logger - -import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo } -import sbt.util.FileInfo.{ exists, lastModified } -import sbt.util.CacheImplicits._ -import sbt.util.Tracked.{ inputChanged, outputChanged } -import scala.sys.process.Process - -sealed trait PackageOption - -/** - * == Package == - * - * This module provides an API to package jar files. - * - * @see [[https://docs.oracle.com/javase/tutorial/deployment/jar/index.html]] - */ -object Package { - final case class JarManifest(m: Manifest) extends PackageOption { - assert(m != null) - } - final case class MainClass(mainClassName: String) extends PackageOption - final case class ManifestAttributes(attributes: (Attributes.Name, String)*) extends PackageOption - def ManifestAttributes(attributes: (String, String)*): ManifestAttributes = { - val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value) - new ManifestAttributes(converted: _*) - } - // 2010-01-01 - private val default2010Timestamp: Long = 1262304000000L - final case class FixedTimestamp(value: Option[Long]) extends PackageOption - val keepTimestamps: Option[Long] = None - val fixed2010Timestamp: Option[Long] = Some(default2010Timestamp) - def gitCommitDateTimestamp: Option[Long] = - try { - Some( - OffsetDateTime - .parse(Process("git show -s --format=%cI").!!.trim) - .toInstant() - .toEpochMilli() - ) - } catch { - case e: Exception if e.getMessage.startsWith("Nonzero") => - sys.error( - s"git repository was expected for package timestamp; use Package.fixed2010Timestamp or Package.keepTimestamps instead" - ) - } - def setFixedTimestamp(value: Option[Long]): PackageOption = 
- FixedTimestamp(value) - - /** by default we overwrite all timestamps in JAR to epoch time 2010-01-01 for repeatable build */ - lazy val defaultTimestamp: Option[Long] = - sys.env - .get("SOURCE_DATE_EPOCH") - .map(_.toLong * 1000) - .orElse(Some(default2010Timestamp)) - - def timeFromConfiguration(config: Configuration): Option[Long] = - (config.options.collect { case t: FixedTimestamp => t }).headOption match { - case Some(FixedTimestamp(value)) => value - case _ => defaultTimestamp - } - - def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala - // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) - def mergeManifests(manifest: Manifest, mergeManifest: Manifest): Unit = { - mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes) - val entryMap = manifest.getEntries.asScala - for ((key, value) <- mergeManifest.getEntries.asScala) { - entryMap.get(key) match { - case Some(attributes) => mergeAttributes(attributes, value); () - case None => entryMap.put(key, value); () - } - } - } - - /** - * The jar package configuration. Contains all relevant information to create a jar file. - * - * @param sources the jar contents - * @param jar the destination jar file - * @param options additional package information, e.g. jar manifest, main class or manifest attributes - */ - final class Configuration( - val sources: Seq[(File, String)], - val jar: File, - val options: Seq[PackageOption] - ) - - /** - * @param conf the package configuration that should be build - * @param cacheStoreFactory used for jar caching. We try to avoid rebuilds as much as possible - * @param log feedback for the user - */ - def apply(conf: Configuration, cacheStoreFactory: CacheStoreFactory, log: Logger): Unit = - apply(conf, cacheStoreFactory, log, timeFromConfiguration(conf)) - - /** - * @param conf the package configuration that should be build - * @param cacheStoreFactory used for jar caching. 
We try to avoid rebuilds as much as possible - * @param log feedback for the user - * @param time static timestamp to use for all entries, if any. - */ - def apply( - conf: Configuration, - cacheStoreFactory: CacheStoreFactory, - log: Logger, - time: Option[Long] - ): Unit = { - val manifest = new Manifest - val main = manifest.getMainAttributes - for (option <- conf.options) { - option match { - case JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); () - case MainClass(mainClassName) => main.put(Attributes.Name.MAIN_CLASS, mainClassName); () - case ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; () - case FixedTimestamp(value) => () - case _ => log.warn("Ignored unknown package option " + option) - } - } - setVersion(main) - - type Inputs = (Seq[(File, String)], FilesInfo[ModifiedFileInfo], Manifest) - val cachedMakeJar = inputChanged(cacheStoreFactory make "inputs") { - (inChanged, inputs: Inputs) => - import exists.format - val (sources, _, manifest) = inputs - outputChanged(cacheStoreFactory make "output") { (outChanged, jar: PlainFileInfo) => - if (inChanged || outChanged) { - makeJar(sources, jar.file, manifest, log, time) - jar.file - () - } else log.debug("Jar uptodate: " + jar.file) - } - } - - val inputFiles = conf.sources.map(_._1).toSet - val inputs = (conf.sources.distinct, lastModified(inputFiles), manifest) - cachedMakeJar(inputs)(() => exists(conf.jar)) - () - } - - /** - * updates the manifest version is there is none present. 
- * - * @param main the current jar attributes - */ - def setVersion(main: Attributes): Unit = { - val version = Attributes.Name.MANIFEST_VERSION - if (main.getValue(version) eq null) { - main.put(version, "1.0") - () - } - } - def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = { - import Attributes.Name._ - val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) - val attribVals = Seq(name, version, orgName) - ManifestAttributes(attribKeys zip attribVals: _*) - } - def addImplManifestAttributes( - name: String, - version: String, - homepage: Option[java.net.URL], - org: String, - orgName: String - ): PackageOption = { - import Attributes.Name._ - - // The ones in Attributes.Name are deprecated saying: - // "Extension mechanism will be removed in a future release. Use class path instead." - val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id") - val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL") - - val attribKeys = Seq( - IMPLEMENTATION_TITLE, - IMPLEMENTATION_VERSION, - IMPLEMENTATION_VENDOR, - IMPLEMENTATION_VENDOR_ID, - ) - val attribVals = Seq(name, version, orgName, org) - ManifestAttributes((attribKeys zip attribVals) ++ { - homepage map (h => (IMPLEMENTATION_URL, h.toString)) - }: _*) - } - - @deprecated("Specify whether to use a static timestamp", "1.4.0") - def makeJar(sources: Seq[(File, String)], jar: File, manifest: Manifest, log: Logger): Unit = - makeJar(sources, jar, manifest, log, None) - - def makeJar( - sources: Seq[(File, String)], - jar: File, - manifest: Manifest, - log: Logger, - time: Option[Long] - ): Unit = { - val path = jar.getAbsolutePath - log.debug("Packaging " + path + " ...") - if (jar.exists) - if (jar.isFile) - IO.delete(jar) - else - sys.error(path + " exists, but is not a regular file") - log.debug(sourcesDebugString(sources)) - IO.jar(sources, jar, manifest, time) - log.debug("Done packaging.") - } - def 
sourcesDebugString(sources: Seq[(File, String)]): String = - "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t")) - - implicit def manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]]( - m => { - val bos = new java.io.ByteArrayOutputStream() - m write bos - bos.toByteArray - }, - bs => new Manifest(new java.io.ByteArrayInputStream(bs)) - ) -} diff --git a/main-actions/src/main/scala/sbt/Pkg.scala b/main-actions/src/main/scala/sbt/Pkg.scala new file mode 100644 index 000000000..67f0ffe0f --- /dev/null +++ b/main-actions/src/main/scala/sbt/Pkg.scala @@ -0,0 +1,338 @@ +/* + * sbt + * Copyright 2011 - 2018, Lightbend, Inc. + * Copyright 2008 - 2010, Mark Harrah + * Licensed under Apache License 2.0 (see LICENSE) + */ + +package sbt + +import java.io.File +import java.time.OffsetDateTime +import java.util.jar.{ Attributes, Manifest } +import scala.collection.JavaConverters._ +import sbt.io.IO + +import sjsonnew.{ + :*:, + Builder, + IsoLList, + JsonFormat, + LList, + LNil, + Unbuilder, + deserializationError, + flatUnionFormat4 +} + +import sbt.util.Logger +import sbt.util.{ CacheStoreFactory, FilesInfo, ModifiedFileInfo, PlainFileInfo } +import sbt.util.FileInfo.{ exists, lastModified } +import sbt.util.CacheImplicits._ +import sbt.util.Tracked.{ inputChanged, outputChanged } +import scala.sys.process.Process +import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef } + +/** + * == Package == + * + * This module provides an API to package jar files. 
+ * + * @see [[https://docs.oracle.com/javase/tutorial/deployment/jar/index.html]] + */ +object Pkg: + def JarManifest(m: Manifest) = PackageOption.JarManifest(m) + def MainClass(mainClassName: String) = PackageOption.MainClass(mainClassName) + def MainfestAttributes(attributes: (Attributes.Name, String)*) = + PackageOption.ManifestAttributes(attributes: _*) + def ManifestAttributes(attributes: (String, String)*) = { + val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value) + PackageOption.ManifestAttributes(converted: _*) + } + // 2010-01-01 + private val default2010Timestamp: Long = 1262304000000L + def FixedTimestamp(value: Option[Long]) = PackageOption.FixedTimestamp(value) + val keepTimestamps: Option[Long] = None + val fixed2010Timestamp: Option[Long] = Some(default2010Timestamp) + def gitCommitDateTimestamp: Option[Long] = + try { + Some( + OffsetDateTime + .parse(Process("git show -s --format=%cI").!!.trim) + .toInstant() + .toEpochMilli() + ) + } catch { + case e: Exception if e.getMessage.startsWith("Nonzero") => + sys.error( + s"git repository was expected for package timestamp; use Package.fixed2010Timestamp or Package.keepTimestamps instead" + ) + } + def setFixedTimestamp(value: Option[Long]): PackageOption = + FixedTimestamp(value) + + /** by default we overwrite all timestamps in JAR to epoch time 2010-01-01 for repeatable build */ + lazy val defaultTimestamp: Option[Long] = + sys.env + .get("SOURCE_DATE_EPOCH") + .map(_.toLong * 1000) + .orElse(Some(default2010Timestamp)) + + def timeFromConfiguration(config: Configuration): Option[Long] = + (config.options.collect { case t: PackageOption.FixedTimestamp => t }).headOption match + case Some(PackageOption.FixedTimestamp(value)) => value + case _ => defaultTimestamp + + def mergeAttributes(a1: Attributes, a2: Attributes) = a1.asScala ++= a2.asScala + // merges `mergeManifest` into `manifest` (mutating `manifest` in the process) + def mergeManifests(manifest: 
Manifest, mergeManifest: Manifest): Unit = { + mergeAttributes(manifest.getMainAttributes, mergeManifest.getMainAttributes) + val entryMap = manifest.getEntries.asScala + for ((key, value) <- mergeManifest.getEntries.asScala) { + entryMap.get(key) match { + case Some(attributes) => mergeAttributes(attributes, value); () + case None => entryMap.put(key, value); () + } + } + } + + /** + * The jar package configuration. Contains all relevant information to create a jar file. + * + * @param sources the jar contents + * @param jar the destination jar file + * @param options additional package information, e.g. jar manifest, main class or manifest attributes + */ + final class Configuration( + val sources: Seq[(HashedVirtualFileRef, String)], + val jar: VirtualFileRef, + val options: Seq[PackageOption] + ) + + object Configuration: + given IsoLList.Aux[ + Configuration, + Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*: LNil + ] = + import sbt.util.CacheImplicits.given + import sbt.util.PathHashWriters.given + LList.iso( + (c: Configuration) => + ("sources", c.sources.toVector) :*: ("jar", c.jar) :*: ("options", c.options) :*: LNil, + (in: Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*: + LNil) => Configuration(in.head, in.tail.head, in.tail.tail.head), + ) + given JsonFormat[Configuration] = summon[JsonFormat[Configuration]] + end Configuration + + /** + * @param conf the package configuration that should be built + * @param converter used to convert virtual file references to concrete files + * @param log feedback for the user + */ + def apply(conf: Configuration, converter: FileConverter, log: Logger): VirtualFile = + apply(conf, converter, log, timeFromConfiguration(conf)) + + /** + * @param conf the package configuration that should be built + * @param converter used to convert virtual file references to concrete files.
We try to avoid rebuilds as much as possible + * @param log feedback for the user + * @param time static timestamp to use for all entries, if any. + */ + def apply( + conf: Configuration, + converter: FileConverter, + log: Logger, + time: Option[Long] + ): VirtualFile = + val manifest = toManifest(conf, log) + val out = converter.toPath(conf.jar).toFile() + val sources = conf.sources.map { case (vf, path) => + converter.toPath(vf).toFile() -> path + } + makeJar(sources, out, manifest, log, time) + converter.toVirtualFile(out.toPath()) + + def toManifest(conf: Configuration, log: Logger): Manifest = + val manifest = new Manifest + val main = manifest.getMainAttributes + for option <- conf.options do + option match + case PackageOption.JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); () + case PackageOption.MainClass(mainClassName) => + main.put(Attributes.Name.MAIN_CLASS, mainClassName); () + case PackageOption.ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; () + case PackageOption.FixedTimestamp(value) => () + case _ => log.warn("Ignored unknown package option " + option) + setVersion(main) + manifest + + /** + * updates the manifest version if there is none present.
+ * + * @param main the current jar attributes + */ + def setVersion(main: Attributes): Unit = { + val version = Attributes.Name.MANIFEST_VERSION + if (main.getValue(version) eq null) { + main.put(version, "1.0") + () + } + } + def addSpecManifestAttributes(name: String, version: String, orgName: String): PackageOption = { + import Attributes.Name._ + val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR) + val attribVals = Seq(name, version, orgName) + PackageOption.ManifestAttributes(attribKeys.zip(attribVals): _*) + } + def addImplManifestAttributes( + name: String, + version: String, + homepage: Option[java.net.URL], + org: String, + orgName: String + ): PackageOption = { + import Attributes.Name._ + + // The ones in Attributes.Name are deprecated saying: + // "Extension mechanism will be removed in a future release. Use class path instead." + val IMPLEMENTATION_VENDOR_ID = new Attributes.Name("Implementation-Vendor-Id") + val IMPLEMENTATION_URL = new Attributes.Name("Implementation-URL") + + val attribKeys = Seq( + IMPLEMENTATION_TITLE, + IMPLEMENTATION_VERSION, + IMPLEMENTATION_VENDOR, + IMPLEMENTATION_VENDOR_ID, + ) + val attribVals = Seq(name, version, orgName, org) + PackageOption.ManifestAttributes(attribKeys.zip(attribVals) ++ { + homepage map (h => (IMPLEMENTATION_URL, h.toString)) + }: _*) + } + + def makeJar( + sources: Seq[(File, String)], + jar: File, + manifest: Manifest, + log: Logger, + time: Option[Long] + ): Unit = { + val path = jar.getAbsolutePath + log.debug("Packaging " + path + " ...") + if (jar.exists) + if (jar.isFile) + IO.delete(jar) + else + sys.error(path + " exists, but is not a regular file") + log.debug(sourcesDebugString(sources)) + IO.jar(sources, jar, manifest, time) + log.debug("Done packaging.") + } + def sourcesDebugString(sources: Seq[(File, String)]): String = + "Input file mappings:\n\t" + (sources map { case (f, s) => s + "\n\t " + f } mkString ("\n\t")) + + given manifestFormat: 
JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]]( + m => { + val bos = new java.io.ByteArrayOutputStream() + m write bos + bos.toByteArray + }, + bs => new Manifest(new java.io.ByteArrayInputStream(bs)) + ) +end Pkg + +enum PackageOption: + case JarManifest(m: Manifest) + case MainClass(mainClassName: String) + case ManifestAttributes(attributes: (Attributes.Name, String)*) + case FixedTimestamp(value: Option[Long]) + +object PackageOption: + import Pkg.manifestFormat + + private given jarManifestFormat: JsonFormat[PackageOption.JarManifest] = + new JsonFormat[PackageOption.JarManifest]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.JarManifest = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val m = unbuilder.readField[Manifest]("m") + unbuilder.endObject() + PackageOption.JarManifest(m) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.JarManifest, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("m", obj.m) + builder.endObject() + + private given mainClassFormat: JsonFormat[PackageOption.MainClass] = + new JsonFormat[PackageOption.MainClass]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.MainClass = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val mainClassName = unbuilder.readField[String]("mainClassName") + unbuilder.endObject() + PackageOption.MainClass(mainClassName) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.MainClass, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("mainClassName", obj.mainClassName) + builder.endObject() + + private given manifestAttributesFormat: JsonFormat[PackageOption.ManifestAttributes] = + new JsonFormat[PackageOption.ManifestAttributes]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): 
PackageOption.ManifestAttributes = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val attributes = unbuilder.readField[Vector[(String, String)]]("attributes") + unbuilder.endObject() + PackageOption.ManifestAttributes(attributes.map { case (k, v) => + Attributes.Name(k) -> v + }: _*) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.ManifestAttributes, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField( + "attributes", + obj.attributes.toVector.map { case (k, v) => k.toString -> v } + ) + builder.endObject() + + private given fixedTimeStampFormat: JsonFormat[PackageOption.FixedTimestamp] = + new JsonFormat[PackageOption.FixedTimestamp]: + override def read[J]( + jsOpt: Option[J], + unbuilder: Unbuilder[J] + ): PackageOption.FixedTimestamp = + jsOpt match + case Some(js) => + unbuilder.beginObject(js) + val value = unbuilder.readField[Option[Long]]("value") + unbuilder.endObject() + PackageOption.FixedTimestamp(value) + case None => deserializationError("Expected JsObject but found None") + override def write[J](obj: PackageOption.FixedTimestamp, builder: Builder[J]): Unit = + builder.beginObject() + builder.addField("value", obj.value) + builder.endObject() + + given JsonFormat[PackageOption] = flatUnionFormat4[ + PackageOption, + PackageOption.JarManifest, + PackageOption.MainClass, + PackageOption.ManifestAttributes, + PackageOption.FixedTimestamp, + ]("type") +end PackageOption diff --git a/main-command/src/main/scala/sbt/BasicKeys.scala b/main-command/src/main/scala/sbt/BasicKeys.scala index c3f1f70e1..e616f4d25 100644 --- a/main-command/src/main/scala/sbt/BasicKeys.scala +++ b/main-command/src/main/scala/sbt/BasicKeys.scala @@ -8,13 +8,13 @@ package sbt import java.io.File - +import java.nio.file.Path import sbt.internal.inc.classpath.{ ClassLoaderCache => IncClassLoaderCache } import sbt.internal.classpath.ClassLoaderCache import 
sbt.internal.server.ServerHandler import sbt.internal.util.AttributeKey import sbt.librarymanagement.ModuleID -import sbt.util.Level +import sbt.util.{ ActionCacheStore, Level } import scala.annotation.nowarn import scala.concurrent.duration.FiniteDuration import xsbti.VirtualFile @@ -106,6 +106,20 @@ object BasicKeys { 10000 ) + val cacheStores = + AttributeKey[Seq[ActionCacheStore]]( + "cacheStores", + "Cache backends", + 10000 + ) + + val rootOutputDirectory = + AttributeKey[Path]( + "rootOutputDirectory", + "Build-wide output directory", + 10000 + ) + // Unlike other BasicKeys, this is not used directly as a setting key, // and severLog / logLevel is used instead. private[sbt] val serverLogLevel = diff --git a/main-settings/src/main/scala/sbt/Append.scala b/main-settings/src/main/scala/sbt/Append.scala index 0147a6f07..b8a9fc37c 100644 --- a/main-settings/src/main/scala/sbt/Append.scala +++ b/main-settings/src/main/scala/sbt/Append.scala @@ -15,6 +15,7 @@ import sbt.Def.{ Classpath, Initialize } import sbt.internal.io.Source import sbt.internal.util.Attributed import sbt.io.{ AllPassFilter, NothingFilter } +import xsbti.HashedVirtualFileRef object Append: @implicitNotFound("No Append.Value[${A1}, ${A2}] found, so ${A2} cannot be appended to ${A1}") @@ -67,11 +68,11 @@ object Append: implicit def appendLong: Value[Long, Long] = _ + _ implicit def appendDouble: Value[Double, Double] = _ + _ - implicit def appendClasspath: Sequence[Classpath, Seq[File], File] = - new Sequence[Classpath, Seq[File], File] { - def appendValues(a: Classpath, b: Seq[File]): Classpath = a ++ Attributed.blankSeq(b) - def appendValue(a: Classpath, b: File): Classpath = a :+ Attributed.blank(b) - } + given Sequence[Classpath, Seq[HashedVirtualFileRef], HashedVirtualFileRef] with + override def appendValues(a: Classpath, b: Seq[HashedVirtualFileRef]): Classpath = + a ++ Attributed.blankSeq(b) + override def appendValue(a: Classpath, b: HashedVirtualFileRef): Classpath = + a :+ 
Attributed.blank(b) implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] = new Sequence[Set[T], Set[V], V] { diff --git a/main-settings/src/main/scala/sbt/Def.scala b/main-settings/src/main/scala/sbt/Def.scala index 00644099c..3dd1301b2 100644 --- a/main-settings/src/main/scala/sbt/Def.scala +++ b/main-settings/src/main/scala/sbt/Def.scala @@ -8,6 +8,7 @@ package sbt import java.io.File +import java.nio.file.Path import java.net.URI import scala.annotation.compileTimeOnly @@ -18,15 +19,15 @@ import sbt.Scope.{ GlobalScope, ThisScope } import sbt.internal.util.Types.const import sbt.internal.util.complete.Parser import sbt.internal.util.{ Terminal => ITerminal, * } +import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, BuildWideCacheConfiguration, InMemoryActionCacheStore } import Util._ import sbt.util.Show -import xsbti.VirtualFile +import xsbti.{ HashedVirtualFileRef, VirtualFile } import sjsonnew.JsonFormat /** A concrete settings system that uses `sbt.Scope` for the scope type. 
*/ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: - type Classpath = Seq[Attributed[File]] - type VirtualClasspath = Seq[Attributed[VirtualFile]] + type Classpath = Seq[Attributed[HashedVirtualFileRef]] def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings) @@ -229,8 +230,21 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: import language.experimental.macros + // These are here, as opposed to RemoteCahe, since we need them from TaskMacro etc + private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore() + def cacheStore: ActionCacheStore = _cacheStore + private[sbt] var _outputDirectory: Option[Path] = None + def cacheConfiguration: BuildWideCacheConfiguration = + BuildWideCacheConfiguration( + _cacheStore, + _outputDirectory.getOrElse(sys.error("outputDirectory has not been set")), + ) + + inline def cachedTask[A1: JsonFormat](inline a1: A1): Def.Initialize[Task[A1]] = + ${ TaskMacro.taskMacroImpl[A1]('a1, cached = true) } + inline def task[A1](inline a1: A1): Def.Initialize[Task[A1]] = - ${ TaskMacro.taskMacroImpl[A1]('a1) } + ${ TaskMacro.taskMacroImpl[A1]('a1, cached = false) } inline def taskDyn[A1](inline a1: Def.Initialize[Task[A1]]): Def.Initialize[Task[A1]] = ${ TaskMacro.taskDynMacroImpl[A1]('a1) } @@ -247,7 +261,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: ${ InputTaskMacro.inputTaskMacroImpl[A1]('a) } inline def taskIf[A1](inline a: A1): Def.Initialize[Task[A1]] = - ${ TaskMacro.taskIfImpl[A1]('a) } + ${ TaskMacro.taskIfImpl[A1]('a, cached = true) } private[sbt] def selectITask[A1, A2]( fab: Initialize[Task[Either[A1, A2]]], @@ -289,6 +303,10 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: */ def promise[A]: PromiseWrap[A] = new PromiseWrap[A]() + inline def declareOutput(inline vf: VirtualFile): Unit = + InputWrapper.`wrapOutput_\u2603\u2603`[VirtualFile](vf) + + // The following 
conversions enable the types Initialize[T], Initialize[Task[T]], and Task[T] to // be used in task and setting macros as inputs with an ultimate result of type T diff --git a/main-settings/src/main/scala/sbt/Structure.scala b/main-settings/src/main/scala/sbt/Structure.scala index 69304e9ba..fa5a6c0dd 100644 --- a/main-settings/src/main/scala/sbt/Structure.scala +++ b/main-settings/src/main/scala/sbt/Structure.scala @@ -468,7 +468,7 @@ object Scoped: /** Internal function for the task macro. */ inline def taskMacro[A](inline a: A): Initialize[Task[A]] = - ${ TaskMacro.taskMacroImpl[A]('a) } + ${ TaskMacro.taskMacroImpl[A]('a, cached = false) } private[sbt] inline def :==(app: A1): Setting[Task[A1]] = set(Def.valueStrict(std.TaskExtra.constant(app))) diff --git a/main-settings/src/main/scala/sbt/std/InputConvert.scala b/main-settings/src/main/scala/sbt/std/InputConvert.scala index 3a830fa54..12a456cec 100644 --- a/main-settings/src/main/scala/sbt/std/InputConvert.scala +++ b/main-settings/src/main/scala/sbt/std/InputConvert.scala @@ -78,6 +78,7 @@ class FullConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: case InputWrapper.WrapPreviousName => Converted.success(in) case InputWrapper.WrapInitName => wrapInit[A](in) case InputWrapper.WrapTaskName => wrapTask[A](in) + case InputWrapper.WrapOutputName => Converted.success(in) case _ => Converted.NotApplicable() private def wrapInit[A: Type](tree: Term): Converted = diff --git a/main-settings/src/main/scala/sbt/std/InputTaskMacro.scala.scala b/main-settings/src/main/scala/sbt/std/InputTaskMacro.scala similarity index 97% rename from main-settings/src/main/scala/sbt/std/InputTaskMacro.scala.scala rename to main-settings/src/main/scala/sbt/std/InputTaskMacro.scala index ea6d1b948..6f39d6201 100644 --- a/main-settings/src/main/scala/sbt/std/InputTaskMacro.scala.scala +++ b/main-settings/src/main/scala/sbt/std/InputTaskMacro.scala @@ -98,7 +98,7 @@ object InputTaskMacro: val inner: 
convert1.TermTransform[F1] = (in: Term) => f(in.asExprOf[A1]).asTerm val cond = conditionInputTaskTree(tree.asTerm).asExprOf[A1] - convert1.contMapN[A1, Def.Initialize, F1](cond, convert1.appExpr, inner) + convert1.contMapN[A1, Def.Initialize, F1](cond, convert1.appExpr, None, inner) private[this] def iParserMacro[F1[_]: Type, A1: Type](tree: Expr[A1])( f: Expr[A1] => Expr[F1[A1]] @@ -106,13 +106,12 @@ object InputTaskMacro: import qctx.reflect.* val convert1 = new ParserConvert(qctx, 1000) val inner: convert1.TermTransform[F1] = (in: Term) => f(in.asExprOf[A1]).asTerm - convert1.contMapN[A1, ParserInstance.F1, F1](tree, convert1.appExpr, inner) + convert1.contMapN[A1, ParserInstance.F1, F1](tree, convert1.appExpr, None, inner) private[this] def iTaskMacro[A1: Type](tree: Expr[A1])(using qctx: Quotes): Expr[Task[A1]] = import qctx.reflect.* val convert1 = new TaskConvert(qctx, 2000) - convert1.contMapN[A1, Task, Id](tree, convert1.appExpr) - + convert1.contMapN[A1, Task, Id](tree, convert1.appExpr, None) /* private[this] def inputTaskDynMacro0[A1: Type]( expr: Expr[Def.Initialize[Task[A1]]] @@ -233,7 +232,11 @@ object InputTaskMacro: val p0 = params.head.asInstanceOf[Ident] val body2 = convert1 - .contFlatMap[A2, TaskMacro.F, Id](body.asExprOf[TaskMacro.F[A2]], convert1.appExpr) + .contFlatMap[A2, TaskMacro.F, Id]( + body.asExprOf[TaskMacro.F[A2]], + convert1.appExpr, + None, + ) .asTerm object refTransformer extends TreeMap: override def transformTerm(tree: Term)(owner: Symbol): Term = diff --git a/main-settings/src/main/scala/sbt/std/InputWrapper.scala b/main-settings/src/main/scala/sbt/std/InputWrapper.scala index 533d2bd9c..2da95f9bf 100644 --- a/main-settings/src/main/scala/sbt/std/InputWrapper.scala +++ b/main-settings/src/main/scala/sbt/std/InputWrapper.scala @@ -26,6 +26,7 @@ object InputWrapper: private[std] final val WrapTaskName = "wrapTask_\u2603\u2603" private[std] final val WrapInitName = "wrapInit_\u2603\u2603" + private[std] final val WrapOutputName 
= "wrapOutput_\u2603\u2603" private[std] final val WrapInitTaskName = "wrapInitTask_\u2603\u2603" private[std] final val WrapInitInputName = "wrapInitInputTask_\u2603\u2603" private[std] final val WrapInputName = "wrapInputTask_\u2603\u2603" @@ -41,6 +42,11 @@ object InputWrapper: ) def `wrapInit_\u2603\u2603`[T](@deprecated("unused", "") in: Any): T = implDetailError + @compileTimeOnly( + "`declareOutput` can only be used within a task macro, such as Def.cachedTask." + ) + def `wrapOutput_\u2603\u2603`[A](@deprecated("unused", "") in: Any): A = implDetailError + @compileTimeOnly( "`value` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task." ) diff --git a/main-settings/src/main/scala/sbt/std/SettingMacro.scala b/main-settings/src/main/scala/sbt/std/SettingMacro.scala index 4a0af31cb..e86a96796 100644 --- a/main-settings/src/main/scala/sbt/std/SettingMacro.scala +++ b/main-settings/src/main/scala/sbt/std/SettingMacro.scala @@ -47,16 +47,16 @@ object SettingMacro: def settingMacroImpl[A1: Type](in: Expr[A1])(using qctx: Quotes): Expr[Initialize[A1]] = val convert1 = InitializeConvert(qctx, 0) - convert1.contMapN[A1, F, Id](in, convert1.appExpr) + convert1.contMapN[A1, F, Id](in, convert1.appExpr, None) def settingDynImpl[A1: Type](in: Expr[Initialize[A1]])(using qctx: Quotes): Expr[Initialize[A1]] = val convert1 = InitializeConvert(qctx, 0) - convert1.contFlatMap[A1, F, Id](in, convert1.appExpr) + convert1.contFlatMap[A1, F, Id](in, convert1.appExpr, None) def inputMacroImpl[A1: Type](in: Expr[State => Parser[A1]])(using qctx: Quotes ): Expr[ParserGen[A1]] = val convert1 = InitializeConvert(qctx, 0) - val init1 = convert1.contMapN[State => Parser[A1], F, Id](in, convert1.appExpr) + val init1 = convert1.contMapN[State => Parser[A1], F, Id](in, convert1.appExpr, None) '{ ParserGen[A1]($init1) } end SettingMacro diff --git a/main-settings/src/main/scala/sbt/std/TaskMacro.scala 
b/main-settings/src/main/scala/sbt/std/TaskMacro.scala index 29677d2e1..c12c5ec9d 100644 --- a/main-settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main-settings/src/main/scala/sbt/std/TaskMacro.scala @@ -9,7 +9,7 @@ package sbt package std import Def.{ Initialize, Setting } -import sbt.util.{ Applicative, Monad } +import sbt.util.{ ActionCacheStore, Applicative, Monad } import sbt.internal.util.Types.Id import sbt.internal.util.appmacro.{ Cont, @@ -27,7 +27,7 @@ import language.experimental.macros import scala.annotation.tailrec import scala.reflect.internal.util.UndefinedPosition import scala.quoted.* -import sjsonnew.JsonFormat +import sjsonnew.{ BasicJsonProtocol, JsonFormat } object TaskMacro: final val AssignInitName = "set" @@ -53,15 +53,23 @@ object TaskMacro: // import LinterDSL.{ Empty => EmptyLinter } - def taskMacroImpl[A1: Type](t: Expr[A1])(using qctx: Quotes): Expr[Initialize[Task[A1]]] = + def taskMacroImpl[A1: Type](t: Expr[A1], cached: Boolean)(using + qctx: Quotes + ): Expr[Initialize[Task[A1]]] = t match - case '{ if ($cond) then $thenp else $elsep } => taskIfImpl[A1](t) + case '{ if ($cond) then $thenp else $elsep } => taskIfImpl[A1](t, cached) case _ => val convert1 = new FullConvert(qctx, 0) - convert1.contMapN[A1, F, Id](t, convert1.appExpr) + val cacheConfigExpr = + if cached then Some('{ Def.cacheConfiguration }) + else None + convert1.contMapN[A1, F, Id](t, convert1.appExpr, cacheConfigExpr) - def taskIfImpl[A1: Type](expr: Expr[A1])(using qctx: Quotes): Expr[Initialize[Task[A1]]] = + def taskIfImpl[A1: Type](expr: Expr[A1], cached: Boolean)(using + qctx: Quotes + ): Expr[Initialize[Task[A1]]] = import qctx.reflect.* + val convert1 = new FullConvert(qctx, 1000) expr match case '{ if ($cond) then $thenp else $elsep } => '{ @@ -78,7 +86,7 @@ object TaskMacro: t: Expr[Initialize[Task[A1]]] )(using qctx: Quotes): Expr[Initialize[Task[A1]]] = val convert1 = new FullConvert(qctx, 1000) - convert1.contFlatMap[A1, F, Id](t, 
convert1.appExpr) + convert1.contFlatMap[A1, F, Id](t, convert1.appExpr, None) /** Translates .previous(format) to Previous.runtime()(format).value */ def previousImpl[A1: Type](t: Expr[TaskKey[A1]])(using diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala index 982fdfd62..9593bb77b 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/CompileRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class CompileRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], val extractDirectory: java.io.File, val analysisFile: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -22,13 +22,13 @@ final class CompileRemoteCacheArtifact private ( override def toString: String = { "CompileRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile): CompileRemoteCacheArtifact = { new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) } def withArtifact(artifact: sbt.librarymanagement.Artifact): CompileRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: 
sbt.TaskKey[java.io.File]): CompileRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): CompileRemoteCacheArtifact = { copy(packaged = packaged) } def withExtractDirectory(extractDirectory: java.io.File): CompileRemoteCacheArtifact = { @@ -40,5 +40,5 @@ final class CompileRemoteCacheArtifact private ( } object CompileRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File): CompileRemoteCacheArtifact = new CompileRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala index 81ade63a2..86007ebb9 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/CustomRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class CustomRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], val extractDirectory: java.io.File, val preserveLastModified: Boolean) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -22,13 +22,13 @@ final class CustomRemoteCacheArtifact private ( override def toString: String = { "CustomRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + preserveLastModified + ")" } - private[this] def copy(artifact: 
sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): CustomRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, preserveLastModified: Boolean = preserveLastModified): CustomRemoteCacheArtifact = { new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) } def withArtifact(artifact: sbt.librarymanagement.Artifact): CustomRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): CustomRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): CustomRemoteCacheArtifact = { copy(packaged = packaged) } def withExtractDirectory(extractDirectory: java.io.File): CustomRemoteCacheArtifact = { @@ -40,5 +40,5 @@ final class CustomRemoteCacheArtifact private ( } object CustomRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, preserveLastModified: Boolean): CustomRemoteCacheArtifact = new CustomRemoteCacheArtifact(artifact, packaged, extractDirectory, preserveLastModified) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala index 7e14d1f73..ff6f097e4 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala +++ 
b/main/src/main/contraband-scala/sbt/internal/remotecache/PomRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class PomRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { + packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -20,17 +20,17 @@ final class PomRemoteCacheArtifact private ( override def toString: String = { "PomRemoteCacheArtifact(" + artifact + ", " + packaged + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged): PomRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged): PomRemoteCacheArtifact = { new PomRemoteCacheArtifact(artifact, packaged) } def withArtifact(artifact: sbt.librarymanagement.Artifact): PomRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): PomRemoteCacheArtifact = { copy(packaged = packaged) } } object PomRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): PomRemoteCacheArtifact = new PomRemoteCacheArtifact(artifact, packaged) } diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala index 01c286e24..1a2136a41 100644 --- 
a/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/RemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache abstract class RemoteCacheArtifact( val artifact: sbt.librarymanagement.Artifact, - val packaged: sbt.TaskKey[java.io.File]) extends Serializable { + val packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]) extends Serializable { diff --git a/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala b/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala index 7fb49f693..b02b6c4be 100644 --- a/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala +++ b/main/src/main/contraband-scala/sbt/internal/remotecache/TestRemoteCacheArtifact.scala @@ -6,7 +6,7 @@ package sbt.internal.remotecache final class TestRemoteCacheArtifact private ( artifact: sbt.librarymanagement.Artifact, - packaged: sbt.TaskKey[java.io.File], + packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], val extractDirectory: java.io.File, val analysisFile: java.io.File, val testResult: java.io.File) extends sbt.internal.remotecache.RemoteCacheArtifact(artifact, packaged) with Serializable { @@ -23,13 +23,13 @@ final class TestRemoteCacheArtifact private ( override def toString: String = { "TestRemoteCacheArtifact(" + artifact + ", " + packaged + ", " + extractDirectory + ", " + analysisFile + ", " + testResult + ")" } - private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[java.io.File] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = { + private[this] def copy(artifact: sbt.librarymanagement.Artifact = artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef] = packaged, extractDirectory: java.io.File = extractDirectory, analysisFile: 
java.io.File = analysisFile, testResult: java.io.File = testResult): TestRemoteCacheArtifact = { new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) } def withArtifact(artifact: sbt.librarymanagement.Artifact): TestRemoteCacheArtifact = { copy(artifact = artifact) } - def withPackaged(packaged: sbt.TaskKey[java.io.File]): TestRemoteCacheArtifact = { + def withPackaged(packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef]): TestRemoteCacheArtifact = { copy(packaged = packaged) } def withExtractDirectory(extractDirectory: java.io.File): TestRemoteCacheArtifact = { @@ -44,5 +44,5 @@ final class TestRemoteCacheArtifact private ( } object TestRemoteCacheArtifact { - def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[java.io.File], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) + def apply(artifact: sbt.librarymanagement.Artifact, packaged: sbt.TaskKey[xsbti.HashedVirtualFileRef], extractDirectory: java.io.File, analysisFile: java.io.File, testResult: java.io.File): TestRemoteCacheArtifact = new TestRemoteCacheArtifact(artifact, packaged, extractDirectory, analysisFile, testResult) } diff --git a/main/src/main/contraband/remotecache.json b/main/src/main/contraband/remotecache.json index 486d25f9b..77142117d 100644 --- a/main/src/main/contraband/remotecache.json +++ b/main/src/main/contraband/remotecache.json @@ -14,7 +14,7 @@ }, { "name": "packaged", - "type": "sbt.TaskKey[java.io.File]" + "type": "sbt.TaskKey[xsbti.HashedVirtualFileRef]" } ], "types": [ @@ -80,4 +80,4 @@ ] } ] -} \ No newline at end of file +} diff --git a/main/src/main/scala/sbt/BackgroundJobService.scala b/main/src/main/scala/sbt/BackgroundJobService.scala index 8faad7728..937b421c2 100644 --- a/main/src/main/scala/sbt/BackgroundJobService.scala +++ 
b/main/src/main/scala/sbt/BackgroundJobService.scala @@ -16,6 +16,7 @@ import java.io.File import scala.util.control.NonFatal import scala.util.{ Failure, Success, Try } +import xsbti.FileConverter abstract class BackgroundJobService extends Closeable { @@ -70,14 +71,20 @@ abstract class BackgroundJobService extends Closeable { def waitFor(job: JobHandle): Unit /** Copies classpath to temporary directories. */ - def copyClasspath(products: Classpath, full: Classpath, workingDirectory: File): Classpath + def copyClasspath( + products: Classpath, + full: Classpath, + workingDirectory: File, + converter: FileConverter + ): Classpath private[sbt] def copyClasspath( products: Classpath, full: Classpath, workingDirectory: File, - hashContents: Boolean - ): Classpath = copyClasspath(products, full, workingDirectory) + hashContents: Boolean, + converter: FileConverter, + ): Classpath = copyClasspath(products, full, workingDirectory, converter) } object BackgroundJobService { diff --git a/main/src/main/scala/sbt/Cross.scala b/main/src/main/scala/sbt/Cross.scala index 9772dd6dc..aa11c1204 100644 --- a/main/src/main/scala/sbt/Cross.scala +++ b/main/src/main/scala/sbt/Cross.scala @@ -398,7 +398,7 @@ object Cross { scope / scalaVersion := version, scope / crossScalaVersions := scalaVersions, scope / scalaHome := Some(home), - scope / scalaInstance := inst + scope / scalaInstance := inst, ) case None => Seq( diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 6fed83730..cf346cb90 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -9,7 +9,7 @@ package sbt import java.io.{ File, PrintWriter } import java.net.{ URI, URL } -import java.nio.file.{ Paths, Path => NioPath } +import java.nio.file.{ Files, Paths, Path => NioPath } import java.util.Optional import java.util.concurrent.TimeUnit import lmcoursier.CoursierDependencyResolution @@ -25,7 +25,7 @@ import sbt.Project.{ inScope, inTask, 
// richInitialize, - // richInitializeTask, + // richInitializeTask, // richTaskSessionVar, // sbtRichTaskPromise } @@ -84,7 +84,7 @@ import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob } import sbt.nio.Watch import sbt.std.TaskExtra.* import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint } -import sbt.util.CacheImplicits._ +import sbt.util.CacheImplicits.given import sbt.util.InterfaceUtil.{ t2, toJavaFunction => f1 } import sbt.util._ import sjsonnew._ @@ -107,7 +107,7 @@ import sbt.internal.inc.{ MixedAnalyzingCompiler, ScalaInstance } -import xsbti.{ CrossValue, VirtualFile, VirtualFileRef } +import xsbti.{ CrossValue, HashedVirtualFileRef, VirtualFile, VirtualFileRef } import xsbti.compile.{ AnalysisContents, ClassFileManagerType, @@ -142,12 +142,21 @@ object Defaults extends BuildCommon { def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app) - def extractAnalysis[T](a: Attributed[T]): (T, CompileAnalysis) = - (a.data, a.metadata get Keys.analysis getOrElse Analysis.Empty) + def extractAnalysis[A1](a: Attributed[A1]): (A1, CompileAnalysis) = + ( + a.data, + a.metadata.get(Keys.analysis) match + case Some(ref) => RemoteCache.getCachedAnalysis(ref) + case None => Analysis.Empty + ) def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[CompileAnalysis] = { - val m = (for (a <- cp; an <- a.metadata get Keys.analysis) yield (a.data, an)).toMap - m.get _ + val m = (for { + a <- cp + ref <- a.metadata.get(Keys.analysis) + an = RemoteCache.getCachedAnalysis(ref) + } yield (a.data, an)).toMap + m.get(_) } private[sbt] def globalDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = @@ -225,14 +234,14 @@ object Defaults extends BuildCommon { bgCopyClasspath :== true, closeClassLoaders :== SysProp.closeClassLoaders, allowZombieClassLoaders :== true, - packageTimestamp :== Package.defaultTimestamp, + packageTimestamp :== Pkg.defaultTimestamp, ) ++ BuildServerProtocol.globalSettings private[sbt] lazy val 
globalIvyCore: Seq[Setting[_]] = Seq( internalConfigurationMap :== Configurations.internalMap _, credentials :== SysProp.sbtCredentialsEnv.toList, - exportJars :== false, + exportJars :== true, trackInternalDependencies :== TrackLevel.TrackAlways, exportToInternal :== TrackLevel.TrackAlways, useCoursier :== SysProp.defaultUseCoursier, @@ -411,7 +420,9 @@ object Defaults extends BuildCommon { val ih = app.provider.scalaProvider.launcher.ivyHome val coursierCache = csrCacheDirectory.value val javaHome = Paths.get(sys.props("java.home")) - Map( + val out = rootOutputDirectory.value + ListMap( + "OUT" -> out, "BASE" -> base.toPath, "SBT_BOOT" -> boot.toPath, "CSR_CACHE" -> coursierCache.toPath, @@ -547,7 +558,7 @@ object Defaults extends BuildCommon { // Appended to JvmPlugin.projectSettings def paths: Seq[Setting[_]] = Seq( baseDirectory := thisProject.value.base, - target := baseDirectory.value / "target", + target := rootOutputDirectory.value.resolve(outputPath.value).toFile(), // Use a different history path for jline3 because the jline2 format is // incompatible. By sbt 1.4.0, we should consider revering this to t / ".history" // and possibly rewriting the jline2 history in a jline3 compatible format if the @@ -555,8 +566,8 @@ object Defaults extends BuildCommon { // going back and forth between 1.3.x and 1.4.x. 
historyPath := (historyPath or target(t => Option(t / ".history3"))).value, sourceDirectory := baseDirectory.value / "src", - sourceManaged := crossTarget.value / "src_managed", - resourceManaged := crossTarget.value / "resource_managed", + sourceManaged := target.value / "src_managed", + resourceManaged := target.value / "resource_managed", // Adds subproject build.sbt files to the global list of build files to monitor Scope.Global / checkBuildSources / pollInterval :== new FiniteDuration(Int.MinValue, TimeUnit.MILLISECONDS), @@ -646,7 +657,7 @@ object Defaults extends BuildCommon { // This exists for binary compatibility and probably never should have been public. def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil lazy val outputConfigPaths: Seq[Setting[_]] = Seq( - classDirectory := crossTarget.value / (prefix(configuration.value.name) + "classes"), + classDirectory := target.value / (prefix(configuration.value.name) + "classes"), backendOutput := { val converter = fileConverter.value val dir = classDirectory.value @@ -654,16 +665,15 @@ object Defaults extends BuildCommon { }, earlyOutput / artifactPath := configArtifactPathSetting(artifact, "early").value, earlyOutput := { - val converter = fileConverter.value - val jar = (earlyOutput / artifactPath).value - converter.toVirtualFile(jar.toPath) + (earlyOutput / artifactPath).value match + case vf: VirtualFile => vf }, - semanticdbTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "meta"), - compileAnalysisTargetRoot := crossTarget.value / (prefix(configuration.value.name) + "zinc"), - earlyCompileAnalysisTargetRoot := crossTarget.value / (prefix( + semanticdbTargetRoot := target.value / (prefix(configuration.value.name) + "meta"), + compileAnalysisTargetRoot := target.value / (prefix(configuration.value.name) + "zinc"), + earlyCompileAnalysisTargetRoot := target.value / (prefix( configuration.value.name ) + "early-zinc"), - doc / target := crossTarget.value / 
(prefix(configuration.value.name) + "api") + doc / target := target.value / (prefix(configuration.value.name) + "api") ) // This is included into JvmPlugin.projectSettings @@ -708,14 +718,7 @@ object Defaults extends BuildCommon { CrossVersion(scalaV, binVersion)(base).withCrossVersion(Disabled()) }, crossSbtVersions := Vector((pluginCrossBuild / sbtVersion).value), - crossTarget := makeCrossTarget( - target.value, - scalaVersion.value, - scalaBinaryVersion.value, - (pluginCrossBuild / sbtBinaryVersion).value, - sbtPlugin.value, - crossPaths.value - ), + crossTarget := target.value, cleanIvy := IvyActions.cleanCachedResolutionCache(ivyModule.value, streams.value.log), clean := clean.dependsOnTask(cleanIvy).value, scalaCompilerBridgeBinaryJar := Def.settingDyn { @@ -886,8 +889,10 @@ object Defaults extends BuildCommon { }, compileOptions := { val opts = (compile / compileOptions).value - val cp0 = dependencyVirtualClasspath.value - val cp = backendOutput.value +: data(cp0) + val cp0 = dependencyClasspath.value + val cp1 = backendOutput.value +: data(cp0) + val converter = fileConverter.value + val cp = cp1.map(converter.toPath).map(converter.toVirtualFile) opts.withClasspath(cp.toArray) } ) @@ -936,7 +941,6 @@ object Defaults extends BuildCommon { }, internalDependencyConfigurations := InternalDependencies.configurations.value, manipulateBytecode := compileSplit.value, - compileIncremental := compileIncrementalTask.tag(Tags.Compile, Tags.CPU).value, printWarnings := printWarningsTask.value, compileAnalysisFilename := { // Here, if the user wants cross-scala-versioning, we also append it @@ -963,7 +967,7 @@ object Defaults extends BuildCommon { Option( TransactionalManagerType .of( // https://github.com/sbt/sbt/issues/1673 - crossTarget.value / s"${prefix(configuration.value.name)}classes.bak", + target.value / s"${prefix(configuration.value.name)}classes.bak", streams.value.log ): ClassFileManagerType ).toOptional @@ -1031,7 +1035,7 @@ object Defaults extends 
BuildCommon { // note that we use the same runner and mainClass as plain run mainBgRunMainTaskForConfig(This), mainBgRunTaskForConfig(This) - ) ++ inTask(run)(runnerSettings ++ newRunnerSettings) + ) ++ inTask(run)(runnerSettings ++ newRunnerSettings) ++ compileIncrementalTaskSettings private[this] lazy val configGlobal = globalDefaults( Seq( @@ -1280,7 +1284,8 @@ object Defaults extends BuildCommon { (test / javaOptions), (classLoaderLayeringStrategy), thisProject, - ).flatMapN { case (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj) => + fileConverter, + ).flatMapN { case (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj, c) => allTestGroupsTask( s, lt, @@ -1292,6 +1297,7 @@ object Defaults extends BuildCommon { jo, clls, projectId = s"${thisProj.id} / ", + c, ) } }.value, @@ -1417,9 +1423,10 @@ object Defaults extends BuildCommon { Def.task { val cp = (test / fullClasspath).value val s = (test / streams).value - val ans: Seq[Analysis] = cp.flatMap(_.metadata get Keys.analysis) map { case a0: Analysis => - a0 - } + val ans: Seq[Analysis] = cp + .flatMap(_.metadata.get(Keys.analysis)) + .map: str => + RemoteCache.getCachedAnalysis(str).asInstanceOf[Analysis] val succeeded = TestStatus.read(succeededFile(s.cacheDirectory)) val stamps = collection.mutable.Map.empty[String, Long] def stamp(dep: String): Long = { @@ -1483,6 +1490,7 @@ object Defaults extends BuildCommon { javaOptions.value, classLoaderLayeringStrategy.value, projectId = s"${thisProject.value.id} / ", + converter = fileConverter.value, ) val taskName = display.show(resolvedScoped.value) val trl = testResultLogger.value @@ -1516,6 +1524,7 @@ object Defaults extends BuildCommon { groups: Seq[Tests.Group], config: Tests.Execution, cp: Classpath, + converter: FileConverter, ): Task[Tests.Output] = { allTestGroupsTask( s, @@ -1528,6 +1537,7 @@ object Defaults extends BuildCommon { javaOptions = Nil, strategy = ClassLoaderLayeringStrategy.ScalaLibrary, projectId = "", + converter = converter, ) } @@ -1538,7 
+1548,8 @@ object Defaults extends BuildCommon { groups: Seq[Tests.Group], config: Tests.Execution, cp: Classpath, - forkedParallelExecution: Boolean + converter: FileConverter, + forkedParallelExecution: Boolean, ): Task[Tests.Output] = { allTestGroupsTask( s, @@ -1551,6 +1562,7 @@ object Defaults extends BuildCommon { javaOptions = Nil, strategy = ClassLoaderLayeringStrategy.ScalaLibrary, projectId = "", + converter = converter, ) } @@ -1564,7 +1576,8 @@ object Defaults extends BuildCommon { forkedParallelExecution: Boolean, javaOptions: Seq[String], strategy: ClassLoaderLayeringStrategy, - projectId: String + projectId: String, + converter: FileConverter, ): Task[Tests.Output] = { val processedOptions: Map[Tests.Group, Tests.ProcessedOptions] = groups @@ -1594,7 +1607,8 @@ object Defaults extends BuildCommon { runners, processedOptions(group), forkedConfig, - cp.files, + data(cp), + converter, opts, s.log, (Tags.ForkedTestGroup, 1) +: group.tags: _* @@ -1716,10 +1730,10 @@ object Defaults extends BuildCommon { val ts = packageTimestamp.value val old = packageOptions.value - Package.addSpecManifestAttributes(n, ver, orgName) +: - Package.addImplManifestAttributes(n, ver, homepage.value, org, orgName) +: - Package.setFixedTimestamp(ts) +: - main.map(Package.MainClass.apply) ++: old + Pkg.addSpecManifestAttributes(n, ver, orgName) +: + Pkg.addImplManifestAttributes(n, ver, homepage.value, org, orgName) +: + Pkg.setFixedTimestamp(ts) +: + main.map(Pkg.MainClass.apply) ++: old } ) ) ++ @@ -1728,11 +1742,11 @@ object Defaults extends BuildCommon { packageOptions := { val old = packageOptions.value val ts = packageTimestamp.value - Package.addSpecManifestAttributes( + Pkg.addSpecManifestAttributes( name.value, version.value, organizationName.value - ) +: Package.setFixedTimestamp(ts) +: old + ) +: Pkg.setFixedTimestamp(ts) +: old } ) ) ++ @@ -1741,47 +1755,91 @@ object Defaults extends BuildCommon { packageTaskSettings(packageDoc, packageDocMappings) ++ 
Seq(Keys.`package` := packageBin.value) - def packageBinMappings: Initialize[Task[Seq[(File, String)]]] = - products.map { _ flatMap Path.allSubpaths } - def packageDocMappings: Initialize[Task[Seq[(File, String)]]] = - doc.map { x => Path.allSubpaths(x).toSeq } - def packageSrcMappings: Initialize[Task[Seq[(File, String)]]] = + def packageBinMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + Def.task { + val converter = fileConverter.value + val xs = products.value + xs + .flatMap(Path.allSubpaths) + .filter(_._1.isFile()) + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } + } + + def packageDocMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + Def.task { + val converter = fileConverter.value + val d = doc.value + Path + .allSubpaths(d) + .toSeq + .filter(_._1.isFile()) + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } + } + + def packageSrcMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = concatMappings(resourceMappings, sourceMappings) - private type Mappings = Initialize[Task[Seq[(File, String)]]] + private type Mappings = Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] def concatMappings(as: Mappings, bs: Mappings): Mappings = - as.zipWith(bs) { (a: Task[Seq[(File, String)]], b: Task[Seq[(File, String)]]) => - (a, b).mapN { case (seq1: Seq[(File, String)], seq2: Seq[(File, String)]) => - seq1 ++ seq2 - } + as.zipWith(bs) { + ( + a: Task[Seq[(HashedVirtualFileRef, String)]], + b: Task[Seq[(HashedVirtualFileRef, String)]] + ) => + (a, b).mapN { + case ( + seq1: Seq[(HashedVirtualFileRef, String)], + seq2: Seq[(HashedVirtualFileRef, String)] + ) => + seq1 ++ seq2 + } } // drop base directories, since there are no valid mappings for these - def sourceMappings: Initialize[Task[Seq[(File, String)]]] = + def sourceMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = 
Def.task { + val converter = fileConverter.value val sdirs = unmanagedSourceDirectories.value val base = baseDirectory.value val relative = (f: File) => relativeTo(sdirs)(f).orElse(relativeTo(base)(f)).orElse(flat(f)) val exclude = Set(sdirs, base) - unmanagedSources.value.flatMap { - case s if !exclude(s) => relative(s).map(s -> _) - case _ => None - } + unmanagedSources.value + .flatMap { + case s if !exclude(s) => relative(s).map(s -> _) + case _ => None + } + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } } - def resourceMappings = relativeMappings(unmanagedResources, unmanagedResourceDirectories) + def resourceMappings: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + relativeMappings(unmanagedResources, unmanagedResourceDirectories) def relativeMappings( files: Taskable[Seq[File]], dirs: Taskable[Seq[File]] - ): Initialize[Task[Seq[(File, String)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = Def.task { + val converter = fileConverter.value val rdirs = dirs.toTask.value.toSet val relative = (f: File) => relativeTo(rdirs)(f).orElse(flat(f)) - files.toTask.value.flatMap { - case r if !rdirs(r) => relative(r).map(r -> _) - case _ => None - } + files.toTask.value + .flatMap { + case r if !rdirs(r) => relative(r).map(r -> _) + case _ => None + } + .map { case (p, path) => + val vf = converter.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } } def collectFiles( @@ -1796,7 +1854,8 @@ object Defaults extends BuildCommon { def relativeMappings( // forward to widened variant files: ScopedTaskable[Seq[File]], dirs: ScopedTaskable[Seq[File]] - ): Initialize[Task[Seq[(File, String)]]] = relativeMappings(files: Taskable[Seq[File]], dirs) + ): Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] = + relativeMappings(files: Taskable[Seq[File]], dirs) def collectFiles( // forward to widened variant dirs: ScopedTaskable[Seq[File]], @@ -1807,10 +1866,11 @@ object 
Defaults extends BuildCommon { private[sbt] def configArtifactPathSetting( art: SettingKey[Artifact], extraPrefix: String - ): Initialize[File] = + ): Initialize[VirtualFile] = Def.setting { val f = artifactName.value - crossTarget.value / + val converter = fileConverter.value + val p = target.value / (prefix(configuration.value.name) + extraPrefix) / f( ScalaVersion( (artifactName / scalaVersion).value, @@ -1819,15 +1879,17 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + converter.toVirtualFile(p.toPath()) } private[sbt] def prefixArtifactPathSetting( art: SettingKey[Artifact], extraPrefix: String - ): Initialize[File] = + ): Initialize[VirtualFileRef] = Def.setting { val f = artifactName.value - crossTarget.value / extraPrefix / f( + val converter = fileConverter.value + val p = target.value / extraPrefix / f( ScalaVersion( (artifactName / scalaVersion).value, (artifactName / scalaBinaryVersion).value @@ -1835,12 +1897,13 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + converter.toVirtualFile(p.toPath()) } - def artifactPathSetting(art: SettingKey[Artifact]): Initialize[File] = + def artifactPathSetting(art: SettingKey[Artifact]): Initialize[VirtualFileRef] = Def.setting { val f = artifactName.value - crossTarget.value / f( + val p = target.value / f( ScalaVersion( (artifactName / scalaVersion).value, (artifactName / scalaBinaryVersion).value @@ -1848,6 +1911,8 @@ object Defaults extends BuildCommon { projectID.value, art.value ) + val converter = fileConverter.value + converter.toVirtualFile(p.toPath()) } def artifactSetting: Initialize[Artifact] = @@ -1875,45 +1940,44 @@ object Defaults extends BuildCommon { } } - @deprecated("The configuration(s) should not be decided based on the classifier.", "1.0.0") - def artifactConfigurations( - base: Artifact, - scope: Configuration, - classifier: Option[String] - ): Iterable[Configuration] = - classifier match { - case Some(c) => Artifact.classifierConf(c) :: Nil - case 
None => scope :: Nil - } - - def packageTaskSettings(key: TaskKey[File], mappingsTask: Initialize[Task[Seq[(File, String)]]]) = + def packageTaskSettings( + key: TaskKey[HashedVirtualFileRef], + mappingsTask: Initialize[Task[Seq[(HashedVirtualFileRef, String)]]] + ) = inTask(key)( Seq( (TaskZero / key) := packageTask.value, packageConfiguration := packageConfigurationTask.value, mappings := mappingsTask.value, - packagedArtifact := (artifact.value -> key.value), + packagedArtifact := artifact.value -> key.value, artifact := artifactSetting.value, artifactPath := artifactPathSetting(artifact).value ) ) - def packageTask: Initialize[Task[File]] = - Def.task { + def packageTask: Initialize[Task[HashedVirtualFileRef]] = + Def.cachedTask { val config = packageConfiguration.value val s = streams.value - Package( + val converter = fileConverter.value + val out = Pkg( config, - s.cacheStoreFactory, + converter, s.log, - Package.timeFromConfiguration(config) + Pkg.timeFromConfiguration(config) ) - config.jar + s.log.debug(s"wrote $out") + Def.declareOutput(out) + out } - def packageConfigurationTask: Initialize[Task[Package.Configuration]] = + def packageConfigurationTask: Initialize[Task[Pkg.Configuration]] = Def.task { - new Package.Configuration(mappings.value, artifactPath.value, packageOptions.value) + Pkg.Configuration( + mappings.value, + artifactPath.value, + packageOptions.value, + ) } def askForMainClass(classes: Seq[String]): Option[String] = @@ -1987,19 +2051,28 @@ object Defaults extends BuildCommon { val (mainClass, args) = parser.parsed val hashClasspath = (bgRunMain / bgHashClasspath).value val wrapper = termWrapper(canonicalInput.value, echoInput.value) + val converter = fileConverter.value service.runInBackgroundWithLoader(resolvedScoped.value, state.value) { (logger, workingDir) => - val files = - if (copyClasspath.value) - service.copyClasspath(products.value, classpath.value, workingDir, hashClasspath) + val cp = + if copyClasspath.value then + 
service.copyClasspath( + products.value, + classpath.value, + workingDir, + hashClasspath, + converter, + ) else classpath.value - val cp = data(files) - scalaRun.value match { + given FileConverter = fileConverter.value + scalaRun.value match case r: Run => - val loader = r.newLoader(cp) - (Some(loader), wrapper(() => r.runWithLoader(loader, cp, mainClass, args, logger).get)) + val loader = r.newLoader(cp.files) + ( + Some(loader), + wrapper(() => r.runWithLoader(loader, cp.files, mainClass, args, logger).get) + ) case sr => - (None, wrapper(() => sr.run(mainClass, cp, args, logger).get)) - } + (None, wrapper(() => sr.run(mainClass, cp.files, args, logger).get)) } } } @@ -2018,18 +2091,28 @@ object Defaults extends BuildCommon { val mainClass = mainClassTask.value getOrElse sys.error("No main class detected.") val hashClasspath = (bgRun / bgHashClasspath).value val wrapper = termWrapper(canonicalInput.value, echoInput.value) + val converter = fileConverter.value service.runInBackgroundWithLoader(resolvedScoped.value, state.value) { (logger, workingDir) => - val files = - if (copyClasspath.value) - service.copyClasspath(products.value, classpath.value, workingDir, hashClasspath) + val cp = + if copyClasspath.value then + service.copyClasspath( + products.value, + classpath.value, + workingDir, + hashClasspath, + converter + ) else classpath.value - val cp = data(files) + given FileConverter = converter scalaRun.value match case r: Run => - val loader = r.newLoader(cp) - (Some(loader), wrapper(() => r.runWithLoader(loader, cp, mainClass, args, logger).get)) + val loader = r.newLoader(cp.files) + ( + Some(loader), + wrapper(() => r.runWithLoader(loader, cp.files, mainClass, args, logger).get) + ) case sr => - (None, wrapper(() => sr.run(mainClass, cp, args, logger).get)) + (None, wrapper(() => sr.run(mainClass, cp.files, args, logger).get)) } } @@ -2058,7 +2141,11 @@ object Defaults extends BuildCommon { loadForParser(discoveredMainClasses)((s, names) => 
runMainParser(s, names getOrElse Nil)) Def.inputTask { val (mainClass, args) = parser.parsed - scalaRun.value.run(mainClass, data(classpath.value), args, streams.value.log).get + val cp = classpath.value + given FileConverter = fileConverter.value + scalaRun.value + .run(mainClass, cp.files, args, streams.value.log) + .get } } @@ -2071,7 +2158,9 @@ object Defaults extends BuildCommon { Def.inputTask { val in = parser.parsed val mainClass = mainClassTask.value getOrElse sys.error("No main class detected.") - scalaRun.value.run(mainClass, data(classpath.value), in, streams.value.log).get + val cp = classpath.value + given FileConverter = fileConverter.value + scalaRun.value.run(mainClass, cp.files, in, streams.value.log).get } def runnerTask: Setting[Task[ScalaRun]] = runner := runnerInit.value @@ -2150,7 +2239,7 @@ object Defaults extends BuildCommon { val dependencyCp = dependencyClasspath.value val log = streams.value.log if (autoAPIMappings.value) APIMappings.extract(dependencyCp, log).toMap - else Map.empty[File, URL] + else Map.empty[HashedVirtualFileRef, URL] }, fileInputOptions := Seq("-doc-root-content", "-diagrams-dot-path"), scalacOptions := { @@ -2186,43 +2275,48 @@ object Defaults extends BuildCommon { val allDeps = allDependencies.value (hasScala, hasJava) match { case (true, _) => - val options = sOpts ++ Opts.doc.externalAPI(xapis) - val runDoc = Doc.scaladoc( - label, - s.cacheStoreFactory sub "scala", - cs.scalac match { - case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) - }, - fiOpts - ) + val xapisFiles = xapis.map { case (k, v) => + converter.toPath(k).toFile() -> v + } + val options = sOpts ++ Opts.doc.externalAPI(xapisFiles) + val scalac = cs.scalac match + case ac: AnalyzingCompiler => ac.onArgs(exported(s, "scaladoc")) def isScala3Doc(module: ModuleID): Boolean = { module.configurations.exists(_.startsWith(Configurations.ScalaDocTool.name)) && module.name == ScalaArtifacts.Scala3DocID } - if (ScalaArtifacts.isScala3M123(sv) && 
!allDeps.exists(isScala3Doc)) { - Array( - "Unresolved scala3doc artifact", - "add 'ThisBuild / resolvers += Resolver.JCenterRepository'" - ).foreach(m => s.log.error(m)) - } - val docSrcs = if (ScalaArtifacts.isScala3(sv)) tFiles else srcs - runDoc(docSrcs, cp, out, options, maxErrors.value, s.log) + val docSrcFiles = if ScalaArtifacts.isScala3(sv) then tFiles else srcs + // todo: cache this + if docSrcFiles.nonEmpty then + IO.delete(out) + IO.createDirectory(out) + // use PlainVirtualFile since Scaladoc currently doesn't handle actual VirtualFiles + scalac.doc( + docSrcFiles.map(_.toPath()).map(new sbt.internal.inc.PlainVirtualFile(_)), + cp.map(converter.toPath).map(new sbt.internal.inc.PlainVirtualFile(_)), + converter, + out.toPath(), + options, + maxErrors.value, + s.log, + ) + else () case (_, true) => - val javadoc = - sbt.inc.Doc.cachedJavadoc(label, s.cacheStoreFactory sub "java", cs.javaTools) - javadoc.run( - srcs.toList map { x => - converter.toVirtualFile(x.toPath) - }, - cp map { x => - converter.toVirtualFile(x.toPath) - }, - converter, - out.toPath, - javacOptions.value.toList, + import sbt.internal.inc.javac.JavaCompilerArguments + val javaSourcesOnly: VirtualFile => Boolean = _.id.endsWith(".java") + val classpath = cp.map(converter.toPath).map(converter.toVirtualFile) + val options = javacOptions.value.toList + cs.javaTools.javadoc.run( + srcs.toArray + .map { x => + converter.toVirtualFile(x.toPath) + } + .filter(javaSourcesOnly), + JavaCompilerArguments(Nil, classpath, options).toArray, + CompileOutput(out.toPath), IncToolOptionsUtil.defaultIncToolOptions(), + reporter, s.log, - reporter ) case _ => () // do nothing } @@ -2268,7 +2362,9 @@ object Defaults extends BuildCommon { Def.task { val si = (task / scalaInstance).value val s = streams.value - val cpFiles = data((task / classpath).value) + val cp = data((task / classpath).value) + val converter = fileConverter.value + val cpFiles = cp.map(converter.toPath).map(_.toFile()) val fullcp = 
(cpFiles ++ si.allJars).distinct val tempDir = IO.createUniqueDirectory((task / taskTemporaryDirectory).value).toPath val loader = ClasspathUtil.makeLoader(fullcp.map(_.toPath), si, tempDir) @@ -2299,20 +2395,22 @@ private[sbt] def compileScalaBackendTask: Initialize[Task[CompileResult]] = Def.task { val setup: Setup = compileIncSetup.value val useBinary: Boolean = enableBinaryCompileAnalysis.value - val analysisResult: CompileResult = compileIncremental.value + val _ = compileIncremental.value val exportP = exportPipelining.value // Save analysis midway if pipelining is enabled - if (analysisResult.hasModified && exportP) { - val store = - MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary) - val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup()) - store.set(contents) + val store = MixedAnalyzingCompiler.staticCachedStore(setup.cachePath, !useBinary) + val contents = store.unsafeGet() + if (exportP) { // this stores the early analysis (again) in case the subproject contains a macro setup.earlyAnalysisStore.toOption map { earlyStore => earlyStore.set(contents) } } - analysisResult + CompileResult.of( + contents.getAnalysis(), + contents.getMiniSetup(), + contents.getAnalysis().readCompilations().getAllCompilations().nonEmpty + ) } /** @@ -2336,6 +2434,7 @@ object Defaults extends BuildCommon { compile.value } } + def compileTask: Initialize[Task[CompileAnalysis]] = Def.task { val setup: Setup = compileIncSetup.value val useBinary: Boolean = enableBinaryCompileAnalysis.value @@ -2356,17 +2455,75 @@ object Defaults extends BuildCommon { } analysis } - def compileIncrementalTask = Def.task { - val s = streams.value - val ci = (compile / compileInputs).value - val ping = earlyOutputPing.value - val reporter = (compile / bspReporter).value - BspCompileTask.compute(bspTargetIdentifier.value, thisProjectRef.value, configuration.value) { - task => - // TODO - Should 
readAnalysis + saveAnalysis be scoped by the compile task too? - compileIncrementalTaskImpl(task, s, ci, ping, reporter) - } - } + + def compileIncrementalTaskSettings = + inTask(compileIncremental)( + Seq( + (TaskZero / compileIncremental) := (Def + .cachedTask { + val s = streams.value + val ci = (compile / compileInputs).value + // This is a cacheable version + val ci2 = (compile / compileInputs2).value + val ping = (TaskZero / earlyOutputPing).value + val reporter = (compile / bspReporter).value + val setup: Setup = (TaskZero / compileIncSetup).value + val useBinary: Boolean = enableBinaryCompileAnalysis.value + val c = fileConverter.value + val analysisResult: CompileResult = + BspCompileTask + .compute(bspTargetIdentifier.value, thisProjectRef.value, configuration.value) { + bspTask => + // TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too? + compileIncrementalTaskImpl(bspTask, s, ci, ping, reporter) + } + val analysisOut = c.toVirtualFile(setup.cachePath()) + val store = + MixedAnalyzingCompiler.staticCachedStore(setup.cachePath, !useBinary) + val contents = + AnalysisContents.create(analysisResult.analysis(), analysisResult.setup()) + store.set(contents) + Def.declareOutput(analysisOut) + val dir = classDirectory.value + if (dir / "META-INF" / "MANIFEST.MF").exists then + IO.delete(dir / "META-INF" / "MANIFEST.MF") + // inline mappings + val mappings = Path + .allSubpaths(dir) + .filter(_._1.isFile()) + .map { case (p, path) => + val vf = c.toVirtualFile(p.toPath()) + (vf: HashedVirtualFileRef) -> path + } + .toSeq + // inlined to avoid caching mappings + val pkgConfig = Pkg.Configuration( + mappings, + artifactPath.value, + packageOptions.value, + ) + val out = Pkg( + pkgConfig, + c, + s.log, + Pkg.timeFromConfiguration(pkgConfig) + ) + s.log.debug(s"wrote $out") + Def.declareOutput(out) + analysisResult.hasModified() -> (out: HashedVirtualFileRef) + }) + .tag(Tags.Compile, Tags.CPU) + .value, + packagedArtifact := { + val 
(hasModified, out) = compileIncremental.value + artifact.value -> out + }, + artifact := artifactSetting.value, + artifactClassifier := Some("noresources"), + artifactPath := artifactPathSetting(artifact).value, + ) + ) + private val incCompiler = ZincUtil.defaultIncrementalCompiler private[sbt] def compileJavaTask: Initialize[Task[CompileResult]] = Def.task { val s = streams.value @@ -2388,6 +2545,7 @@ object Defaults extends BuildCommon { throw e } } + private[this] def compileIncrementalTaskImpl( task: BspCompileTask, s: TaskStreams, @@ -2396,43 +2554,35 @@ object Defaults extends BuildCommon { reporter: BuildServerReporter, ): CompileResult = { lazy val x = s.text(ExportStream) - def onArgs(cs: Compilers) = { + def onArgs(cs: Compilers) = cs.withScalac( - cs.scalac match { + cs.scalac match case ac: AnalyzingCompiler => ac.onArgs(exported(x, "scalac")) case x => x - } ) - } - def onProgress(s: Setup) = { + def onProgress(s: Setup) = val cp = new BspCompileProgress(task, s.progress.asScala) s.withProgress(cp) - } val compilers: Compilers = ci.compilers val setup: Setup = ci.setup - val i = ci - .withCompilers(onArgs(compilers)) - .withSetup(onProgress(setup)) - try { + val i = ci.withCompilers(onArgs(compilers)).withSetup(onProgress(setup)) + try val result = incCompiler.compile(i, s.log) reporter.sendSuccessReport(result.getAnalysis) result - } catch { + catch case e: Throwable => - if (!promise.isCompleted) { + if !promise.isCompleted then promise.failure(e) ConcurrentRestrictions.cancelAllSentinels() - } reporter.sendFailureReport(ci.options.sources) - throw e - } finally { - x.close() // workaround for #937 - } + finally x.close() // workaround for #937 } + def compileIncSetupTask = Def.task { val cp = dependencyPicklePath.value - val lookup = new PerClasspathEntryLookup { + val lookup = new PerClasspathEntryLookup: private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] = analysisMap(cp) private val cachedPerEntryDefinesClassLookup: 
VirtualFile => DefinesClass = @@ -2441,12 +2591,12 @@ object Defaults extends BuildCommon { cachedAnalysisMap(classpathEntry).toOptional override def definesClass(classpathEntry: VirtualFile): DefinesClass = cachedPerEntryDefinesClassLookup(classpathEntry) - } val extra = extraIncOptions.value.map(t2) val useBinary: Boolean = enableBinaryCompileAnalysis.value val eapath = earlyCompileAnalysisFile.value.toPath val eaOpt = - if (exportPipelining.value) Some(MixedAnalyzingCompiler.staticCachedStore(eapath, !useBinary)) + if exportPipelining.value then + Some(MixedAnalyzingCompiler.staticCachedStore(eapath, !useBinary)) else None Setup.of( lookup, @@ -2460,14 +2610,16 @@ object Defaults extends BuildCommon { extra.toArray, ) } + def compileInputsSettings: Seq[Setting[_]] = compileInputsSettings(dependencyPicklePath) - def compileInputsSettings(classpathTask: TaskKey[VirtualClasspath]): Seq[Setting[_]] = { + def compileInputsSettings(classpathTask: TaskKey[Classpath]): Seq[Setting[_]] = { Seq( compileOptions := { val c = fileConverter.value val cp0 = classpathTask.value - val cp = backendOutput.value +: data(cp0) + val cp1 = backendOutput.value +: data(cp0) + val cp = cp1.map(c.toPath).map(c.toVirtualFile) val vs = sources.value.toVector map { x => c.toVirtualFile(x.toPath) } @@ -2509,7 +2661,18 @@ object Defaults extends BuildCommon { setup, prev ) - } + }, + // todo: Zinc's hashing should automatically handle directories + compileInputs2 := { + val cp0 = classpathTask.value + val inputs = compileInputs.value + CompileInputs2( + data(cp0).toVector, + inputs.options.sources.toVector, + scalacOptions.value.toVector, + javacOptions.value.toVector, + ) + }, ) } @@ -2582,7 +2745,7 @@ object Defaults extends BuildCommon { val t = classDirectory.value val dirs = resourceDirectories.value.toSet val s = streams.value - val syncDir = crossTarget.value / (prefix(configuration.value.name) + "sync") + val syncDir = target.value / (prefix(configuration.value.name) + "sync") val 
factory = CacheStoreFactory(syncDir) val cacheStore = factory.make("copy-resource") val converter = fileConverter.value @@ -2693,22 +2856,28 @@ object Classpaths { import Defaults._ import Keys._ - def concatDistinct[T](a: Taskable[Seq[T]], b: Taskable[Seq[T]]): Initialize[Task[Seq[T]]] = + def concatDistinct[A]( + a: Taskable[Seq[A]], + b: Taskable[Seq[A]] + ): Initialize[Task[Seq[A]]] = Def.task((a.toTask.value ++ b.toTask.value).distinct) - def concat[T](a: Taskable[Seq[T]], b: Taskable[Seq[T]]): Initialize[Task[Seq[T]]] = + def concat[A](a: Taskable[Seq[A]], b: Taskable[Seq[A]]): Initialize[Task[Seq[A]]] = Def.task(a.toTask.value ++ b.toTask.value) def concatSettings[T](a: Initialize[Seq[T]], b: Initialize[Seq[T]]): Initialize[Seq[T]] = Def.setting { a.value ++ b.value } - def concatDistinct[T]( // forward to widened variant - a: ScopedTaskable[Seq[T]], - b: ScopedTaskable[Seq[T]] - ): Initialize[Task[Seq[T]]] = concatDistinct(a: Taskable[Seq[T]], b) + def concatDistinct[A]( // forward to widened variant + a: ScopedTaskable[Seq[A]], + b: ScopedTaskable[Seq[A]] + ): Initialize[Task[Seq[A]]] = concatDistinct(a: Taskable[Seq[A]], b) - def concat[T](a: ScopedTaskable[Seq[T]], b: ScopedTaskable[Seq[T]]): Initialize[Task[Seq[T]]] = - concat(a: Taskable[Seq[T]], b) // forward to widened variant + def concat[A]( + a: ScopedTaskable[Seq[A]], + b: ScopedTaskable[Seq[A]] + ): Initialize[Task[Seq[A]]] = + concat(a: Taskable[Seq[A]], b) // forward to widened variant def concatSettings[T](a: SettingKey[Seq[T]], b: SettingKey[Seq[T]]): Initialize[Seq[T]] = concatSettings(a: Initialize[Seq[T]], b) // forward to widened variant @@ -2731,20 +2900,25 @@ object Classpaths { dependencyClasspath := concat(internalDependencyClasspath, externalDependencyClasspath).value, fullClasspath := concatDistinct(exportedProducts, dependencyClasspath).value, internalDependencyClasspath := ClasspathImpl.internalDependencyClasspathTask.value, - unmanagedClasspath := unmanagedDependencies.value, + 
unmanagedClasspath := ClasspathImpl.unmanagedDependenciesTask.value, managedClasspath := { + val converter = fileConverter.value val isMeta = isMetaBuild.value val force = reresolveSbtArtifacts.value val app = appConfiguration.value def isJansiOrJLine(f: File) = f.getName.contains("jline") || f.getName.contains("jansi") val scalaInstanceJars = app.provider.scalaProvider.jars.filterNot(isJansiOrJLine) - val sbtCp = (scalaInstanceJars ++ app.provider.mainClasspath).map(Attributed.blank) + val sbtCp = (scalaInstanceJars ++ app.provider.mainClasspath) + .map(_.toPath) + .map(p => converter.toVirtualFile(p): HashedVirtualFileRef) + .map(Attributed.blank) val mjars = managedJars( classpathConfiguration.value, classpathTypes.value, - update.value + update.value, + converter, ) - if (isMeta && !force) (mjars ++ sbtCp).distinct + if isMeta && !force then (mjars ++ sbtCp).distinct else mjars }, exportedProducts := ClasspathImpl.trackedExportedProducts(TrackLevel.TrackAlways).value, @@ -2771,33 +2945,27 @@ object Classpaths { configuration.value, unmanagedBase.value, (unmanagedJars / includeFilter) value, - (unmanagedJars / excludeFilter) value + (unmanagedJars / excludeFilter) value, + fileConverter.value, ) - ).map(exportClasspath) ++ Seq( + ).map(exportVirtualClasspath) ++ Seq( externalDependencyClasspath / outputFileStamps := { val stamper = timeWrappedStamper.value val converter = fileConverter.value - externalDependencyClasspath.value flatMap { file0 => - val p = file0.data.toPath - FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _) - } + externalDependencyClasspath.value.flatMap: vf => + val p = converter.toPath(vf.data) + FileStamp(stamper.library(vf.data)).map(p -> _) + }, + dependencyClasspathFiles := { + val converter = fileConverter.value + data(dependencyClasspath.value).map(converter.toPath) }, - dependencyClasspathFiles := data(dependencyClasspath.value).map(_.toPath), dependencyClasspathFiles / outputFileStamps := { val stamper = 
timeWrappedStamper.value val converter = fileConverter.value - dependencyClasspathFiles.value.flatMap(p => - FileStamp(stamper.library(converter.toVirtualFile(p))).map(p -> _) - ) - }, - dependencyVirtualClasspath := { - val converter = fileConverter.value - val cp0 = dependencyClasspath.value - cp0 map { (attr: Attributed[File]) => - attr map { file => - converter.toVirtualFile(file.toPath) - } - } + dependencyClasspathFiles.value.flatMap: p => + val vf = converter.toVirtualFile(p) + FileStamp(stamper.library(vf)).map(p -> _) }, // Note: invoking this task from shell would block indefinately because it will // wait for the upstream compilation to start. @@ -2806,36 +2974,44 @@ object Classpaths { if (incOptions.value.pipelining) { concat( internalDependencyPicklePath, - Def.task { - externalDependencyClasspath.value map { (attr: Attributed[File]) => - attr map { file => - val converter = fileConverter.value - converter.toVirtualFile(file.toPath) - } - } - } + externalDependencyClasspath, ).value } else { - dependencyVirtualClasspath.value + dependencyClasspath.value } }, internalDependencyPicklePath := ClasspathImpl.internalDependencyPicklePathTask.value, exportedPickles := ClasspathImpl.exportedPicklesTask.value, ) - - private[this] def exportClasspath(s: Setting[Task[Classpath]]): Setting[Task[Classpath]] = + private[this] def exportVirtualClasspath( + s: Setting[Task[Classpath]] + ): Setting[Task[Classpath]] = + s.mapInitialize(init => Def.task { exportVirtualClasspath(streams.value, init.value) }) + private[this] def exportClasspath( + s: Setting[Task[Seq[Attributed[File]]]] + ): Setting[Task[Seq[Attributed[File]]]] = s.mapInitialize(init => Def.task { exportClasspath(streams.value, init.value) }) - private[this] def exportClasspath(s: TaskStreams, cp: Classpath): Classpath = { + private[this] def exportVirtualClasspath(s: TaskStreams, cp: Classpath): Classpath = + val w = s.text(ExportStream) + try w.println(data(cp).toString) + finally w.close() // 
workaround for #937 + cp + private[this] def exportClasspath( + s: TaskStreams, + cp: Seq[Attributed[File]] + ): Seq[Attributed[File]] = val w = s.text(ExportStream) try w.println(Path.makeString(data(cp))) finally w.close() // workaround for #937 cp - } def defaultPackageKeys = Seq(packageBin, packageSrc, packageDoc) - lazy val defaultPackages: Seq[TaskKey[File]] = - for (task <- defaultPackageKeys; conf <- Seq(Compile, Test)) yield (conf / task) - lazy val defaultArtifactTasks: Seq[TaskKey[File]] = makePom +: defaultPackages + lazy val defaultPackages: Seq[TaskKey[HashedVirtualFileRef]] = + for + task <- defaultPackageKeys + conf <- Seq(Compile, Test) + yield (conf / task) + lazy val defaultArtifactTasks: Seq[TaskKey[HashedVirtualFileRef]] = makePom +: defaultPackages def findClasspathConfig( map: Configuration => Configuration, @@ -2854,13 +3030,18 @@ object Classpaths { } getOrElse notFound } - def packaged(pkgTasks: Seq[TaskKey[File]]): Initialize[Task[Map[Artifact, File]]] = - enabledOnly(packagedArtifact.toSettingKey, pkgTasks) apply (_.join.map(_.toMap)) + def packaged( + pkgTasks: Seq[TaskKey[HashedVirtualFileRef]] + ): Initialize[Task[Map[Artifact, HashedVirtualFileRef]]] = + enabledOnly(packagedArtifact.toSettingKey, pkgTasks).apply(_.join.map(_.toMap)) - def artifactDefs(pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[Artifact]] = + def artifactDefs(pkgTasks: Seq[TaskKey[HashedVirtualFileRef]]): Initialize[Seq[Artifact]] = enabledOnly(artifact, pkgTasks) - def enabledOnly[T](key: SettingKey[T], pkgTasks: Seq[TaskKey[File]]): Initialize[Seq[T]] = + def enabledOnly[T]( + key: SettingKey[T], + pkgTasks: Seq[TaskKey[HashedVirtualFileRef]] + ): Initialize[Seq[T]] = (forallIn(key, pkgTasks) zipWith forallIn(publishArtifact, pkgTasks))(_ zip _ collect { case (a, true) => a }) @@ -2889,12 +3070,12 @@ object Classpaths { val ivyPublishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq( artifacts :== Nil, packagedArtifacts :== Map.empty, - crossTarget := 
target.value, makePom := { + val converter = fileConverter.value val config = makePomConfiguration.value val publisher = Keys.publisher.value publisher.makePomFile(ivyModule.value, config, streams.value.log) - config.file.get + converter.toVirtualFile(config.file.get.toPath()) }, (makePom / packagedArtifact) := ((makePom / artifact).value -> makePom.value), deliver := deliverTask(makeIvyXmlConfiguration).value, @@ -3043,6 +3224,12 @@ object Classpaths { } }).value, moduleName := normalizedName.value, + outputPath := { + val p = platform.value + val m = moduleName.value + val sv = scalaVersion.value + s"$p/scala-$sv/$m" + }, ivyPaths := IvyPaths( baseDirectory.value.toString, bootIvyHome(appConfiguration.value).map(_.toString) @@ -3174,18 +3361,22 @@ object Classpaths { else confs }, moduleSettings := moduleSettings0.value, - makePomConfiguration := MakePomConfiguration() - .withFile((makePom / artifactPath).value) - .withModuleInfo(projectInfo.value) - .withExtra(pomExtra.value) - .withProcess(pomPostProcess.value) - .withFilterRepositories(pomIncludeRepository.value) - .withAllRepositories(pomAllRepositories.value) - .withConfigurations(Configurations.defaultMavenConfigurations), + makePomConfiguration := { + val converter = fileConverter.value + val out = converter.toPath((makePom / artifactPath).value) + MakePomConfiguration() + .withFile(out.toFile()) + .withModuleInfo(projectInfo.value) + .withExtra(pomExtra.value) + .withProcess(pomPostProcess.value) + .withFilterRepositories(pomIncludeRepository.value) + .withAllRepositories(pomAllRepositories.value) + .withConfigurations(Configurations.defaultMavenConfigurations) + }, makeIvyXmlConfiguration := { makeIvyXmlConfig( publishMavenStyle.value, - sbt.Classpaths.deliverPattern(crossTarget.value), + sbt.Classpaths.deliverPattern(target.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, (publish / checksums).value.toVector, @@ -3202,12 +3393,16 
@@ object Classpaths { |so tooling can use it for eviction errors etc - https://www.scala-sbt.org/1.x/docs/Publishing.html""".stripMargin ) else () + val converter = fileConverter.value + val artifacts = (publish / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } publishConfig( publishMavenStyle.value, - deliverPattern(crossTarget.value), + deliverPattern(target.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publish / packagedArtifacts).value.toVector, + artifacts, (publish / checksums).value.toVector, getPublishTo(publishTo.value).name, ivyLoggingLevel.value, @@ -3217,7 +3412,7 @@ object Classpaths { makeIvyXmlLocalConfiguration := { makeIvyXmlConfig( false, // publishMavenStyle.value, - sbt.Classpaths.deliverPattern(crossTarget.value), + sbt.Classpaths.deliverPattern(target.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, (publish / checksums).value.toVector, @@ -3226,27 +3421,39 @@ object Classpaths { optResolverName = Some("local") ) }, - publishLocalConfiguration := publishConfig( - false, // publishMavenStyle.value, - deliverPattern(crossTarget.value), - if (isSnapshot.value) "integration" else "release", - ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publishLocal / packagedArtifacts).value.toVector, - (publishLocal / checksums).value.toVector, - logging = ivyLoggingLevel.value, - overwrite = isSnapshot.value - ), - publishM2Configuration := publishConfig( - true, - deliverPattern(crossTarget.value), - if (isSnapshot.value) "integration" else "release", - ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (publishM2 / packagedArtifacts).value.toVector, - checksums = (publishM2 / checksums).value.toVector, - resolverName = Resolver.publishMavenLocal.name, - logging = ivyLoggingLevel.value, - overwrite = isSnapshot.value - ), + 
publishLocalConfiguration := { + val converter = fileConverter.value + val artifacts = (publishLocal / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } + publishConfig( + false, // publishMavenStyle.value, + deliverPattern(target.value), + if (isSnapshot.value) "integration" else "release", + ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, + artifacts, + (publishLocal / checksums).value.toVector, + logging = ivyLoggingLevel.value, + overwrite = isSnapshot.value + ) + }, + publishM2Configuration := { + val converter = fileConverter.value + val artifacts = (publishM2 / packagedArtifacts).value.toVector.map { (a, vf) => + a -> converter.toPath(vf).toFile + } + publishConfig( + true, + deliverPattern(target.value), + if (isSnapshot.value) "integration" else "release", + ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, + artifacts, + checksums = (publishM2 / checksums).value.toVector, + resolverName = Resolver.publishMavenLocal.name, + logging = ivyLoggingLevel.value, + overwrite = isSnapshot.value + ) + }, ivySbt := ivySbt0.value, ivyModule := { val is = ivySbt.value; new is.Module(moduleSettings.value) }, allCredentials := LMCoursier.allCredentialsTask.value, @@ -3473,7 +3680,7 @@ object Classpaths { moduleConfigurations = Vector.empty, checksums = checksums.value.toVector, managedChecksums = false, - resolutionCacheDir = Some(crossTarget.value / "resolution-cache"), + resolutionCacheDir = Some(target.value / "resolution-cache"), ), ivySbt := ivySbt0.value, classifiersModule := classifiersModuleTask.value, @@ -3551,7 +3758,7 @@ object Classpaths { moduleConfigurations = Vector.empty, checksums = checksums.value.toVector, managedChecksums = false, - resolutionCacheDir = Some(crossTarget.value / "bridge-resolution-cache"), + resolutionCacheDir = Some(target.value / "bridge-resolution-cache"), ) ) ) ++ Seq( @@ -3578,15 +3785,29 @@ object Classpaths { scalaCompilerBridgeDependencyResolution := 
(scalaCompilerBridgeScope / dependencyResolution).value ) + val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = + new sjsonnew.JsonKeyFormat[ModuleID] { + import LibraryManagementCodec._ + import sjsonnew.support.scalajson.unsafe._ + val moduleIdFormat: JsonFormat[ModuleID] = implicitly[JsonFormat[ModuleID]] + def write(key: ModuleID): String = + CompactPrinter(Converter.toJsonUnsafe(key)(moduleIdFormat)) + def read(key: String): ModuleID = + Converter.fromJsonUnsafe[ModuleID](Parser.parseUnsafe(key))(moduleIdFormat) + } + def classifiersModuleTask: Initialize[Task[GetClassifiersModule]] = Def.task { val classifiers = transitiveClassifiers.value val ref = thisProjectRef.value - val pluginClasspath = loadedBuild.value.units(ref.build).unit.plugins.fullClasspath.toVector - val pluginJars = pluginClasspath.filter( - _.data.isFile - ) // exclude directories: an approximation to whether they've been published - val pluginIDs: Vector[ModuleID] = pluginJars.flatMap(_ get moduleID.key) + val unit = loadedBuild.value.units(ref.build).unit + val converter = unit.converter + val pluginClasspath = unit.plugins.fullClasspath.toVector + val pluginJars = pluginClasspath.filter: x => + !Files.isDirectory(converter.toPath(x.data)) + // exclude directories: an approximation to whether they've been published + val pluginIDs: Vector[ModuleID] = pluginJars.flatMap(_.get(moduleIDStr).map: str => + moduleIdJsonKeyFormat.read(str)) GetClassifiersModule( projectID.value, // TODO: Should it be sbt's scalaModuleInfo? 
@@ -3623,17 +3844,6 @@ object Classpaths { } } tag (Tags.Publish, Tags.Network) - val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = - new sjsonnew.JsonKeyFormat[ModuleID] { - import LibraryManagementCodec._ - import sjsonnew.support.scalajson.unsafe._ - val moduleIdFormat: JsonFormat[ModuleID] = implicitly[JsonFormat[ModuleID]] - def write(key: ModuleID): String = - CompactPrinter(Converter.toJsonUnsafe(key)(moduleIdFormat)) - def read(key: String): ModuleID = - Converter.fromJsonUnsafe[ModuleID](Parser.parseUnsafe(key))(moduleIdFormat) - } - def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)( f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport ): UpdateReport = LibraryManagement.withExcludes(out, classifiers, lock)(f) @@ -3714,7 +3924,7 @@ object Classpaths { appConfiguration.toTaskable, Defaults.unmanagedScalaInstanceOnly.toTaskable, dependencyCacheDirectory.toTaskable, - crossTarget.toTaskable, + target.toTaskable, executionRoots.toTaskable, resolvedScoped.toTaskable, forceUpdatePeriod.toTaskable, @@ -4031,8 +4241,12 @@ object Classpaths { val c = fileConverter.value Def.unit(copyResources.value) Def.unit(compile.value) - - c.toPath(backendOutput.value).toFile :: Nil + val dir = c.toPath(backendOutput.value) + val rawJar = compileIncremental.value._2 + val rawJarPath = c.toPath(rawJar) + IO.unzip(rawJarPath.toFile, dir.toFile) + IO.delete(dir.toFile / "META-INF" / "MANIFEST.MF") + dir.toFile :: Nil } private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task { @@ -4057,7 +4271,6 @@ object Classpaths { def internalDependencyJarsTask: Initialize[Task[Classpath]] = ClasspathImpl.internalDependencyJarsTask - def unmanagedDependencies: Initialize[Task[Classpath]] = ClasspathImpl.unmanagedDependenciesTask def mkIvyConfiguration: Initialize[Task[InlineIvyConfiguration]] = Def.task { val (rs, other) = (fullResolvers.value.toVector, otherResolvers.value.toVector) @@ -4071,7 +4284,7 @@ object Classpaths { 
.withModuleConfigurations(moduleConfigurations.value.toVector) .withLock(lock(appConfiguration.value)) .withChecksums((update / checksums).value.toVector) - .withResolutionCacheDir(crossTarget.value / "resolution-cache") + .withResolutionCacheDir(target.value / "resolution-cache") .withUpdateOptions(updateOptions.value) .withLog(s.log) } @@ -4176,22 +4389,35 @@ object Classpaths { def addUnmanagedLibrary: Seq[Setting[_]] = Seq((Compile / unmanagedJars) ++= unmanagedScalaLibrary.value) - def unmanagedScalaLibrary: Initialize[Task[Seq[File]]] = + def unmanagedScalaLibrary: Initialize[Task[Seq[HashedVirtualFileRef]]] = (Def.task { autoScalaLibrary.value && scalaHome.value.isDefined }).flatMapTask { case cond => - if cond then Def.task { (scalaInstance.value.libraryJars: Seq[File]) } - else Def.task { (Nil: Seq[File]) } + if cond then + Def.task { + val converter = fileConverter.value + (scalaInstance.value.libraryJars: Seq[File]) + .map(_.toPath) + .map(converter.toVirtualFile) + } + else Def.task { (Nil: Seq[HashedVirtualFileRef]) } } import DependencyFilter._ - def managedJars(config: Configuration, jarTypes: Set[String], up: UpdateReport): Classpath = + def managedJars( + config: Configuration, + jarTypes: Set[String], + up: UpdateReport, + converter: FileConverter + ): Classpath = up.filter(configurationFilter(config.name) && artifactFilter(`type` = jarTypes)) .toSeq .map { case (_, module, art, file) => - Attributed(file)( - AttributeMap.empty - .put(artifact.key, art) - .put(moduleID.key, module) - .put(configuration.key, config) + val vf = converter.toVirtualFile(file.toPath()) + Attributed(vf)( + Map( + Keys.artifactStr -> RemoteCache.artifactToStr(art), + Keys.moduleIDStr -> moduleIdJsonKeyFormat.write(module), + Keys.configurationStr -> config.name, + ) ) } .distinct @@ -4200,28 +4426,24 @@ object Classpaths { config: Configuration, base: File, filter: FileFilter, - excl: FileFilter - ): Classpath = { + excl: FileFilter, + converter: FileConverter, + ): 
Classpath = + given FileConverter = converter (base * (filter -- excl) +++ (base / config.name).descendantsExcept(filter, excl)).classpath - } - @deprecated( - "The method only works for Scala 2, use the overloaded version to support both Scala 2 and Scala 3", - "1.1.5" - ) - def autoPlugins(report: UpdateReport, internalPluginClasspath: Seq[File]): Seq[String] = - autoPlugins(report, internalPluginClasspath, isDotty = false) def autoPlugins( report: UpdateReport, - internalPluginClasspath: Seq[File], + internalPluginClasspath: Seq[NioPath], isDotty: Boolean - ): Seq[String] = { + ): Seq[String] = import sbt.internal.inc.classpath.ClasspathUtil.compilerPlugins val pluginClasspath = - report.matching(configurationFilter(CompilerPlugin.name)) ++ internalPluginClasspath - val plugins = compilerPlugins(pluginClasspath.map(_.toPath), isDotty) + report + .matching(configurationFilter(CompilerPlugin.name)) + .map(_.toPath) ++ internalPluginClasspath + val plugins = compilerPlugins(pluginClasspath, isDotty) plugins.map("-Xplugin:" + _.toAbsolutePath.toString).toSeq - } private[this] lazy val internalCompilerPluginClasspath: Initialize[Task[Classpath]] = (Def @@ -4240,6 +4462,7 @@ object Classpaths { lazy val compilerPluginConfig = Seq( scalacOptions := { + given FileConverter = fileConverter.value val options = scalacOptions.value val newPlugins = autoPlugins( update.value, @@ -4465,7 +4688,7 @@ trait BuildExtra extends BuildCommon with DefExtra { libraryDependencies += compilerPlugin(dependency) /** Constructs a setting that declares a new artifact `a` that is generated by `taskDef`. 
*/ - def addArtifact(a: Artifact, taskDef: TaskKey[File]): SettingsDefinition = { + def addArtifact(a: Artifact, taskDef: TaskKey[HashedVirtualFileRef]): SettingsDefinition = { val pkgd = packagedArtifacts := packagedArtifacts.value.updated(a, taskDef.value) Seq(artifacts += a, pkgd) } @@ -4473,10 +4696,10 @@ trait BuildExtra extends BuildCommon with DefExtra { /** Constructs a setting that declares a new artifact `artifact` that is generated by `taskDef`. */ def addArtifact( artifact: Initialize[Artifact], - taskDef: Initialize[Task[File]] + taskDef: Initialize[Task[HashedVirtualFileRef]] ): SettingsDefinition = { val artLocal = SettingKey.local[Artifact] - val taskLocal = TaskKey.local[File] + val taskLocal = TaskKey.local[HashedVirtualFileRef] val art = artifacts := artLocal.value +: artifacts.value val pkgd = packagedArtifacts := packagedArtifacts.value.updated(artLocal.value, taskLocal.value) Seq(artLocal := artifact.value, taskLocal := taskDef.value, art, pkgd) @@ -4561,11 +4784,12 @@ trait BuildExtra extends BuildCommon with DefExtra { baseArguments: String* ): Initialize[InputTask[Unit]] = Def.inputTask { + given FileConverter = fileConverter.value import Def._ val r = (config / run / runner).value val cp = (config / fullClasspath).value val args = spaceDelimited().parsed - r.run(mainClass, data(cp), baseArguments ++ args, streams.value.log).get + r.run(mainClass, cp.files, baseArguments ++ args, streams.value.log).get } def runTask( @@ -4574,10 +4798,11 @@ trait BuildExtra extends BuildCommon with DefExtra { arguments: String* ): Initialize[Task[Unit]] = Def.task { + given FileConverter = fileConverter.value val cp = (config / fullClasspath).value val r = (config / run / runner).value val s = streams.value - r.run(mainClass, data(cp), arguments, s.log).get + r.run(mainClass, cp.files, arguments, s.log).get } // public API @@ -4595,12 +4820,14 @@ trait BuildExtra extends BuildCommon with DefExtra { .flatMapTask { result => initScoped( scoped.scopedKey, - 
ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)) - ).zipWith(Def.task { ((config / fullClasspath).value, streams.value, result) }) { - (rTask, t) => - (t, rTask) mapN { case ((cp, s, args), r) => - r.run(mainClass, data(cp), baseArguments ++ args, s.log).get - } + ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)), + ).zipWith(Def.task { + ((config / fullClasspath).value, streams.value, fileConverter.value, result) + }) { (rTask, t) => + (t, rTask) mapN { case ((cp, s, converter, args), r) => + given FileConverter = converter + r.run(mainClass, cp.files, baseArguments ++ args, s.log).get + } } }) .value @@ -4619,11 +4846,13 @@ trait BuildExtra extends BuildCommon with DefExtra { Vector( scoped := initScoped( scoped.scopedKey, - ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)) - ).zipWith(Def.task { ((config / fullClasspath).value, streams.value) }) { case (rTask, t) => - (t, rTask).mapN { case ((cp, s), r) => - r.run(mainClass, data(cp), arguments, s.log).get - } + ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config)), + ).zipWith(Def.task { ((config / fullClasspath).value, streams.value, fileConverter.value) }) { + case (rTask, t) => + (t, rTask).mapN { case ((cp, s, converter), r) => + given FileConverter = converter + r.run(mainClass, cp.files, arguments, s.log).get + } }.value ) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value) @@ -4659,24 +4888,19 @@ trait BuildCommon { */ implicit def globFilter(expression: String): NameFilter = GlobFilter(expression) - implicit def richAttributed(s: Seq[Attributed[File]]): RichAttributed = new RichAttributed(s) - implicit def richFiles(s: Seq[File]): RichFiles = new RichFiles(s) - implicit def richPathFinder(s: PathFinder): RichPathFinder = new RichPathFinder(s) - final class RichPathFinder private[sbt] (s: PathFinder) { + extension (s: PathFinder) + def classpath(using FileConverter): Classpath = + val converter = 
summon[FileConverter] + Attributed.blankSeq(s.get().map(p => converter.toVirtualFile(p.toPath): HashedVirtualFileRef)) - /** Converts the `PathFinder` to a `Classpath`, which is an alias for `Seq[Attributed[File]]`. */ - def classpath: Classpath = Attributed.blankSeq(s.get()) - } - final class RichAttributed private[sbt] (s: Seq[Attributed[File]]) { + extension (s: Classpath) + def files(using FileConverter): Seq[NioPath] = + val converter = summon[FileConverter] + Attributed.data(s).map(converter.toPath) - /** Extracts the plain `Seq[File]` from a Classpath (which is a `Seq[Attributed[File]]`). */ - def files: Seq[File] = Attributed.data(s) - } - final class RichFiles private[sbt] (s: Seq[File]) { - - /** Converts the `Seq[File]` to a Classpath, which is an alias for `Seq[Attributed[File]]`. */ + extension (s: Seq[HashedVirtualFileRef]) + /** Converts the `Seq[HashedVirtualFileRef]` to a Classpath, which is an alias for `Seq[Attributed[HashedVirtualFileRef]]`. */ def classpath: Classpath = Attributed blankSeq s - } def overrideConfigs(cs: Configuration*)( configurations: Seq[Configuration] @@ -4727,7 +4951,7 @@ trait BuildCommon { // these are for use for constructing Tasks def loadPrevious[T](task: TaskKey[T])(implicit f: JsonFormat[T]): Initialize[Task[Option[T]]] = Def.task { loadFromContext(task, resolvedScoped.value, state.value)(f) } - def getPrevious[T](task: TaskKey[T]): Initialize[Task[Option[T]]] = + def getPrevious[A](task: TaskKey[A]): Initialize[Task[Option[A]]] = Def.task { getFromContext(task, resolvedScoped.value, state.value) } private[sbt] def derive[T](s: Setting[T]): Setting[T] = diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index e28ae2321..50a4dcdbc 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -30,6 +30,7 @@ import scala.annotation.nowarn import scala.Console.RED import scala.concurrent.duration.Duration import 
scala.util.control.NonFatal +import xsbti.FileConverter /** * An API that allows you to cancel executing tasks upon some signal. @@ -142,8 +143,8 @@ object EvaluateTaskConfig { } final case class PluginData( - dependencyClasspath: Seq[Attributed[File]], - definitionClasspath: Seq[Attributed[File]], + dependencyClasspath: Def.Classpath, + definitionClasspath: Def.Classpath, resolvers: Option[Vector[Resolver]], report: Option[UpdateReport], scalacOptions: Seq[String], @@ -151,14 +152,15 @@ final case class PluginData( unmanagedSources: Seq[File], managedSourceDirectories: Seq[File], managedSources: Seq[File], - buildTarget: Option[BuildTargetIdentifier] + buildTarget: Option[BuildTargetIdentifier], + converter: FileConverter, ) { - val classpath: Seq[Attributed[File]] = definitionClasspath ++ dependencyClasspath + val classpath: Def.Classpath = definitionClasspath ++ dependencyClasspath } object PluginData { - private[sbt] def apply(dependencyClasspath: Def.Classpath): PluginData = - PluginData(dependencyClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None) + private[sbt] def apply(dependencyClasspath: Def.Classpath, converter: FileConverter): PluginData = + PluginData(dependencyClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None, converter) } object EvaluateTask { diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index cb7658737..efc90e81a 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -28,6 +28,7 @@ import sbt.internal.remotecache.RemoteCacheArtifact import sbt.internal.server.BuildServerProtocol.BspFullWorkspace import sbt.internal.server.{ BuildServerReporter, ServerHandler } import sbt.internal.util.{ AttributeKey, ProgressState, SourcePosition } +import sbt.internal.util.StringAttributeKey import sbt.io._ import sbt.librarymanagement.Configurations.CompilerPlugin import sbt.librarymanagement.LibraryManagementCodec._ @@ -35,8 +36,8 @@ import sbt.librarymanagement._ import 
sbt.librarymanagement.ivy.{ Credentials, IvyConfiguration, IvyPaths, UpdateOptions } import sbt.nio.file.Glob import sbt.testing.Framework -import sbt.util.{ Level, Logger, LoggerContext } -import xsbti.{ FileConverter, VirtualFile } +import sbt.util.{ cacheLevel, ActionCacheStore, Level, Logger, LoggerContext } +import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFile, VirtualFileRef } import xsbti.compile._ import xsbti.compile.analysis.ReadStamps @@ -83,7 +84,12 @@ object Keys { val buildDependencies = settingKey[BuildDependencies]("Definitive source of inter-project dependencies for compilation and dependency management.\n\tThis is populated by default by the dependencies declared on Project instances, but may be modified.\n\tThe main restriction is that new builds may not be introduced.").withRank(DSetting) val appConfiguration = settingKey[xsbti.AppConfiguration]("Provides access to the launched sbt configuration, including the ScalaProvider, Launcher, and GlobalLock.").withRank(DSetting) val thisProject = settingKey[ResolvedProject]("Provides the current project for the referencing scope.").withRank(CSetting) + + @cacheLevel(include = Array.empty) val thisProjectRef = settingKey[ProjectRef]("Provides a fully-resolved reference to the current project for the referencing scope.").withRank(CSetting) + val configurationStr = StringAttributeKey("configuration") + + @cacheLevel(include = Array.empty) val configuration = settingKey[Configuration]("Provides the current configuration of the referencing scope.").withRank(CSetting) val commands = settingKey[Seq[Command]]("Defines commands to be registered when this project or build is the current selected one.").withRank(CSetting) val initialize = settingKey[Unit]("A convenience setting for performing side-effects during initialization.").withRank(BSetting) @@ -108,8 +114,13 @@ object Keys { val serverUseJni = SettingKey(BasicKeys.serverUseJni) val fullServerHandlers = SettingKey(BasicKeys.fullServerHandlers) 
val serverHandlers = settingKey[Seq[ServerHandler]]("User-defined server handlers.") + val cacheStores = settingKey[Seq[ActionCacheStore]]("Cache backends") + val rootOutputDirectory = SettingKey(BasicKeys.rootOutputDirectory) + + // val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting) + val analysis = StringAttributeKey("analysis") + - val analysis = AttributeKey[CompileAnalysis]("analysis", "Analysis of compilation, including dependencies and generated outputs.", DSetting) val suppressSbtShellNotification = settingKey[Boolean]("""True to suppress the "Executing in batch mode.." message.""").withRank(CSetting) val pollInterval = settingKey[FiniteDuration]("Interval between checks for modified sources by the continuous execution command.").withRank(BMinusSetting) val watchAntiEntropy = settingKey[FiniteDuration]("Duration for which the watch EventMonitor will ignore events for a file after that file has triggered a build.").withRank(BMinusSetting) @@ -159,6 +170,7 @@ object Keys { val resources = taskKey[Seq[File]]("All resource files, both managed and unmanaged.").withRank(BTask) // Output paths + @cacheLevel(include = Array.empty) val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting) val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining") val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources") @@ -184,7 +196,10 @@ object Keys { val cleanupCommands = settingKey[String]("Commands to execute before the Scala interpreter exits.").withRank(BMinusSetting) val asciiGraphWidth = settingKey[Int]("Determines maximum width of the settings graph in ASCII mode").withRank(AMinusSetting) val compileOptions = taskKey[CompileOptions]("Collects basic options to configure compilers").withRank(DTask) + + @cacheLevel(include = Array.empty) val 
compileInputs = taskKey[Inputs]("Collects all inputs needed for compilation.").withRank(DTask) + val compileInputs2 = taskKey[CompileInputs2]("") val scalaHome = settingKey[Option[File]]("If Some, defines the local Scala installation to use for compilation, running, and testing.").withRank(ASetting) val scalaInstance = taskKey[ScalaInstance]("Defines the Scala instance to use for compilation, running, and testing.").withRank(DTask) val scalaOrganization = settingKey[String]("Organization/group ID of the Scala used in the project. Default value is 'org.scala-lang'. This is an advanced setting used for clones of the Scala Language. It should be disregarded in standard use cases.").withRank(CSetting) @@ -222,11 +237,13 @@ object Keys { val consoleProject = taskKey[Unit]("Starts the Scala interpreter with the sbt and the build definition on the classpath and useful imports.").withRank(AMinusTask) val compile = taskKey[CompileAnalysis]("Compiles sources.").withRank(APlusTask) val manipulateBytecode = taskKey[CompileResult]("Manipulates generated bytecode").withRank(BTask) - val compileIncremental = taskKey[CompileResult]("Actually runs the incremental compilation").withRank(DTask) + val compileIncremental = taskKey[(Boolean, HashedVirtualFileRef)]("Actually runs the incremental compilation").withRank(DTask) val previousCompile = taskKey[PreviousResult]("Read the incremental compiler analysis from disk").withRank(DTask) val tastyFiles = taskKey[Seq[File]]("Returns the TASTy files produced by compilation").withRank(DTask) private[sbt] val compileScalaBackend = taskKey[CompileResult]("Compiles only Scala sources if pipelining is enabled. 
Compiles both Scala and Java sources otherwise").withRank(Invisible) private[sbt] val compileEarly = taskKey[CompileAnalysis]("Compiles only Scala sources if pipelining is enabled, and produce an early output (pickle JAR)").withRank(Invisible) + + @cacheLevel(include = Array.empty) private[sbt] val earlyOutputPing = taskKey[PromiseWrap[Boolean]]("When pipelining is enabled, this returns true when early output (pickle JAR) is created; false otherwise").withRank(Invisible) private[sbt] val compileJava = taskKey[CompileResult]("Compiles only Java sources (called only for pipelining)").withRank(Invisible) private[sbt] val compileSplit = taskKey[CompileResult]("When pipelining is enabled, compile Scala then Java; otherwise compile both").withRank(Invisible) @@ -238,6 +255,8 @@ object Keys { val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting) val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting) val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting) + + @cacheLevel(include = Array.empty) val compileIncSetup = taskKey[Setup]("Configures aspects of incremental compilation.").withRank(DTask) val compilerCache = taskKey[GlobalsCache]("Cache of scala.tools.nsc.Global instances. 
This should typically be cached so that it isn't recreated every task run.").withRank(DTask) val stateCompilerCache = AttributeKey[GlobalsCache]("stateCompilerCache", "Internal use: Global cache.") @@ -251,6 +270,7 @@ object Keys { val sourcePositionMappers = taskKey[Seq[xsbti.Position => Option[xsbti.Position]]]("Maps positions in generated source files to the original source it was generated from").withRank(DTask) private[sbt] val externalHooks = taskKey[ExternalHooks]("The external hooks used by zinc.") val auxiliaryClassFiles = taskKey[Seq[AuxiliaryClassFiles]]("The auxiliary class files that must be managed by Zinc (for instance the TASTy files)") + @cacheLevel(include = Array.empty) val fileConverter = settingKey[FileConverter]("The file converter used to convert between Path and VirtualFile") val allowMachinePath = settingKey[Boolean]("Allow machine-specific paths during conversion.") val reportAbsolutePath = settingKey[Boolean]("Report absolute paths during compilation.") @@ -259,20 +279,21 @@ object Keys { private[sbt] val reusableStamper = taskKey[ReadStamps]("The stamper can be reused across subprojects and sessions.") // package keys - val packageBin = taskKey[File]("Produces a main artifact, such as a binary jar.").withRank(ATask) - val `package` = taskKey[File]("Produces the main artifact, such as a binary jar. 
This is typically an alias for the task that actually does the packaging.").withRank(APlusTask) - val packageDoc = taskKey[File]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask) - val packageSrc = taskKey[File]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask) - val packageCache = taskKey[File]("Produces the main artifact for caching.") + val packageBin = taskKey[HashedVirtualFileRef]("Produces a main artifact, such as a binary jar.").withRank(ATask) + val `package` = taskKey[HashedVirtualFileRef]("Produces the main artifact, such as a binary jar. This is typically an alias for the task that actually does the packaging.").withRank(APlusTask) + val packageDoc = taskKey[HashedVirtualFileRef]("Produces a documentation artifact, such as a jar containing API documentation.").withRank(AMinusTask) + val packageSrc = taskKey[HashedVirtualFileRef]("Produces a source artifact, such as a jar containing sources and resources.").withRank(AMinusTask) + val packageCache = taskKey[HashedVirtualFileRef]("Produces the main artifact for caching.") val packageOptions = taskKey[Seq[PackageOption]]("Options for packaging.").withRank(BTask) val packageTimestamp = settingKey[Option[Long]]("Overwrites timestamps in JAR file to make the build reproducible; None keeps the existing timestamps (useful for web resources)").withRank(CSetting) - val packageConfiguration = taskKey[Package.Configuration]("Collects all inputs needed for packaging.").withRank(DTask) - val artifactPath = settingKey[File]("The location of a generated artifact.").withRank(BPlusSetting) + val packageConfiguration = taskKey[Pkg.Configuration]("Collects all inputs needed for packaging.").withRank(DTask) + val artifactPath = settingKey[VirtualFileRef]("The location of a generated artifact.").withRank(BPlusSetting) + val artifactStr = StringAttributeKey("artifact") val artifact = settingKey[Artifact]("Describes an 
artifact.").withRank(BMinusSetting) val artifactClassifier = settingKey[Option[String]]("Sets the classifier used by the default artifact definition.").withRank(BSetting) val artifactName = settingKey[(ScalaVersion, ModuleID, Artifact) => String]("Function that produces the artifact name from its definition.").withRank(CSetting) - val mappings = taskKey[Seq[(File, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask) + val mappings = taskKey[Seq[(HashedVirtualFileRef, String)]]("Defines the mappings from a file to a path, used by packaging, for example.").withRank(BTask) val fileMappings = taskKey[Seq[(File, File)]]("Defines the mappings from a file to a file, used for copying files, for example.").withRank(BMinusTask) // Run Keys @@ -331,7 +352,6 @@ object Keys { // Classpath/Dependency Management Keys type Classpath = Def.Classpath - type VirtualClasspath = Def.VirtualClasspath val name = settingKey[String]("Project name.").withRank(APlusSetting) val normalizedName = settingKey[String]("Project name transformed from mixed case and spaces to lowercase and dash-separated.").withRank(BSetting) @@ -344,8 +364,8 @@ object Keys { val organizationHomepage = settingKey[Option[URL]]("Organization homepage.").withRank(BMinusSetting) val developers = settingKey[List[Developer]]("List of developers implicated in the project").withRank(BMinusSetting) val apiURL = settingKey[Option[URL]]("Base URL for API documentation.").withRank(BMinusSetting) - val entryApiURL = AttributeKey[URL]("entryApiURL", "Base URL for the API documentation for a classpath entry.") - val apiMappings = taskKey[Map[File, URL]]("Mappings from classpath entry to API documentation base URL.").withRank(BMinusSetting) + val entryApiURL = StringAttributeKey("entryApiURL") // , "Base URL for the API documentation for a classpath entry.") + val apiMappings = taskKey[Map[HashedVirtualFileRef, URL]]("Mappings from classpath entry to API documentation base 
URL.").withRank(BMinusSetting) val autoAPIMappings = settingKey[Boolean]("If true, automatically manages mappings to the API doc URL.").withRank(BMinusSetting) val scmInfo = settingKey[Option[ScmInfo]]("Basic SCM information for the project.").withRank(BMinusSetting) val projectInfo = settingKey[ModuleInfo]("Addition project information like formal name, homepage, licenses etc.").withRank(CSetting) @@ -363,16 +383,15 @@ object Keys { val internalDependencyClasspath = taskKey[Classpath]("The internal (inter-project) classpath.").withRank(CTask) val externalDependencyClasspath = taskKey[Classpath]("The classpath consisting of library dependencies, both managed and unmanaged.").withRank(BMinusTask) val dependencyClasspath = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(BPlusTask) - val dependencyVirtualClasspath = taskKey[VirtualClasspath]("The classpath consisting of internal and external, managed and unmanaged dependencies.").withRank(CTask) - val dependencyPicklePath = taskKey[VirtualClasspath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.") - val internalDependencyPicklePath = taskKey[VirtualClasspath]("The internal (inter-project) pickles. This task is promise-blocked.") + val dependencyPicklePath = taskKey[Classpath]("The classpath consisting of internal pickles and external, managed and unmanaged dependencies. This task is promise-blocked.") + val internalDependencyPicklePath = taskKey[Classpath]("The internal (inter-project) pickles. 
This task is promise-blocked.") val fullClasspath = taskKey[Classpath]("The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies.").withRank(BPlusTask) val trackInternalDependencies = settingKey[TrackLevel]("The level of tracking for the internal (inter-project) dependency.").withRank(BSetting) val exportToInternal = settingKey[TrackLevel]("The level of tracking for this project by the internal callers.").withRank(BSetting) val exportedProductJars = taskKey[Classpath]("Build products that go on the exported classpath as JARs.") val exportedProductJarsIfMissing = taskKey[Classpath]("Build products that go on the exported classpath as JARs if missing.") val exportedProductJarsNoTracking = taskKey[Classpath]("Just the exported classpath as JARs without triggering the compilation.") - val exportedPickles = taskKey[VirtualClasspath]("Build products that go on the exported compilation classpath as JARs. Note this is promise-blocked.").withRank(DTask) + val exportedPickles = taskKey[Classpath]("Build products that go on the exported compilation classpath as JARs. 
Note this is promise-blocked.").withRank(DTask) val pickleProducts = taskKey[Seq[VirtualFile]]("Pickle JARs").withRank(DTask) val internalDependencyAsJars = taskKey[Classpath]("The internal (inter-project) classpath as JARs.") val dependencyClasspathAsJars = taskKey[Classpath]("The classpath consisting of internal and external, managed and unmanaged dependencies, all as JARs.") @@ -392,7 +411,7 @@ object Keys { val pushRemoteCacheConfiguration = taskKey[PublishConfiguration]("") val pushRemoteCacheTo = settingKey[Option[Resolver]]("The resolver to publish remote cache to.") val remoteCacheResolvers = settingKey[Seq[Resolver]]("Resolvers for remote cache.") - val remoteCachePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.") + val remoteCachePom = taskKey[HashedVirtualFileRef]("Generates a pom for publishing when publishing Maven-style.") val localCacheDirectory = settingKey[File]("Operating system specific cache directory.") val usePipelining = settingKey[Boolean]("Use subproject pipelining for compilation.").withRank(BSetting) val exportPipelining = settingKey[Boolean]("Product early output so downstream subprojects can do pipelining.").withRank(BSetting) @@ -401,6 +420,8 @@ object Keys { val bspConfig = taskKey[Unit]("Create or update the BSP connection files").withRank(DSetting) val bspEnabled = SettingKey[Boolean](BasicKeys.bspEnabled) val bspSbtEnabled = settingKey[Boolean]("Should BSP export meta-targets for the SBT build itself?") + + @cacheLevel(include = Array.empty) val bspTargetIdentifier = settingKey[BuildTargetIdentifier]("Build target identifier of a project and configuration.").withRank(DSetting) val bspWorkspace = settingKey[Map[BuildTargetIdentifier, Scope]]("Mapping of BSP build targets to sbt scopes").withRank(DSetting) private[sbt] val bspFullWorkspace = settingKey[BspFullWorkspace]("Mapping of BSP build targets to sbt scopes and meta-targets for the SBT build itself").withRank(DSetting) @@ -431,6 +452,8 @@ object 
Keys { val bspScalaTestClassesItem = taskKey[Seq[ScalaTestClassesItem]]("").withRank(DTask) val bspScalaMainClasses = inputKey[Unit]("Corresponds to buildTarget/scalaMainClasses request").withRank(DTask) val bspScalaMainClassesItem = taskKey[ScalaMainClassesItem]("").withRank(DTask) + + @cacheLevel(include = Array.empty) val bspReporter = taskKey[BuildServerReporter]("").withRank(DTask) val useCoursier = settingKey[Boolean]("Use Coursier for dependency resolution.").withRank(BSetting) @@ -489,12 +512,12 @@ object Keys { val makePomConfiguration = settingKey[MakePomConfiguration]("Configuration for generating a pom.").withRank(DSetting) val makeIvyXmlConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting) val makeIvyXmlLocalConfiguration = taskKey[PublishConfiguration]("Configuration for generating ivy.xml.").withRank(DSetting) - val packagedArtifacts = taskKey[Map[Artifact, File]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask) + val packagedArtifacts = taskKey[Map[Artifact, HashedVirtualFileRef]]("Packages all artifacts for publishing and maps the Artifact definition to the generated file.").withRank(CTask) val publishMavenStyle = settingKey[Boolean]("Configures whether to generate and publish a pom (true) or Ivy file (false).").withRank(BSetting) val credentials = taskKey[Seq[Credentials]]("The credentials to use for updating and publishing.").withRank(BMinusTask) val allCredentials = taskKey[Seq[Credentials]]("Aggregated credentials across current and root subprojects. 
Do not rewire this task.").withRank(DTask) - val makePom = taskKey[File]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask) + val makePom = taskKey[HashedVirtualFileRef]("Generates a pom for publishing when publishing Maven-style.").withRank(BPlusTask) val deliver = taskKey[File]("Generates the Ivy file for publishing to a repository.").withRank(BTask) val deliverLocal = taskKey[File]("Generates the Ivy file for publishing to the local repository.").withRank(BTask) // makeIvyXml is currently identical to the confusingly-named "deliver", which may be deprecated in the future @@ -509,8 +532,10 @@ object Keys { val pomAllRepositories = settingKey[Boolean]("If true, includes repositories used in module configurations in the pom repositories section. If false, only the common repositories are included.").withRank(BMinusSetting) val moduleName = settingKey[String]("The name of the current module, used for dependency management.").withRank(BSetting) + val outputPath = settingKey[String]("Path of the output directory relative from the rootOutputDirectory.").withRank(DSetting) val version = settingKey[String]("The version/revision of the current module.").withRank(APlusSetting) val isSnapshot = settingKey[Boolean]("True if the version of the project is a snapshot version.").withRank(BPlusSetting) + val moduleIDStr = StringAttributeKey("moduleID") val moduleID = settingKey[ModuleID]("A dependency management descriptor. 
This is currently used for associating a ModuleID with a classpath entry.").withRank(BPlusSetting) val projectID = settingKey[ModuleID]("The dependency management descriptor for the current module.").withRank(BMinusSetting) val overrideBuildResolvers = settingKey[Boolean]("Whether or not all the build resolvers should be overridden with what's defined from the launcher.").withRank(BMinusSetting) @@ -550,7 +575,7 @@ object Keys { val managedDirectory = settingKey[File]("Directory to which managed dependencies are retrieved.").withRank(BSetting) val classpathTypes = settingKey[Set[String]]("Artifact types that are included on the classpath.").withRank(BSetting) val publishArtifact = settingKey[Boolean]("Enables (true) or disables (false) publishing an artifact.").withRank(AMinusSetting) - val packagedArtifact = taskKey[(Artifact, File)]("Generates a packaged artifact, returning the Artifact and the produced File.").withRank(CTask) + val packagedArtifact = taskKey[(Artifact, HashedVirtualFileRef)]("Generates a packaged artifact, returning the Artifact and the produced File.").withRank(CTask) val checksums = settingKey[Seq[String]]("The list of checksums to generate and to verify for dependencies.").withRank(BSetting) val forceUpdatePeriod = settingKey[Option[FiniteDuration]]("Duration after which to force a full update to occur").withRank(CSetting) val versionScheme = settingKey[Option[String]]("""Version scheme used for the subproject: Supported values are Some("early-semver"), Some("pvp"), and Some("semver-spec")""").withRank(BSetting) @@ -584,6 +609,7 @@ object Keys { val forcegc = settingKey[Boolean]("Enables (true) or disables (false) forcing garbage collection after task run when needed.").withRank(BMinusSetting) val minForcegcInterval = settingKey[Duration]("Minimal interval to check for forcing garbage collection.") val settingsData = std.FullInstance.settingsData + @cacheLevel(include = Array.empty) val streams = taskKey[TaskStreams]("Provides streams for 
logging and persisting data.").withRank(DTask) val taskDefinitionKey = Def.taskDefinitionKey val (executionRoots, dummyRoots) = Def.dummy[Seq[ScopedKey[_]]]("executionRoots", "The list of root tasks for this task execution. Roots are the top-level tasks that were directly requested to be run.") diff --git a/main/src/main/scala/sbt/Main.scala b/main/src/main/scala/sbt/Main.scala index 72d8ace35..780584abc 100644 --- a/main/src/main/scala/sbt/Main.scala +++ b/main/src/main/scala/sbt/Main.scala @@ -956,10 +956,10 @@ object BuiltinCommands { def doLoadProject(s0: State, action: LoadAction): State = { welcomeBanner(s0) checkSBTVersionChanged(s0) + RemoteCache.initializeRemoteCache(s0) val (s1, base) = Project.loadAction(SessionVar.clear(s0), action) IO.createDirectory(base) val s2 = if (s1 has Keys.stateCompilerCache) s1 else registerCompilerCache(s1) - val (eval, structure) = try Load.defaultLoad(s2, base, s2.log, Project.inPluginProject(s2), Project.extraBuilds(s2)) catch { @@ -979,6 +979,7 @@ object BuiltinCommands { st => setupGlobalFileTreeRepository(addCacheStoreFactoryFactory(st)) ) val s4 = s3.put(Keys.useLog4J.key, Project.extract(s3).get(Keys.useLog4J)) + RemoteCache.initializeRemoteCache(s4) addSuperShellParams(CheckBuildSources.init(LintUnused.lintUnusedFunc(s4))) } diff --git a/main/src/main/scala/sbt/ProjectExtra.scala b/main/src/main/scala/sbt/ProjectExtra.scala index a800e0c6f..723d11b9d 100755 --- a/main/src/main/scala/sbt/ProjectExtra.scala +++ b/main/src/main/scala/sbt/ProjectExtra.scala @@ -8,12 +8,14 @@ package sbt import java.io.File +import java.nio.file.{ Path => NioPath } import java.net.URI import java.util.Locale // import Project._ import Keys.{ stateBuildStructure, bspEnabled, + cacheStores, colorShellPrompt, commands, historyPath, @@ -22,6 +24,7 @@ import Keys.{ shellPrompt, templateResolverInfos, autoStartServer, + rootOutputDirectory, serverHost, serverIdleTimeout, serverLog, @@ -51,7 +54,7 @@ import sbt.internal.util.Types.const // , 
idFun } import sbt.internal.util.complete.DefaultParsers import sbt.internal.server.ServerHandler import sbt.librarymanagement.Configuration -import sbt.util.{ Show, Level } +import sbt.util.{ ActionCacheStore, Show, Level } import sjsonnew.JsonFormat import scala.annotation.targetName import scala.concurrent.{ Await, TimeoutException } @@ -319,6 +322,8 @@ trait ProjectExtra extends Scoped.Syntax: val connectionType: Option[ConnectionType] = get(serverConnectionType) val srvLogLevel: Option[Level.Value] = (ref / serverLog / logLevel).get(structure.data) val hs: Option[Seq[ServerHandler]] = get(ThisBuild / fullServerHandlers) + val caches: Option[Seq[ActionCacheStore]] = get(cacheStores) + val rod: Option[NioPath] = get(rootOutputDirectory) val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true)) val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged( s.definedCommands, @@ -344,6 +349,8 @@ trait ProjectExtra extends Scoped.Syntax: .setCond(colorShellPrompt.key, newPrompt) .setCond(BasicKeys.serverLogLevel, srvLogLevel) .setCond(fullServerHandlers.key, hs) + .setCond(cacheStores.key, caches) + .setCond(rootOutputDirectory.key, rod) s.copy( attributes = newAttrs, definedCommands = newDefinedCommands diff --git a/main/src/main/scala/sbt/RemoteCache.scala b/main/src/main/scala/sbt/RemoteCache.scala index ae7622be5..3044ae62d 100644 --- a/main/src/main/scala/sbt/RemoteCache.scala +++ b/main/src/main/scala/sbt/RemoteCache.scala @@ -9,7 +9,7 @@ package sbt package internal import java.io.File -import java.nio.file.Path +import java.nio.file.{ Files, Path } import org.apache.ivy.core.module.descriptor.{ DefaultArtifact, Artifact => IArtifact } import org.apache.ivy.core.report.DownloadStatus @@ -22,9 +22,16 @@ import sbt.ProjectExtra.* import sbt.ScopeFilter.Make._ import sbt.SlashSyntax0._ import sbt.coursierint.LMCoursier -import sbt.internal.inc.{ HashUtil, JarUtils } +import sbt.internal.inc.{ + CompileOutput, + 
FileAnalysisStore, + HashUtil, + JarUtils, + MappedFileConverter +} import sbt.internal.librarymanagement._ import sbt.internal.remotecache._ +import sbt.internal.inc.Analysis import sbt.io.IO import sbt.io.syntax._ import sbt.librarymanagement._ @@ -34,15 +41,89 @@ import sbt.nio.FileStamp import sbt.nio.Keys.{ inputFileStamps, outputFileStamps } import sbt.std.TaskExtra._ import sbt.util.InterfaceUtil.toOption -import sbt.util.Logger +import sbt.util.{ + ActionCacheStore, + AggregateActionCacheStore, + CacheImplicits, + DiskActionCacheStore, + InMemoryActionCacheStore, + Logger +} +import sjsonnew.JsonFormat +import xsbti.{ HashedVirtualFileRef, VirtualFileRef } +import xsbti.compile.{ AnalysisContents, CompileAnalysis, MiniSetup, MiniOptions } import scala.annotation.nowarn +import scala.collection.mutable object RemoteCache { final val cachedCompileClassifier = "cached-compile" final val cachedTestClassifier = "cached-test" final val commitLength = 10 + def cacheStore: ActionCacheStore = Def.cacheStore + + // TODO: cap with caffeine + private[sbt] val analysisStore: mutable.Map[HashedVirtualFileRef, CompileAnalysis] = + mutable.Map.empty + + // TODO: figure out a good timing to initialize cache + // currently this is called twice so metabuild can call compile with a minimal setting + private[sbt] def initializeRemoteCache(s: State): Unit = + val outDir = + s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath()) + Def._outputDirectory = Some(outDir) + val caches = s.get(BasicKeys.cacheStores) + caches match + case Some(xs) => Def._cacheStore = AggregateActionCacheStore(xs) + case None => + val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath() + Def._cacheStore = DiskActionCacheStore(tempDiskCache) + + private[sbt] def getCachedAnalysis(ref: String): CompileAnalysis = + getCachedAnalysis(CacheImplicits.strToHashedVirtualFileRef(ref)) + private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis = + 
analysisStore.getOrElseUpdate( + ref, { + val vfs = cacheStore.getBlobs(ref :: Nil) + if vfs.nonEmpty then + val outputDirectory = Def.cacheConfiguration.outputDirectory + cacheStore.syncBlobs(vfs, outputDirectory).headOption match + case Some(file) => FileAnalysisStore.binary(file.toFile()).get.get.getAnalysis + case None => Analysis.empty + else Analysis.empty + } + ) + + private[sbt] val tempConverter: MappedFileConverter = MappedFileConverter.empty + private[sbt] def postAnalysis(analysis: CompileAnalysis): Option[HashedVirtualFileRef] = + IO.withTemporaryFile("analysis", ".tmp", true): file => + val output = CompileOutput.empty + val option = MiniOptions.of(Array(), Array(), Array()) + val setup = MiniSetup.of( + output, + option, + "", + xsbti.compile.CompileOrder.Mixed, + false, + Array() + ) + FileAnalysisStore.binary(file).set(AnalysisContents.create(analysis, setup)) + val vf = tempConverter.toVirtualFile(file.toPath) + val refs = cacheStore.putBlobs(vf :: Nil) + refs.headOption match + case Some(ref) => + analysisStore(ref) = analysis + Some(ref) + case None => None + + private[sbt] def artifactToStr(art: Artifact): String = { + import LibraryManagementCodec._ + import sjsonnew.support.scalajson.unsafe._ + val format: JsonFormat[Artifact] = summon[JsonFormat[Artifact]] + CompactPrinter(Converter.toJsonUnsafe(art)(format)) + } + def gitCommitId: String = scala.sys.process.Process("git rev-parse HEAD").!!.trim.take(commitLength) @@ -67,6 +148,17 @@ object RemoteCache { // base is used only to resolve relative paths, which should never happen IvyPaths(base.toString, localCacheDirectory.value.toString) }, + rootOutputDirectory := { + appConfiguration.value.baseDirectory + .toPath() + .resolve("target") + .resolve("out") + }, + cacheStores := { + List( + DiskActionCacheStore(localCacheDirectory.value.toPath()) + ) + }, ) lazy val projectSettings: Seq[Def.Setting[_]] = (Seq( @@ -117,17 +209,20 @@ object RemoteCache { remoteCachePom / pushRemoteCacheArtifact := 
true, remoteCachePom := { val s = streams.value + val converter = fileConverter.value val config = (remoteCachePom / makePomConfiguration).value val publisher = Keys.publisher.value publisher.makePomFile((pushRemoteCache / ivyModule).value, config, s.log) - config.file.get + converter.toVirtualFile(config.file.get.toPath) }, remoteCachePom / artifactPath := { Defaults.prefixArtifactPathSetting(makePom / artifact, "remote-cache").value }, remoteCachePom / makePomConfiguration := { + val converter = fileConverter.value val config = makePomConfiguration.value - config.withFile((remoteCachePom / artifactPath).value) + val out = converter.toPath((remoteCachePom / artifactPath).value) + config.withFile(out.toFile()) }, remoteCachePom / remoteCacheArtifact := { PomRemoteCacheArtifact((makePom / artifact).value, remoteCachePom) @@ -178,17 +273,20 @@ object RemoteCache { inTask(packageCache)( Seq( packageCache.in(Defaults.TaskZero) := { + val converter = fileConverter.value val original = packageBin.in(Defaults.TaskZero).value + val originalFile = converter.toPath(original) val artp = artifactPath.value + val artpFile = converter.toPath(artp) val af = compileAnalysisFile.value - IO.copyFile(original, artp) + IO.copyFile(originalFile.toFile(), artpFile.toFile()) // skip zip manipulation if the artp is a blank file - if (af.exists && artp.length() > 0) { - JarUtils.includeInJar(artp, Vector(af -> s"META-INF/inc_compile.zip")) + if (af.exists && artpFile.toFile().length() > 0) { + JarUtils.includeInJar(artpFile.toFile(), Vector(af -> s"META-INF/inc_compile.zip")) } val rf = getResourceFilePaths().value if (rf.exists) { - JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/copy-resources.txt")) + JarUtils.includeInJar(artpFile.toFile(), Vector(rf -> s"META-INF/copy-resources.txt")) } // val testStream = (test / streams).?.value // testStream foreach { s => @@ -197,7 +295,7 @@ object RemoteCache { // JarUtils.includeInJar(artp, Vector(sf -> s"META-INF/succeeded_tests")) // } // 
} - artp + converter.toVirtualFile(artpFile) }, pushRemoteCacheArtifact := true, remoteCacheArtifact := cacheArtifactTask.value, @@ -238,12 +336,17 @@ object RemoteCache { combineHash(extractHash(inputs) ++ extractHash(cp) ++ extraInc) }, pushRemoteCacheConfiguration := { + val converter = fileConverter.value + val artifacts = (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector.map { + case (a, vf) => + a -> converter.toPath(vf).toFile + } Classpaths.publishConfig( (pushRemoteCacheConfiguration / publishMavenStyle).value, Classpaths.deliverPattern(crossTarget.value), if (isSnapshot.value) "integration" else "release", ivyConfigurations.value.map(c => ConfigRef(c.name)).toVector, - (pushRemoteCacheConfiguration / packagedArtifacts).value.toVector, + artifacts, (pushRemoteCacheConfiguration / checksums).value.toVector, Classpaths.getPublishTo(pushRemoteCacheTo.value).name, ivyLoggingLevel.value, @@ -455,12 +558,12 @@ object RemoteCache { // } } - private def defaultArtifactTasks: Seq[TaskKey[File]] = + private def defaultArtifactTasks: Seq[TaskKey[HashedVirtualFileRef]] = Seq(Compile / packageCache, Test / packageCache) private def enabledOnly[A]( key: SettingKey[A], - pkgTasks: Seq[TaskKey[File]] + pkgTasks: Seq[TaskKey[HashedVirtualFileRef]] ): Def.Initialize[Seq[A]] = (Classpaths.forallIn(key, pkgTasks) zipWith Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) => diff --git a/main/src/main/scala/sbt/ScriptedPlugin.scala b/main/src/main/scala/sbt/ScriptedPlugin.scala index b24bac1a7..6db60e4ce 100644 --- a/main/src/main/scala/sbt/ScriptedPlugin.scala +++ b/main/src/main/scala/sbt/ScriptedPlugin.scala @@ -198,6 +198,13 @@ object ScriptedPlugin extends AutoPlugin { } private[this] def getJars(config: Configuration): Initialize[Task[PathFinder]] = Def.task { - PathFinder(Classpaths.managedJars(config, classpathTypes.value, Keys.update.value).map(_.data)) + val converter = Keys.fileConverter.value + PathFinder( + 
Classpaths + .managedJars(config, classpathTypes.value, Keys.update.value, converter) + .map(_.data) + .map(converter.toPath) + .map(_.toFile()) + ) } } diff --git a/main/src/main/scala/sbt/internal/APIMappings.scala b/main/src/main/scala/sbt/internal/APIMappings.scala index e07dda236..f33b9c7ff 100644 --- a/main/src/main/scala/sbt/internal/APIMappings.scala +++ b/main/src/main/scala/sbt/internal/APIMappings.scala @@ -9,34 +9,45 @@ package sbt package internal import java.io.File -import java.net.{ MalformedURLException, URL } +import java.net.{ MalformedURLException, URI, URL } import sbt.internal.librarymanagement.mavenint.SbtPomExtraProperties import sbt.librarymanagement.ModuleID import sbt.internal.util.Attributed import sbt.util.Logger +import xsbti.HashedVirtualFileRef private[sbt] object APIMappings { - def extract(cp: Seq[Attributed[File]], log: Logger): Seq[(File, URL)] = + def extract( + cp: Seq[Attributed[HashedVirtualFileRef]], + log: Logger + ): Seq[(HashedVirtualFileRef, URL)] = cp.flatMap(entry => extractFromEntry(entry, log)) - def extractFromEntry(entry: Attributed[File], log: Logger): Option[(File, URL)] = - entry.get(Keys.entryApiURL) match { - case Some(u) => Some((entry.data, u)) + def extractFromEntry( + entry: Attributed[HashedVirtualFileRef], + log: Logger + ): Option[(HashedVirtualFileRef, URL)] = + entry.get(Keys.entryApiURL) match + case Some(u) => Some((entry.data, URI(u).toURL)) case None => - entry.get(Keys.moduleID.key).flatMap { mid => + entry.get(Keys.moduleIDStr).flatMap { str => + val mid = Classpaths.moduleIdJsonKeyFormat.read(str) extractFromID(entry.data, mid, log) } - } - private[this] def extractFromID(entry: File, mid: ModuleID, log: Logger): Option[(File, URL)] = - for { + private[this] def extractFromID( + entry: HashedVirtualFileRef, + mid: ModuleID, + log: Logger + ): Option[(HashedVirtualFileRef, URL)] = + for urlString <- mid.extraAttributes.get(SbtPomExtraProperties.POM_API_KEY) u <- parseURL(urlString, entry, log) - 
} yield (entry, u) + yield (entry, u) - private[this] def parseURL(s: String, forEntry: File, log: Logger): Option[URL] = + private[this] def parseURL(s: String, forEntry: HashedVirtualFileRef, log: Logger): Option[URL] = try Some(new URL(s)) catch { case e: MalformedURLException => @@ -44,8 +55,8 @@ private[sbt] object APIMappings { None } - def store[T](attr: Attributed[T], entryAPI: Option[URL]): Attributed[T] = entryAPI match { - case None => attr - case Some(u) => attr.put(Keys.entryApiURL, u) - } + def store[A](attr: Attributed[A], entryAPI: Option[URL]): Attributed[A] = + entryAPI match + case None => attr + case Some(u) => attr.put(Keys.entryApiURL, u.toString) } diff --git a/main/src/main/scala/sbt/internal/BuildDef.scala b/main/src/main/scala/sbt/internal/BuildDef.scala index 6bef7f9ff..470285d62 100644 --- a/main/src/main/scala/sbt/internal/BuildDef.scala +++ b/main/src/main/scala/sbt/internal/BuildDef.scala @@ -15,6 +15,7 @@ import Def.Setting import sbt.io.Hash import sbt.internal.util.Attributed import sbt.internal.inc.ReflectUtilities +import xsbti.HashedVirtualFileRef trait BuildDef { def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects @@ -73,6 +74,9 @@ private[sbt] object BuildDef { ) def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] = - in.flatMap { _.metadata.get(Keys.analysis) } - + in.flatMap: a => + a.metadata + .get(Keys.analysis) + .map: str => + RemoteCache.getCachedAnalysis(str) } diff --git a/main/src/main/scala/sbt/internal/BuildStructure.scala b/main/src/main/scala/sbt/internal/BuildStructure.scala index 6aa878cb6..ef23fb07e 100644 --- a/main/src/main/scala/sbt/internal/BuildStructure.scala +++ b/main/src/main/scala/sbt/internal/BuildStructure.scala @@ -23,6 +23,7 @@ import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attribute import sbt.internal.util.Attributed.data import sbt.util.Logger import scala.annotation.nowarn +import xsbti.FileConverter 
final class BuildStructure( val units: Map[URI, LoadedBuildUnit], @@ -109,9 +110,10 @@ final class LoadedBuildUnit( * It includes build definition and plugin classes and classes for .sbt file statements and expressions. */ def classpath: Seq[Path] = + val converter = unit.converter unit.definitions.target.map( _.toPath() - ) ++ unit.plugins.classpath.map(_.toPath()) ++ unit.definitions.dslDefinitions.classpath + ) ++ unit.plugins.classpath.map(converter.toPath) ++ unit.definitions.dslDefinitions.classpath /** * The class loader to use for this build unit's publicly visible code. @@ -239,7 +241,7 @@ final class LoadedPlugins( val loader: ClassLoader, val detected: DetectedPlugins ) { - def fullClasspath: Seq[Attributed[File]] = pluginData.classpath + def fullClasspath: Def.Classpath = pluginData.classpath def classpath = data(fullClasspath) } @@ -253,7 +255,8 @@ final class BuildUnit( val uri: URI, val localBase: File, val definitions: LoadedDefinitions, - val plugins: LoadedPlugins + val plugins: LoadedPlugins, + val converter: FileConverter, ) { override def toString = if (uri.getScheme == "file") localBase.toString diff --git a/main/src/main/scala/sbt/internal/ClassLoaders.scala b/main/src/main/scala/sbt/internal/ClassLoaders.scala index 0dc63bbf7..22c824c8e 100644 --- a/main/src/main/scala/sbt/internal/ClassLoaders.scala +++ b/main/src/main/scala/sbt/internal/ClassLoaders.scala @@ -25,6 +25,7 @@ import sbt.nio.FileStamp import sbt.nio.FileStamp.LastModified import sbt.nio.Keys._ import sbt.util.Logger +import xsbti.HashedVirtualFileRef private[sbt] object ClassLoaders { private implicit class SeqFileOps(val files: Seq[File]) extends AnyVal { @@ -36,14 +37,21 @@ private[sbt] object ClassLoaders { */ private[sbt] def testTask: Def.Initialize[Task[ClassLoader]] = Def.task { val si = scalaInstance.value - val cp = fullClasspath.value.map(_.data) + val converter = fileConverter.value + val cp = fullClasspath.value + .map(_.data) + .map(converter.toPath) + 
.map(_.toFile) val dependencyStamps = modifiedTimes((dependencyClasspathFiles / outputFileStamps).value).toMap def getLm(f: File): Long = dependencyStamps.getOrElse(f, IO.getModifiedTimeOrZero(f)) val rawCP = cp.map(f => f -> getLm(f)) val fullCP = - if (si.isManagedVersion) rawCP + if si.isManagedVersion then rawCP else si.libraryJars.map(j => j -> IO.getModifiedTimeOrZero(j)).toSeq ++ rawCP - val exclude = dependencyJars(exportedProducts).value.toSet ++ si.libraryJars + val exclude: Set[File] = dependencyJars(exportedProducts).value + .map(converter.toPath) + .map(_.toFile) + .toSet ++ si.libraryJars val logger = state.value.globalLogging.full val close = closeClassLoaders.value val allowZombies = allowZombieClassLoaders.value @@ -51,7 +59,11 @@ private[sbt] object ClassLoaders { strategy = classLoaderLayeringStrategy.value, si = si, fullCP = fullCP, - allDependenciesSet = dependencyJars(dependencyClasspath).value.filterNot(exclude).toSet, + allDependenciesSet = dependencyJars(dependencyClasspath).value + .map(converter.toPath) + .map(_.toFile) + .filterNot(exclude) + .toSet, cache = extendedClassLoaderCache.value, resources = ClasspathUtil.createClasspathResources(fullCP.map(_._1.toPath), si), tmp = IO.createUniqueDirectory(taskTemporaryDirectory.value), @@ -71,6 +83,7 @@ private[sbt] object ClassLoaders { val opts = forkOptions.value new ForkRun(opts) else { + val converter = fileConverter.value val resolvedScope = resolvedScoped.value.scope val instance = scalaInstance.value val s = streams.value @@ -91,20 +104,25 @@ private[sbt] object ClassLoaders { ) s.log.warn(s"$showJavaOptions will be ignored, $showFork is set to false") - val exclude = dependencyJars(exportedProducts).value.toSet ++ instance.libraryJars - val allDeps = dependencyJars(dependencyClasspath).value.filterNot(exclude) + val exclude = dependencyJars(exportedProducts).value + .map(converter.toPath) + .map(_.toFile) + .toSet ++ instance.libraryJars + val allDeps = 
dependencyJars(dependencyClasspath).value + .map(converter.toPath) + .map(_.toFile) + .filterNot(exclude) val logger = state.value.globalLogging.full val allowZombies = allowZombieClassLoaders.value val close = closeClassLoaders.value val newLoader = - (classpath: Seq[File]) => { - val mappings = classpath.map(f => f.getName -> f).toMap - val cp = classpath.map(_.toPath) + (cp: Seq[Path]) => { + val mappings = cp.map(_.toFile()).map(f => f.getName -> f).toMap val transformedDependencies = allDeps.map(f => mappings.getOrElse(f.getName, f)) buildLayers( strategy = classLoaderLayeringStrategy.value: @sbtUnchecked, si = instance, - fullCP = classpath.map(f => f -> IO.getModifiedTimeOrZero(f)), + fullCP = cp.map(_.toFile()).map(f => f -> IO.getModifiedTimeOrZero(f)), allDependenciesSet = transformedDependencies.toSet, cache = extendedClassLoaderCache.value: @sbtUnchecked, resources = ClasspathUtil.createClasspathResources(cp, instance), @@ -218,8 +236,9 @@ private[sbt] object ClassLoaders { } private def dependencyJars( - key: sbt.TaskKey[Seq[Attributed[File]]] - ): Def.Initialize[Task[Seq[File]]] = Def.task(data(key.value).filter(_.getName.endsWith(".jar"))) + key: sbt.TaskKey[Seq[Attributed[HashedVirtualFileRef]]] + ): Def.Initialize[Task[Seq[HashedVirtualFileRef]]] = + Def.task(data(key.value).filter(_.id.endsWith(".jar"))) private[this] def modifiedTimes(stamps: Seq[(Path, FileStamp)]): Seq[(File, Long)] = stamps.map { case (p, LastModified(lm)) => p.toFile -> lm diff --git a/main/src/main/scala/sbt/internal/ClasspathImpl.scala b/main/src/main/scala/sbt/internal/ClasspathImpl.scala index cf4e769cf..36bfd5537 100644 --- a/main/src/main/scala/sbt/internal/ClasspathImpl.scala +++ b/main/src/main/scala/sbt/internal/ClasspathImpl.scala @@ -23,30 +23,28 @@ import sbt.librarymanagement.Configurations.names import sbt.std.TaskExtra._ import sbt.util._ import scala.collection.JavaConverters._ +import xsbti.{ HashedVirtualFileRef, VirtualFileRef } import 
xsbti.compile.CompileAnalysis private[sbt] object ClasspathImpl { // Since we can't predict the path for pickleProduct, // we can't reduce the track level. - def exportedPicklesTask: Initialize[Task[VirtualClasspath]] = + def exportedPicklesTask: Initialize[Task[Classpath]] = Def.task { // conditional task: do not refactor - if (exportPipelining.value) { + if exportPipelining.value then val module = projectID.value val config = configuration.value val products = pickleProducts.value val analysis = compileEarly.value val xs = products map { _ -> analysis } - for { (f, analysis) <- xs } yield APIMappings + for (f, analysis) <- xs + yield APIMappings .store(analyzed(f, analysis), apiURL.value) - .put(moduleID.key, module) - .put(configuration.key, config) - } else { - val c = fileConverter.value - val ps = exportedProducts.value - ps.map(attr => attr.map(x => c.toVirtualFile(x.toPath))) - } + .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) + .put(Keys.configurationStr, config.name) + else exportedProducts.value } def trackedExportedProducts(track: TrackLevel): Initialize[Task[Classpath]] = @@ -55,11 +53,12 @@ private[sbt] object ClasspathImpl { val art = (packageBin / artifact).value val module = projectID.value val config = configuration.value - for { (f, analysis) <- trackedExportedProductsImplTask(track).value } yield APIMappings - .store(analyzed(f, analysis), apiURL.value) - .put(artifact.key, art) - .put(moduleID.key, module) - .put(configuration.key, config) + for (f, analysis) <- trackedExportedProductsImplTask(track).value + yield APIMappings + .store(analyzed[HashedVirtualFileRef](f, analysis), apiURL.value) + .put(Keys.artifactStr, RemoteCache.artifactToStr(art)) + .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) + .put(Keys.configurationStr, config.name) } def trackedExportedJarProducts(track: TrackLevel): Initialize[Task[Classpath]] = @@ -68,16 +67,18 @@ private[sbt] object ClasspathImpl { val art = (packageBin 
/ artifact).value val module = projectID.value val config = configuration.value - for { (f, analysis) <- trackedJarProductsImplTask(track).value } yield APIMappings + val converter = fileConverter.value + for (f, analysis) <- trackedJarProductsImplTask(track).value + yield APIMappings .store(analyzed(f, analysis), apiURL.value) - .put(artifact.key, art) - .put(moduleID.key, module) - .put(configuration.key, config) + .put(Keys.artifactStr, RemoteCache.artifactToStr(art)) + .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) + .put(Keys.configurationStr, config.name) } private[this] def trackedExportedProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = Def.taskIf { if { val _ = (packageBin / dynamicDependency).value @@ -88,7 +89,7 @@ private[sbt] object ClasspathImpl { private[this] def trackedNonJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = (Def .task { val dirs = productDirectories.value @@ -98,41 +99,55 @@ private[sbt] object ClasspathImpl { .flatMapTask { case (TrackLevel.TrackAlways, _, _) => Def.task { - products.value map { (_, compile.value) } + val converter = fileConverter.value + val a = compile.value + products.value + .map { x => converter.toVirtualFile(x.toPath()) } + .map { (_, a) } } case (TrackLevel.TrackIfMissing, dirs, view) if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty => Def.task { - products.value map { (_, compile.value) } + val converter = fileConverter.value + val a = compile.value + products.value + .map { x => converter.toVirtualFile(x.toPath()) } + .map { (_, a) } } case (_, dirs, _) => Def.task { + val converter = fileConverter.value val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty) - dirs.map(_ -> analysis) + dirs + .map { x => 
converter.toVirtualFile(x.toPath()) } + .map(_ -> analysis) } } private[this] def trackedJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(File, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = (Def .task { - val jar = (packageBin / artifactPath).value - (TrackLevel.intersection(track, exportToInternal.value), jar) + val converter = fileConverter.value + val vf = (packageBin / artifactPath).value + val jar = converter.toPath(vf) + (TrackLevel.intersection(track, exportToInternal.value), vf, jar) }) .flatMapTask { - case (TrackLevel.TrackAlways, _) => + case (TrackLevel.TrackAlways, _, _) => Def.task { Seq((packageBin.value, compile.value)) } - case (TrackLevel.TrackIfMissing, jar) if !jar.exists => + case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists => Def.task { Seq((packageBin.value, compile.value)) } - case (_, jar) => + case (_, vf, _) => Def.task { + val converter = fileConverter.value val analysisOpt = previousCompile.value.analysis.toOption - Seq(jar) map { x => + Seq(vf).map(converter.toPath).map(converter.toVirtualFile).map { x => ( x, if (analysisOpt.isDefined) analysisOpt.get @@ -186,7 +201,7 @@ private[sbt] object ClasspathImpl { ) } - def internalDependencyPicklePathTask: Initialize[Task[VirtualClasspath]] = { + def internalDependencyPicklePathTask: Initialize[Task[Classpath]] = { def implTask( projectRef: ProjectRef, conf: Configuration, @@ -195,8 +210,8 @@ private[sbt] object ClasspathImpl { deps: BuildDependencies, track: TrackLevel, log: Logger - ): Initialize[Task[VirtualClasspath]] = - Def.value[Task[VirtualClasspath]] { + ): Initialize[Task[Classpath]] = + Def.value[Task[Classpath]] { interDependencies(projectRef, deps, conf, self, data, track, false, log)( exportedPickles, exportedPickles, @@ -243,11 +258,20 @@ private[sbt] object ClasspathImpl { log: Logger ): Initialize[Task[Classpath]] = Def.value[Task[Classpath]] { - interDependencies(projectRef, deps, conf, self, 
data, track, false, log)( + interDependencies[Attributed[HashedVirtualFileRef]]( + projectRef, + deps, + conf, + self, + data, + track, + false, + log, + )( exportedProductJarsNoTracking, exportedProductJarsIfMissing, exportedProductJars - ) + ): Task[Classpath] } def unmanagedDependenciesTask: Initialize[Task[Classpath]] = @@ -328,7 +352,12 @@ private[sbt] object ClasspathImpl { } def analyzed[A](data: A, analysis: CompileAnalysis) = - Attributed.blank(data).put(Keys.analysis, analysis) + RemoteCache.postAnalysis(analysis) match + case Some(ref) => + Attributed + .blank(data) + .put(Keys.analysis, CacheImplicits.hashedVirtualFileRefToStr(ref)) + case None => Attributed.blank(data) def interSort( projectRef: ProjectRef, diff --git a/main/src/main/scala/sbt/internal/CompileInputs2.scala b/main/src/main/scala/sbt/internal/CompileInputs2.scala new file mode 100644 index 000000000..7d12771ef --- /dev/null +++ b/main/src/main/scala/sbt/internal/CompileInputs2.scala @@ -0,0 +1,38 @@ +package sbt.internal + +import scala.reflect.ClassTag +import sjsonnew.* +import xsbti.HashedVirtualFileRef + +// CompileOption has the list of sources etc +case class CompileInputs2( + classpath: Vector[HashedVirtualFileRef], + sources: Vector[HashedVirtualFileRef], + scalacOptions: Vector[String], + javacOptions: Vector[String], +) + +object CompileInputs2: + import sbt.util.CacheImplicits.given + + given IsoLList.Aux[ + CompileInputs2, + Vector[HashedVirtualFileRef] :*: Vector[HashedVirtualFileRef] :*: Vector[String] :*: + Vector[String] :*: LNil + ] = + LList.iso( + { (v: CompileInputs2) => + ("classpath", v.classpath) :*: + ("sources", v.sources) :*: + ("scalacOptions", v.scalacOptions) :*: + ("javacOptions", v.javacOptions) :*: + LNil + }, + { + (in: Vector[HashedVirtualFileRef] :*: Vector[HashedVirtualFileRef] :*: Vector[String] :*: + Vector[String] :*: LNil) => + CompileInputs2(in.head, in.tail.head, in.tail.tail.head, in.tail.tail.tail.head) + } + ) + given 
JsonFormat[CompileInputs2] = summon +end CompileInputs2 diff --git a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala index 6591bba1c..698f2c00a 100644 --- a/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala +++ b/main/src/main/scala/sbt/internal/DefaultBackgroundJobService.scala @@ -30,6 +30,8 @@ import scala.concurrent.duration._ import scala.util.Try import sbt.util.LoggerContext import java.util.concurrent.TimeoutException +import xsbti.FileConverter +import xsbti.HashedVirtualFileRef /** * Interface between sbt and a thing running in the background. @@ -232,18 +234,21 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe products: Classpath, full: Classpath, workingDirectory: File, - hashFileContents: Boolean + hashFileContents: Boolean, + converter: FileConverter, ): Classpath = { - def syncTo(dir: File)(source0: Attributed[File]): Attributed[File] = { - val source = source0.data + def syncTo( + dir: File + )(source0: Attributed[HashedVirtualFileRef]): Attributed[HashedVirtualFileRef] = { + val source1 = source0.data + val source = converter.toPath(source1).toFile() val hash8 = Hash.toHex(Hash(source.toString)).take(8) val id: File => String = if (hashFileContents) hash else lastModified val dest = dir / hash8 / id(source) / source.getName - if (!dest.exists) { + if !dest.exists then if (source.isDirectory) IO.copyDirectory(source, dest) else IO.copyFile(source, dest) - } - Attributed.blank(dest) + Attributed.blank(converter.toVirtualFile(dest.toPath)) } val xs = (products.toVector map { syncTo(workingDirectory / "target") }) ++ ((full diff products) map { syncTo(serviceTempDir / "target") }) @@ -298,9 +303,10 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe override def copyClasspath( products: Classpath, full: Classpath, - workingDirectory: File + workingDirectory: File, + converter: 
FileConverter, ): Classpath = - copyClasspath(products, full, workingDirectory, hashFileContents = true) + copyClasspath(products, full, workingDirectory, hashFileContents = true, converter) } private[sbt] object BackgroundThreadPool { diff --git a/main/src/main/scala/sbt/internal/GlobalPlugin.scala b/main/src/main/scala/sbt/internal/GlobalPlugin.scala index e4b033e91..dae07423d 100644 --- a/main/src/main/scala/sbt/internal/GlobalPlugin.scala +++ b/main/src/main/scala/sbt/internal/GlobalPlugin.scala @@ -46,7 +46,7 @@ object GlobalPlugin { ) private[this] def injectInternalClasspath( config: Configuration, - cp: Seq[Attributed[File]] + cp: Def.Classpath, ): Setting[_] = (config / internalDependencyClasspath) ~= { prev => (prev ++ cp).distinct diff --git a/main/src/main/scala/sbt/internal/IvyConsole.scala b/main/src/main/scala/sbt/internal/IvyConsole.scala index dad54788a..4761bc3b2 100644 --- a/main/src/main/scala/sbt/internal/IvyConsole.scala +++ b/main/src/main/scala/sbt/internal/IvyConsole.scala @@ -51,7 +51,11 @@ object IvyConsole { val depSettings: Seq[Setting[_]] = Seq( libraryDependencies ++= managed.reverse, resolvers ++= repos.reverse.toVector, - Compile / unmanagedJars ++= Attributed blankSeq unmanaged.reverse, + Compile / unmanagedJars ++= { + val converter = fileConverter.value + val u = unmanaged.reverse.map(_.toPath).map(converter.toVirtualFile) + Attributed.blankSeq(u) + }, Global / logLevel := Level.Warn, Global / showSuccess := false ) diff --git a/main/src/main/scala/sbt/internal/Load.scala b/main/src/main/scala/sbt/internal/Load.scala index e44079a04..873899eb6 100755 --- a/main/src/main/scala/sbt/internal/Load.scala +++ b/main/src/main/scala/sbt/internal/Load.scala @@ -28,7 +28,7 @@ import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResoluti import sbt.librarymanagement.{ Configuration, Configurations, Resolver } import sbt.nio.Settings import sbt.util.{ Logger, Show } -import xsbti.VirtualFile +import xsbti.{ 
HashedVirtualFileRef, VirtualFile } import xsbti.compile.{ ClasspathOptionsUtil, Compilers } import java.io.File import java.net.URI @@ -71,15 +71,20 @@ private[sbt] object Load { val launcher = scalaProvider.launcher val stagingDirectory = getStagingDirectory(state, globalBase).getCanonicalFile val javaHome = Paths.get(sys.props("java.home")) + val out = baseDirectory.toPath.resolve("target").resolve("out") val rootPaths = Map( + "OUT" -> out, "BASE" -> baseDirectory.toPath, "SBT_BOOT" -> launcher.bootDirectory.toPath, "IVY_HOME" -> launcher.ivyHome.toPath, "JAVA_HOME" -> javaHome, ) val loader = getClass.getClassLoader - val classpath = - Attributed.blankSeq(provider.mainClasspath.toIndexedSeq ++ scalaProvider.jars.toIndexedSeq) + val converter = MappedFileConverter(rootPaths, false) + val cp0 = provider.mainClasspath.toIndexedSeq ++ scalaProvider.jars.toIndexedSeq + val classpath = Attributed.blankSeq( + cp0.map(_.toPath).map(p => converter.toVirtualFile(p): HashedVirtualFileRef) + ) val ivyConfiguration = InlineIvyConfiguration() .withPaths( @@ -127,7 +132,7 @@ private[sbt] object Load { inject, None, Nil, - converter = MappedFileConverter(rootPaths, false), + converter = converter, log ) } @@ -172,10 +177,11 @@ private[sbt] object Load { def buildGlobalSettings( base: File, files: Seq[VirtualFile], - config: LoadBuildConfiguration + config: LoadBuildConfiguration, ): ClassLoader => Seq[Setting[_]] = { + val converter = config.converter val eval = mkEval( - classpath = data(config.globalPluginClasspath).map(_.toPath()), + classpath = data(config.globalPluginClasspath).map(converter.toPath), base = base, options = defaultEvalOptions, ) @@ -443,9 +449,10 @@ private[sbt] object Load { } def mkEval(unit: BuildUnit): Eval = { + val converter = unit.converter val defs = unit.definitions mkEval( - (defs.target ++ unit.plugins.classpath).map(_.toPath()), + (defs.target).map(_.toPath) ++ unit.plugins.classpath.map(converter.toPath), defs.base, 
unit.plugins.pluginData.scalacOptions, ) @@ -541,7 +548,7 @@ private[sbt] object Load { } def addOverrides(unit: BuildUnit, loaders: BuildLoader): BuildLoader = - loaders updatePluginManagement PluginManagement.extractOverrides(unit.plugins.fullClasspath) + loaders.updatePluginManagement(PluginManagement.extractOverrides(unit.plugins.fullClasspath)) def addResolvers(unit: BuildUnit, isRoot: Boolean, loaders: BuildLoader): BuildLoader = unit.definitions.builds.flatMap(_.buildLoaders).toList match { @@ -740,6 +747,7 @@ private[sbt] object Load { val buildLevelExtraProjects = plugs.detected.autoPlugins flatMap { d => d.value.extraProjects map { _.setProjectOrigin(ProjectOrigin.ExtraProject) } } + val converter = config.converter // NOTE - because we create an eval here, we need a clean-eval later for this URI. lazy val eval = timed("Load.loadUnit: mkEval", log) { @@ -752,7 +760,7 @@ private[sbt] object Load { // new BuildServerEvalReporter(buildTarget, new ConsoleReporter(settings)) // } mkEval( - classpath = plugs.classpath.map(_.toPath()), + classpath = plugs.classpath.map(converter.toPath), defDir, plugs.pluginData.scalacOptions, mkReporter, @@ -834,7 +842,7 @@ private[sbt] object Load { plugs.detected.builds.names, valDefinitions ) - new BuildUnit(uri, normBase, loadedDefs, plugs) + new BuildUnit(uri, normBase, loadedDefs, plugs, converter) } private[this] def autoID( @@ -1266,11 +1274,10 @@ private[sbt] object Load { ) } - def globalPluginClasspath(globalPlugin: Option[GlobalPlugin]): Seq[Attributed[File]] = - globalPlugin match { + def globalPluginClasspath(globalPlugin: Option[GlobalPlugin]): Def.Classpath = + globalPlugin match case Some(cp) => cp.data.fullClasspath case None => Nil - } /** These are the settings defined when loading a project "meta" build. 
*/ @nowarn @@ -1287,6 +1294,7 @@ private[sbt] object Load { val managedSrcDirs = (Configurations.Compile / managedSourceDirectories).value val managedSrcs = (Configurations.Compile / managedSources).value val buildTarget = (Configurations.Compile / bspTargetIdentifier).value + val converter = fileConverter.value PluginData( removeEntries(cp, prod), prod, @@ -1297,7 +1305,8 @@ private[sbt] object Load { unmanagedSrcs, managedSrcDirs, managedSrcs, - Some(buildTarget) + Some(buildTarget), + converter, ) }, scalacOptions += "-Wconf:cat=unused-nowarn:s", @@ -1306,14 +1315,13 @@ private[sbt] object Load { ) private[this] def removeEntries( - cp: Seq[Attributed[File]], - remove: Seq[Attributed[File]] - ): Seq[Attributed[File]] = { + cp: Def.Classpath, + remove: Def.Classpath + ): Def.Classpath = val files = data(remove).toSet cp filter { f => !files.contains(f.data) } - } def enableSbtPlugin(config: LoadBuildConfiguration): LoadBuildConfiguration = config.copy( @@ -1353,7 +1361,19 @@ private[sbt] object Load { loadPluginDefinition( dir, config, - PluginData(config.globalPluginClasspath, Nil, None, None, Nil, Nil, Nil, Nil, Nil, None) + PluginData( + config.globalPluginClasspath, + Nil, + None, + None, + Nil, + Nil, + Nil, + Nil, + Nil, + None, + config.converter, + ) ) def buildPlugins(dir: File, s: State, config: LoadBuildConfiguration): LoadedPlugins = @@ -1392,11 +1412,11 @@ private[sbt] object Load { */ def buildPluginClasspath( config: LoadBuildConfiguration, - depcp: Seq[Attributed[File]] - ): Def.Classpath = { - if (depcp.isEmpty) config.classpath + depcp: Def.Classpath, + ): Def.Classpath = + if depcp.isEmpty + then config.classpath else (depcp ++ config.classpath).distinct - } /** * Creates a classloader with a hierarchical structure, where the parent @@ -1412,22 +1432,27 @@ private[sbt] object Load { config: LoadBuildConfiguration, dependencyClasspath: Def.Classpath, definitionClasspath: Def.Classpath - ): ClassLoader = { + ): ClassLoader = val manager = 
config.pluginManagement - val parentLoader: ClassLoader = { - if (dependencyClasspath.isEmpty) manager.initialLoader - else { + val converter = config.converter + val parentLoader: ClassLoader = + if dependencyClasspath.isEmpty then manager.initialLoader + else // Load only the dependency classpath for the common plugin classloader val loader = manager.loader - loader.add(sbt.io.Path.toURLs(data(dependencyClasspath))) + loader.add( + sbt.io.Path.toURLs( + data(dependencyClasspath) + .map(converter.toPath) + .map(_.toFile()) + ) + ) loader - } - } - // Load the definition classpath separately to avoid conflicts, see #511. - if (definitionClasspath.isEmpty) parentLoader - else ClasspathUtil.toLoader(data(definitionClasspath).map(_.toPath), parentLoader) - } + if definitionClasspath.isEmpty then parentLoader + else + val cp = data(definitionClasspath).map(converter.toPath) + ClasspathUtil.toLoader(cp, parentLoader) def buildPluginDefinition(dir: File, s: State, config: LoadBuildConfiguration): PluginData = { val (eval, pluginDef) = apply(dir, s, config) @@ -1554,9 +1579,10 @@ final case class LoadBuildConfiguration( Nil, Nil, Nil, - None + None, + converter, ) - case None => PluginData(globalPluginClasspath) + case None => PluginData(globalPluginClasspath, converter) } val baseDir = globalPlugin match { case Some(x) => x.base diff --git a/main/src/main/scala/sbt/internal/PluginDiscovery.scala b/main/src/main/scala/sbt/internal/PluginDiscovery.scala index d81563f64..ba6133047 100644 --- a/main/src/main/scala/sbt/internal/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/internal/PluginDiscovery.scala @@ -13,13 +13,15 @@ import java.io.File import java.net.URL import Attributed.data import sbt.internal.BuildDef.analyzed +import xsbti.FileConverter import xsbt.api.{ Discovered, Discovery } import xsbti.compile.CompileAnalysis import sbt.internal.inc.ModuleUtilities import sbt.io.IO +import scala.reflect.ClassTag -object PluginDiscovery { +object PluginDiscovery: /** * 
Relative paths of resources that list top-level modules that are available. @@ -39,8 +41,8 @@ object PluginDiscovery { /** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */ def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = { - def discover[T](resource: String)(implicit manifest: Manifest[T]) = - binarySourceModules[T](data, loader, resource) + def discover[A1: ClassTag](resource: String) = + binarySourceModules[A1](data, loader, resource) import Paths._ // TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself. val defaultAutoPlugins = Seq( @@ -97,13 +99,14 @@ object PluginDiscovery { * available as analyzed source and extending from any of `subclasses` as per [[sourceModuleNames]]. */ def binarySourceModuleNames( - classpath: Seq[Attributed[File]], + classpath: Def.Classpath, + converter: FileConverter, loader: ClassLoader, resourceName: String, subclasses: String* ): Seq[String] = ( - binaryModuleNames(data(classpath), loader, resourceName) ++ + binaryModuleNames(classpath, converter, loader, resourceName) ++ (analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*))) ).distinct @@ -125,42 +128,52 @@ object PluginDiscovery { * doesn't bring in any resources outside of the intended `classpath`, such as from parent loaders. */ def binaryModuleNames( - classpath: Seq[File], + classpath: Def.Classpath, + converter: FileConverter, loader: ClassLoader, resourceName: String - ): Seq[String] = { + ): Seq[String] = import collection.JavaConverters._ - loader.getResources(resourceName).asScala.toSeq.filter(onClasspath(classpath)) flatMap { u => + loader + .getResources(resourceName) + .asScala + .toSeq + .filter(onClasspath(classpath, converter)) flatMap { u => IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty) } - } /** Returns `true` if `url` is an entry in `classpath`. 
*/ - def onClasspath(classpath: Seq[File])(url: URL): Boolean = - IO.urlAsFile(url) exists (classpath.contains _) + def onClasspath(classpath: Def.Classpath, converter: FileConverter)(url: URL): Boolean = + val cpFiles = classpath.map(_.data).map(converter.toPath).map(_.toFile) + IO.urlAsFile(url) exists (cpFiles.contains _) - private[sbt] def binarySourceModules[T]( + private[sbt] def binarySourceModules[A: ClassTag]( data: PluginData, loader: ClassLoader, resourceName: String - )(implicit classTag: reflect.ClassTag[T]): DetectedModules[T] = { + ): DetectedModules[A] = val classpath = data.classpath + val classTag = summon[ClassTag[A]] val namesAndValues = - if (classpath.isEmpty) Nil - else { + if classpath.isEmpty then Nil + else val names = - binarySourceModuleNames(classpath, loader, resourceName, classTag.runtimeClass.getName) - loadModules[T](data, names, loader) - } - new DetectedModules(namesAndValues) - } + binarySourceModuleNames( + classpath, + data.converter, + loader, + resourceName, + classTag.runtimeClass.getName + ) + loadModules[A](data, names, loader) + DetectedModules(namesAndValues) - private[this] def loadModules[T: reflect.ClassTag]( + private[this] def loadModules[A: reflect.ClassTag]( data: PluginData, names: Seq[String], loader: ClassLoader - ): Seq[(String, T)] = - try ModuleUtilities.getCheckedObjects[T](names, loader) + ): Seq[(String, A)] = + try ModuleUtilities.getCheckedObjects[A](names, loader) catch { case e: ExceptionInInitializerError => val cause = e.getCause @@ -183,4 +196,4 @@ object PluginDiscovery { ) throw new IncompatiblePluginsException(msgBase + msgExtra, t) } -} +end PluginDiscovery diff --git a/main/src/main/scala/sbt/internal/PluginManagement.scala b/main/src/main/scala/sbt/internal/PluginManagement.scala index fea4d34a9..74f616d5e 100644 --- a/main/src/main/scala/sbt/internal/PluginManagement.scala +++ b/main/src/main/scala/sbt/internal/PluginManagement.scala @@ -62,7 +62,13 @@ object PluginManagement { ) def 
extractOverrides(classpath: Classpath): Set[ModuleID] = - classpath flatMap { _.metadata get Keys.moduleID.key map keepOverrideInfo } toSet; + (classpath + .flatMap: cp => + cp.metadata + .get(Keys.moduleIDStr) + .map: str => + keepOverrideInfo(Classpaths.moduleIdJsonKeyFormat.read(str))) + .toSet def keepOverrideInfo(m: ModuleID): ModuleID = ModuleID(m.organization, m.name, m.revision).withCrossVersion(m.crossVersion) diff --git a/main/src/main/scala/sbt/internal/SysProp.scala b/main/src/main/scala/sbt/internal/SysProp.scala index 629dc6a5c..32452fef3 100644 --- a/main/src/main/scala/sbt/internal/SysProp.scala +++ b/main/src/main/scala/sbt/internal/SysProp.scala @@ -218,7 +218,7 @@ object SysProp { .orElse(windowsCacheDir) .orElse(macCacheDir) .getOrElse(linuxCache) - baseCache.getAbsoluteFile / "v1" + baseCache.getAbsoluteFile / "v2" } lazy val sbtCredentialsEnv: Option[Credentials] = diff --git a/main/src/main/scala/sbt/internal/server/BuildServerProtocol.scala b/main/src/main/scala/sbt/internal/server/BuildServerProtocol.scala index 021822764..e11ea4a36 100644 --- a/main/src/main/scala/sbt/internal/server/BuildServerProtocol.scala +++ b/main/src/main/scala/sbt/internal/server/BuildServerProtocol.scala @@ -249,8 +249,11 @@ object BuildServerProtocol { val buildItems = builds.map { build => val plugins: LoadedPlugins = build._2.unit.plugins val scalacOptions = plugins.pluginData.scalacOptions - val pluginClassPath = plugins.classpath - val classpath = (pluginClassPath ++ sbtJars).map(_.toURI).toVector + val pluginClasspath = plugins.classpath + val converter = plugins.pluginData.converter + val classpath = + pluginClasspath.map(converter.toPath).map(_.toFile).map(_.toURI).toVector ++ + (sbtJars).map(_.toURI).toVector val item = ScalacOptionsItem( build._1, scalacOptions.toVector, @@ -772,7 +775,12 @@ object BuildServerProtocol { private def jvmEnvironmentItem(): Initialize[Task[JvmEnvironmentItem]] = Def.task { val target = Keys.bspTargetIdentifier.value - val 
classpath = Keys.fullClasspath.value.map(_.data.toURI).toVector + val converter = fileConverter.value + val classpath = Keys.fullClasspath.value + .map(_.data) + .map(converter.toPath) + .map(_.toFile.toURI) + .toVector val jvmOptions = Keys.javaOptions.value.toVector val baseDir = Keys.baseDirectory.value.getAbsolutePath val env = envVars.value @@ -796,7 +804,7 @@ object BuildServerProtocol { val internalDependencyClasspath = for { (ref, configs) <- bspInternalDependencyConfigurations.value config <- configs - } yield ref / config / Keys.classDirectory + } yield ref / config / Keys.packageBin ( target, scalacOptions, @@ -814,12 +822,17 @@ object BuildServerProtocol { internalDependencyClasspath ) => Def.task { - val classpath = internalDependencyClasspath.join.value.distinct ++ + val converter = fileConverter.value + val cp0 = internalDependencyClasspath.join.value.distinct ++ externalDependencyClasspath.map(_.data) + val classpath = cp0 + .map(converter.toPath) + .map(_.toFile.toURI) + .toVector ScalacOptionsItem( target, scalacOptions.toVector, - classpath.map(_.toURI).toVector, + classpath, classDirectory.toURI ) } @@ -964,8 +977,10 @@ object BuildServerProtocol { .toMap ) val runner = new ForkRun(forkOpts) + val converter = fileConverter.value + val cp = classpath.map(converter.toPath) val statusCode = runner - .run(mainClass.`class`, classpath, mainClass.arguments, logger) + .run(mainClass.`class`, cp, mainClass.arguments, logger) .fold( _ => StatusCode.Error, _ => StatusCode.Success diff --git a/notes/2.0.0/migration.md b/notes/2.0.0/migration.md new file mode 100644 index 000000000..18dd31853 --- /dev/null +++ b/notes/2.0.0/migration.md @@ -0,0 +1,8 @@ + + +## files extension on Classpath + +```scala ++ given FileConverter = fileConverter.value + val cp = (Compile / classpath).value.files +``` diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 5252c31ef..cb7755c6d 100644 --- a/project/Dependencies.scala +++ 
b/project/Dependencies.scala @@ -4,8 +4,8 @@ import sbt.contraband.ContrabandPlugin.autoImport._ object Dependencies { // WARNING: Please Scala update versions in PluginCross.scala too - val scala212 = "2.12.17" - val scala213 = "2.13.8" + val scala212 = "2.12.18" + val scala213 = "2.13.12" val scala3 = "3.3.1" val checkPluginCross = settingKey[Unit]("Make sure scalaVersion match up") val baseScalaVersion = scala3 @@ -16,7 +16,7 @@ object Dependencies { private val ioVersion = nightlyVersion.getOrElse("1.8.0") private val lmVersion = sys.props.get("sbt.build.lm.version").orElse(nightlyVersion).getOrElse("2.0.0-alpha13") - val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha8") + val zincVersion = nightlyVersion.getOrElse("2.0.0-alpha10") private val sbtIO = "org.scala-sbt" %% "io" % ioVersion @@ -147,4 +147,5 @@ object Dependencies { val hedgehog = "qa.hedgehog" %% "hedgehog-sbt" % "0.7.0" val disruptor = "com.lmax" % "disruptor" % "3.4.2" val kindProjector = ("org.typelevel" % "kind-projector" % "0.13.2").cross(CrossVersion.full) + val zeroAllocationHashing = "net.openhft" % "zero-allocation-hashing" % "0.10.1" } diff --git a/project/HouseRulesPlugin.scala b/project/HouseRulesPlugin.scala index 609c4ade0..a83e88b83 100644 --- a/project/HouseRulesPlugin.scala +++ b/project/HouseRulesPlugin.scala @@ -14,15 +14,15 @@ object HouseRulesPlugin extends AutoPlugin { scalacOptions += "-language:implicitConversions", scalacOptions ++= "-Xfuture".ifScala213OrMinus.value.toList, scalacOptions += "-Xlint", - scalacOptions ++= "-Xfatal-warnings" - .ifScala(v => { - sys.props.get("sbt.build.fatal") match { - case Some(_) => java.lang.Boolean.getBoolean("sbt.build.fatal") - case _ => v == 12 - } - }) - .value - .toList, + // scalacOptions ++= "-Xfatal-warnings" + // .ifScala(v => { + // sys.props.get("sbt.build.fatal") match { + // case Some(_) => java.lang.Boolean.getBoolean("sbt.build.fatal") + // case _ => v == 12 + // } + // }) + // .value + // .toList, scalacOptions ++= 
"-Ykind-projector".ifScala3.value.toList, scalacOptions ++= "-Ysemanticdb".ifScala3.value.toList, scalacOptions ++= "-Yinline-warnings".ifScala211OrMinus.value.toList, diff --git a/run/src/main/scala/sbt/Run.scala b/run/src/main/scala/sbt/Run.scala index 86ea13776..d5a5ecabf 100644 --- a/run/src/main/scala/sbt/Run.scala +++ b/run/src/main/scala/sbt/Run.scala @@ -8,6 +8,7 @@ package sbt import java.io.File +import java.nio.file.{ Path => NioPath } import java.lang.reflect.Method import java.lang.reflect.Modifier.{ isPublic, isStatic } import sbt.internal.inc.ScalaInstance @@ -20,11 +21,16 @@ import scala.sys.process.Process import scala.util.control.NonFatal import scala.util.{ Failure, Success, Try } -sealed trait ScalaRun { - def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit] -} +sealed trait ScalaRun: + def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger): Try[Unit] + class ForkRun(config: ForkOptions) extends ScalaRun { - def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit] = { + def run( + mainClass: String, + classpath: Seq[NioPath], + options: Seq[String], + log: Logger + ): Try[Unit] = { def processExitCode(exitCode: Int, label: String): Try[Unit] = if (exitCode == 0) Success(()) else @@ -47,7 +53,12 @@ class ForkRun(config: ForkOptions) extends ScalaRun { processExitCode(exitCode, "runner") } - def fork(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Process = { + def fork( + mainClass: String, + classpath: Seq[NioPath], + options: Seq[String], + log: Logger + ): Process = { log.info(s"running (fork) $mainClass ${Run.runOptionsStr(options)}") val c = configLogged(log) @@ -64,23 +75,23 @@ class ForkRun(config: ForkOptions) extends ScalaRun { private def scalaOptions( mainClass: String, - classpath: Seq[File], - options: Seq[String] + classpath: Seq[NioPath], + options: Seq[String], ): Seq[String] = - 
"-classpath" :: Path.makeString(classpath) :: mainClass :: options.toList + "-classpath" :: Path.makeString(classpath.map(_.toFile())) :: mainClass :: options.toList } -class Run(private[sbt] val newLoader: Seq[File] => ClassLoader, trapExit: Boolean) +class Run(private[sbt] val newLoader: Seq[NioPath] => ClassLoader, trapExit: Boolean) extends ScalaRun { def this(instance: ScalaInstance, trapExit: Boolean, nativeTmp: File) = this( - (cp: Seq[File]) => ClasspathUtil.makeLoader(cp.map(_.toPath), instance, nativeTmp.toPath), + (cp: Seq[NioPath]) => ClasspathUtil.makeLoader(cp, instance, nativeTmp.toPath), trapExit ) private[sbt] def runWithLoader( loader: ClassLoader, - classpath: Seq[File], + classpath: Seq[NioPath], mainClass: String, options: Seq[String], log: Logger @@ -122,15 +133,19 @@ class Run(private[sbt] val newLoader: Seq[File] => ClassLoader, trapExit: Boolea } /** Runs the class 'mainClass' using the given classpath and options using the scala runner. */ - def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger): Try[Unit] = { + def run( + mainClass: String, + classpath: Seq[NioPath], + options: Seq[String], + log: Logger + ): Try[Unit] = { val loader = newLoader(classpath) try runWithLoader(loader, classpath, mainClass, options, log) finally - loader match { + loader match case ac: AutoCloseable => ac.close() case c: ClasspathFilter => c.close() case _ => - } } private def invokeMain( loader: ClassLoader, @@ -173,8 +188,8 @@ class Run(private[sbt] val newLoader: Seq[File] => ClassLoader, trapExit: Boolea } /** This module is an interface to starting the scala interpreter or runner. 
*/ -object Run { - def run(mainClass: String, classpath: Seq[File], options: Seq[String], log: Logger)(implicit +object Run: + def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger)(implicit runner: ScalaRun ) = runner.run(mainClass, classpath, options, log) @@ -195,4 +210,4 @@ object Run { case str if str.contains(" ") => "\"" + str + "\"" case str => str }).mkString(" ") -} +end Run diff --git a/sbt-app/src/main/scala/package.scala b/sbt-app/src/main/scala/package.scala index dc160ad77..a2a4434cc 100644 --- a/sbt-app/src/main/scala/package.scala +++ b/sbt-app/src/main/scala/package.scala @@ -68,6 +68,7 @@ package object sbt final val ThisScope = Scope.ThisScope final val Global = Scope.Global final val GlobalScope = Scope.GlobalScope + val `Package` = Pkg inline def config(name: String): Configuration = ${ ConfigurationMacro.configMacroImpl('{ name }) diff --git a/sbt-app/src/main/scala/sbt/Import.scala b/sbt-app/src/main/scala/sbt/Import.scala index 41eb0ba86..25450d9d8 100644 --- a/sbt-app/src/main/scala/sbt/Import.scala +++ b/sbt-app/src/main/scala/sbt/Import.scala @@ -366,5 +366,7 @@ trait Import { type IvyPaths = sbt.librarymanagement.ivy.IvyPaths val IvyPaths = sbt.librarymanagement.ivy.IvyPaths + type FileConverter = xsbti.FileConverter + type HashedVirtualFileRef = xsbti.HashedVirtualFileRef type IncOptions = xsbti.compile.IncOptions } diff --git a/sbt-app/src/sbt-test/actions/aggregate/test b/sbt-app/src/sbt-test/actions/aggregate/test index ed779d712..fcd925ea8 100644 --- a/sbt-app/src/sbt-test/actions/aggregate/test +++ b/sbt-app/src/sbt-test/actions/aggregate/test @@ -23,9 +23,8 @@ $ exists ran $ delete ran # switch to multi-project, no aggregation yet. 
'reload' will drop session settings -$ touch multi -$ copy-file changes/build.sbt build.sbt $ mkdir sub sub/sub +$ copy-file changes/build.sbt build.sbt > reload # define in root project only diff --git a/sbt-app/src/sbt-test/actions/clean-keep/build.sbt b/sbt-app/src/sbt-test/actions/clean-keep/build.sbt index 9c2ae05b6..1b1b9de4e 100644 --- a/sbt-app/src/sbt-test/actions/clean-keep/build.sbt +++ b/sbt-app/src/sbt-test/actions/clean-keep/build.sbt @@ -1,8 +1,12 @@ import sbt.nio.file.Glob +scalaVersion := "2.12.18" + +target := baseDirectory.value / "target" + cleanKeepFiles ++= Seq( - target.value / "keep", - target.value / "keepfile" + target.value / "keep", + target.value / "keepfile" ) cleanKeepGlobs += target.value.toGlob / "keepdir" / ** diff --git a/sbt-app/src/sbt-test/actions/cross-multiproject/build.sbt b/sbt-app/src/sbt-test/actions/cross-multiproject/build.sbt index 73c8fe20a..db47ed628 100644 --- a/sbt-app/src/sbt-test/actions/cross-multiproject/build.sbt +++ b/sbt-app/src/sbt-test/actions/cross-multiproject/build.sbt @@ -1,7 +1,7 @@ lazy val scala3 = "3.2.1" lazy val scala213 = "2.13.1" -ThisBuild / crossScalaVersions := Seq(scala3, scala213) +crossScalaVersions := Seq(scala3, scala213) ThisBuild / scalaVersion := scala3 lazy val rootProj = (project in file(".")) diff --git a/sbt-app/src/sbt-test/actions/cross-multiproject/test b/sbt-app/src/sbt-test/actions/cross-multiproject/test index 92e8c90d7..a503b84d2 100644 --- a/sbt-app/src/sbt-test/actions/cross-multiproject/test +++ b/sbt-app/src/sbt-test/actions/cross-multiproject/test @@ -1,67 +1,44 @@ > show rootProj/projectID > + compile -$ exists lib/target/scala-3.2.1 -$ exists lib/target/scala-2.13 -$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 +$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar +$ exists 
target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-2.13.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar > clean > + libProj/compile -$ exists lib/target/scala-3.2.1 -$ exists lib/target/scala-2.13 --$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 - -# test safe switching -> clean -> ++ 3.2.1 -v compile -$ exists lib/target/scala-3.2.1 --$ exists lib/target/scala-2.13 -$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 - -# Test legacy cross build with command support -# > clean -# > + build -# $ exists lib/target/scala-3.2.1 -# $ exists lib/target/scala-2.13 -# $ exists sbt-foo/target/scala-3.2.1 -# -$ exists sbt-foo/target/scala-2.13 +$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar # Test ++ leaves crossScalaVersions unchanged > clean > ++3.2.1 > +extrasProj/compile -$ exists extras/target/scala-2.13 -$ exists extras/target/scala-3.2.1 +$ exists target/out/jvm/scala-2.13.1/foo-extras/foo-extras_2.13-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-3.2.1/foo-extras/foo-extras_3-0.1.0-SNAPSHOT-noresources.jar # test safe switching > clean > ++ 2.13.1 -v compile -$ exists lib/target/scala-2.13 --$ exists lib/target/scala-3.2.1 -# -$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 +-$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar # test wildcard switching (3.2.1 > clean > ++ 3.* -v compile -$ exists lib/target/scala-3.2.1 --$ exists lib/target/scala-2.13 -$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 +$ exists 
target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-3.2.1/sbt-foo/sbt-foo-0.1.0-SNAPSHOT-noresources.jar # test wildcard switching (2.13) > clean > ++ 2.13.x -v compile -$ exists lib/target/scala-2.13 --$ exists lib/target/scala-3.2.1 -# -$ exists sbt-foo/target/scala-3.2.1 --$ exists sbt-foo/target/scala-2.13 +-$ exists target/out/jvm/scala-3.2.1/foo-lib/foo-lib_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-2.13.1/foo-lib/foo-lib_2.13-0.1.0-SNAPSHOT-noresources.jar # test wildcard switching (no matches) -> ++ 4.* # test wildcard switching (multiple matches) > ++ 2.* - diff --git a/sbt-app/src/sbt-test/actions/cross-strict-aggregation-scala-3/test b/sbt-app/src/sbt-test/actions/cross-strict-aggregation-scala-3/test index d4b96cd58..9afecfa1e 100644 --- a/sbt-app/src/sbt-test/actions/cross-strict-aggregation-scala-3/test +++ b/sbt-app/src/sbt-test/actions/cross-strict-aggregation-scala-3/test @@ -1,19 +1,19 @@ > ++3.0.2 compile -$ exists core/target/scala-3.0.2 --$ exists core/target/scala-3.1.2 --$ exists subproj/target/scala-3.0.2 --$ exists subproj/target/scala-3.1.2 +$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar > clean --$ exists core/target/scala-3.0.2 --$ exists core/target/scala-3.1.2 --$ exists subproj/target/scala-3.0.2 --$ exists subproj/target/scala-3.1.2 +-$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists 
target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar > ++3.1.2 compile --$ exists core/target/scala-3.0.2 -$ exists core/target/scala-3.1.2 --$ exists subproj/target/scala-3.0.2 -$ exists subproj/target/scala-3.1.2 +-$ exists target/out/jvm/scala-3.0.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-3.1.2/core/core_3-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-3.0.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar +$ exists target/out/jvm/scala-3.1.2/subproj/subproj_3-0.1.0-SNAPSHOT-noresources.jar diff --git a/sbt-app/src/sbt-test/actions/cross-strict-aggregation/test b/sbt-app/src/sbt-test/actions/cross-strict-aggregation/test index 996094f8e..729999288 100644 --- a/sbt-app/src/sbt-test/actions/cross-strict-aggregation/test +++ b/sbt-app/src/sbt-test/actions/cross-strict-aggregation/test @@ -4,6 +4,6 @@ > ++2.13.1 compile -$ exists core/target/scala-2.13 --$ exists module/target/scala-2.13 --$ exists module/target/scala-2.12 +$ exists target/out/jvm/scala-2.13.1/core/core_2.13-0.1.0-SNAPSHOT-noresources.jar +-$ exists target/out/jvm/scala-2.13.1/module +-$ exists target/out/jvm/scala-2.12.18/module diff --git a/sbt-app/src/sbt-test/actions/doc-file-options/Main.scala b/sbt-app/src/sbt-test/actions/doc-file-options/Main.scala index a75d7caa1..ee37dfd23 100644 --- a/sbt-app/src/sbt-test/actions/doc-file-options/Main.scala +++ b/sbt-app/src/sbt-test/actions/doc-file-options/Main.scala @@ -1,3 +1,2 @@ -object Main{ - +object Main { } diff --git a/sbt-app/src/sbt-test/actions/doc-file-options/build.sbt b/sbt-app/src/sbt-test/actions/doc-file-options/build.sbt index 2c90c5429..204628694 100644 --- a/sbt-app/src/sbt-test/actions/doc-file-options/build.sbt +++ b/sbt-app/src/sbt-test/actions/doc-file-options/build.sbt @@ -2,7 +2,7 @@ val newContents = "bbbbbbbbb" val rootContentFile = "root.txt" -ThisBuild / scalaVersion := "2.12.12" +ThisBuild / scalaVersion := "2.13.12" lazy val root = (project in 
file(".")) .settings( diff --git a/sbt-app/src/sbt-test/actions/doc-scala3/test b/sbt-app/src/sbt-test/actions/doc-scala3/test index c13e97789..b0a167b0c 100644 --- a/sbt-app/src/sbt-test/actions/doc-scala3/test +++ b/sbt-app/src/sbt-test/actions/doc-scala3/test @@ -2,14 +2,14 @@ # there shouldn't be two api/ directories # see https://github.com/lampepfl/dotty/issues/11412 -$ exists rc1/target/scala-3.0.0-RC1/api/api/index.html -$ exists rc1/target/scala-3.0.0-RC1/api/api/foo/A$.html -$ exists rc1/target/scala-3.0.0-RC1/api/api/foo.html +$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/index.html +$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/foo/A$.html +$ exists target/out/jvm/scala-3.0.0-RC1/rc1/api/api/foo.html > m3 / doc # there shouldn't be two api/ directories # see https://github.com/lampepfl/dotty/issues/11412 -$ exists m3/target/scala-3.0.0-M3/api/index.html -$ exists m3/target/scala-3.0.0-M3/api/api/foo/A$.html -$ exists m3/target/scala-3.0.0-M3/api/api/foo.html +$ exists target/out/jvm/scala-3.0.0-M3/m3/api/index.html +$ exists target/out/jvm/scala-3.0.0-M3/m3/api/api/foo/A$.html +$ exists target/out/jvm/scala-3.0.0-M3/m3/api/api/foo.html diff --git a/sbt-app/src/sbt-test/actions/doc/build.sbt b/sbt-app/src/sbt-test/actions/doc/build.sbt index 7036a6f1e..3bc429b9a 100644 --- a/sbt-app/src/sbt-test/actions/doc/build.sbt +++ b/sbt-app/src/sbt-test/actions/doc/build.sbt @@ -5,8 +5,8 @@ import Parsers._ lazy val root = (project in file(".")) .settings( crossPaths := false, - crossScalaVersions := Seq("2.12.12", "2.13.3"), - scalaVersion := "2.12.12", + crossScalaVersions := Seq("2.12.18", "2.13.3"), + scalaVersion := "2.12.18", Compile / doc / scalacOptions += "-Xfatal-warnings", commands += Command.command("excludeB") { s => val impl = """val src = (Compile / sources).value; src.filterNot(_.getName.contains("B"))""" diff --git a/sbt-app/src/sbt-test/actions/doc/pending b/sbt-app/src/sbt-test/actions/doc/pending deleted file mode 100644 index 
14318f4e5..000000000 --- a/sbt-app/src/sbt-test/actions/doc/pending +++ /dev/null @@ -1,47 +0,0 @@ --> doc - -> excludeB - -# hybrid project, only scaladoc run -> doc -$ exists target/api/index.js -$ exists target/api/A$.html -$ absent target/api/scala -$ absent target/api/java - -> setDocExtension scala - -# The original B.scala fails scaladoc -$ copy-file changes/B.scala B.scala -# compile task is superfluous. Since doc task preceded by compile task has been problematic due to scala -# compiler's way of handling empty classpath. We have it here to test that our workaround works. -> clean ; compile ; doc - -# pure scala project, only scaladoc at top level -$ exists target/api/index.js -$ exists target/api/A$.html -$ exists target/api/B$.html -$ absent target/api/package-list -$ absent target/api/scala -$ absent target/api/java - -> setDocExtension java - -> clean ; doc - -# pure java project, only javadoc at top level -$ exists target/api/index.html -$ exists target/api/pkg/J.html -$ absent target/api/index.js - -> setDocExtension scala -> ++2.13.3 -> clean -$ absent target/api/A$.html -> doc -$ exists target/api/A$.html -$ exists target/api/B$.html - -# pending -# $ absent target/api/scala -# $ absent target/api/java diff --git a/sbt-app/src/sbt-test/actions/doc/test b/sbt-app/src/sbt-test/actions/doc/test new file mode 100644 index 000000000..354fe9643 --- /dev/null +++ b/sbt-app/src/sbt-test/actions/doc/test @@ -0,0 +1,47 @@ +-> doc + +> excludeB + +# hybrid project, only scaladoc run +> doc +> packageBin +$ exists target/out/jvm/scala-2.12.18/root/api/index.js +$ exists target/out/jvm/scala-2.12.18/root/api/A$.html +$ absent target/out/jvm/scala-2.12.18/root/api/scala +$ absent target/out/jvm/scala-2.12.18/root/api/java + +> setDocExtension scala + +# The original B.scala fails scaladoc +$ copy-file changes/B.scala B.scala +# compile task is superfluous. 
Since doc task preceded by compile task has been problematic due to scala +# compiler's way of handling empty classpath. We have it here to test that our workaround works. +> clean ; compile ; doc + +# pure scala project, only scaladoc at top level +$ exists target/out/jvm/scala-2.12.18/root/api/index.js +$ exists target/out/jvm/scala-2.12.18/root/api/A$.html +$ exists target/out/jvm/scala-2.12.18/root/api/B$.html +$ absent target/out/jvm/scala-2.12.18/root/api/package-list +$ absent target/out/jvm/scala-2.12.18/root/api/scala +$ absent target/out/jvm/scala-2.12.18/root/api/java + +> setDocExtension java + +> clean + +# pending because Javadoc doesn't work +> doc + +# pure java project, only javadoc at top level +$ exists target/out/jvm/scala-2.12.18/root/api/index.html +$ exists target/out/jvm/scala-2.12.18/root/api/pkg/J.html +$ absent target/out/jvm/scala-2.12.18/root/api/index.js + +> setDocExtension scala +> ++2.13.x +> clean +$ absent target/out/jvm/scala-2.12.18/root/api/A$.html +> doc +$ exists target/out/jvm/scala-2.13.3/root/api/A$.html +$ exists target/out/jvm/scala-2.13.3/root/api/B$.html diff --git a/sbt-app/src/sbt-test/actions/external-doc/build.sbt b/sbt-app/src/sbt-test/actions/external-doc/build.sbt index 74edc1a64..ab3cb5847 100644 --- a/sbt-app/src/sbt-test/actions/external-doc/build.sbt +++ b/sbt-app/src/sbt-test/actions/external-doc/build.sbt @@ -1,3 +1,5 @@ +import xsbti.HashedVirtualFileRef + // https://github.com/coursier/coursier/issues/1123 ThisBuild / useCoursier := false @@ -34,17 +36,18 @@ val checkApiMappings = taskKey[Unit]("Verifies that the API mappings are collect def expectedMappings = Def.task { val stdLibVersion = "2.13.10" val binVersion = scalaBinaryVersion.value + val converter = fileConverter.value val ms = update.value.configuration(Compile).get.modules.flatMap { mod => mod.artifacts.flatMap { case (a, f) => val n = a.name.stripSuffix("_" + binVersion) n match { - case "a" | "b" | "c" => (f, apiBase(n)) :: Nil - case 
"scala-library" => (f, scalaLibraryBase(stdLibVersion)) :: Nil + case "a" | "b" | "c" => (converter.toVirtualFile(f.toPath()): HashedVirtualFileRef, apiBase(n)) :: Nil + case "scala-library" => (converter.toVirtualFile(f.toPath()): HashedVirtualFileRef, scalaLibraryBase(stdLibVersion)) :: Nil case _ => Nil } } } - val mc = (c / Compile / classDirectory).value -> apiBase("c") + val mc = (c / Compile / packageBin).value -> apiBase("c") (mc +: ms).toMap } diff --git a/sbt-app/src/sbt-test/actions/package-delete-target/build.sbt b/sbt-app/src/sbt-test/actions/package-delete-target/build.sbt index 521d0c6da..dbecc17a2 100644 --- a/sbt-app/src/sbt-test/actions/package-delete-target/build.sbt +++ b/sbt-app/src/sbt-test/actions/package-delete-target/build.sbt @@ -1,5 +1,5 @@ lazy val root = (project in file(".")) .settings( name := "delete-target", - scalaVersion := "2.12.1" + scalaVersion := "2.12.18" ) diff --git a/sbt-app/src/sbt-test/actions/package-delete-target/test b/sbt-app/src/sbt-test/actions/package-delete-target/test index 7fa1b3ae7..a4f29a7c5 100644 --- a/sbt-app/src/sbt-test/actions/package-delete-target/test +++ b/sbt-app/src/sbt-test/actions/package-delete-target/test @@ -1,4 +1,4 @@ -$ mkdir target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar -$ touch target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello +$ mkdir target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar +$ touch target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello -> package -$ exists target/scala-2.12/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello +$ exists target/out/jvm/scala-2.12.18/delete-target/delete-target_2.12-0.1.0-SNAPSHOT.jar/hello diff --git a/sbt-app/src/sbt-test/actions/remote-cache-semanticdb/test b/sbt-app/src/sbt-test/actions/remote-cache-semanticdb/disabled similarity index 100% rename from sbt-app/src/sbt-test/actions/remote-cache-semanticdb/test rename to 
sbt-app/src/sbt-test/actions/remote-cache-semanticdb/disabled diff --git a/sbt-app/src/sbt-test/actions/remote-cache/test b/sbt-app/src/sbt-test/actions/remote-cache/disabled similarity index 100% rename from sbt-app/src/sbt-test/actions/remote-cache/test rename to sbt-app/src/sbt-test/actions/remote-cache/disabled diff --git a/sbt-app/src/sbt-test/cache/basic/build.sbt b/sbt-app/src/sbt-test/cache/basic/build.sbt new file mode 100644 index 000000000..ccb49f110 --- /dev/null +++ b/sbt-app/src/sbt-test/cache/basic/build.sbt @@ -0,0 +1,31 @@ +import sbt.internal.util.StringVirtualFile1 +import sjsonnew.BasicJsonProtocol.* + +val pure1 = taskKey[Unit]("") +val map1 = taskKey[String]("") +val mapN1 = taskKey[Unit]("") + +Global / localCacheDirectory := new File("/tmp/sbt/diskcache/") + +pure1 := (Def.cachedTask { + val output = StringVirtualFile1("a.txt", "foo") + Def.declareOutput(output) + () +}).value + +map1 := (Def.cachedTask { + pure1.value + val output1 = StringVirtualFile1("b1.txt", "foo") + val output2 = StringVirtualFile1("b2.txt", "foo") + Def.declareOutput(output1) + Def.declareOutput(output2) + "something" +}).value + +mapN1 := (Def.cachedTask { + pure1.value + map1.value + val output = StringVirtualFile1("c.txt", "foo") + Def.declareOutput(output) + () +}).value diff --git a/sbt-app/src/sbt-test/cache/basic/test b/sbt-app/src/sbt-test/cache/basic/test new file mode 100644 index 000000000..e45a58691 --- /dev/null +++ b/sbt-app/src/sbt-test/cache/basic/test @@ -0,0 +1,18 @@ +> startServer +> pure1 +$ exists target/out/a.txt +> clean +> pure1 +$ exists target/out/a.txt + +> clean +> map1 +$ exists target/out/a.txt +$ exists target/out/b1.txt +$ exists target/out/b2.txt + +> clean +> mapN1 +$ exists target/out/a.txt +$ exists target/out/b1.txt +$ exists target/out/c.txt diff --git a/sbt-app/src/sbt-test/cache/optout/build.sbt b/sbt-app/src/sbt-test/cache/optout/build.sbt new file mode 100644 index 000000000..cb07d73e3 --- /dev/null +++ 
b/sbt-app/src/sbt-test/cache/optout/build.sbt @@ -0,0 +1,25 @@ +import sbt.internal.util.StringVirtualFile1 +import sjsonnew.BasicJsonProtocol.* +import CustomKeys.* + +Global / localCacheDirectory := new File("/tmp/sbt/diskcache/") + +aa := A() + +// This tests that pure1 is opt'ed out from caching +map1 := (Def.cachedTask { + aa.value + val output1 = StringVirtualFile1("b1.txt", "foo") + val output2 = StringVirtualFile1("b2.txt", "foo") + Def.declareOutput(output1) + Def.declareOutput(output2) + "something" +}).value + +mapN1 := (Def.cachedTask { + aa.value + map1.value + val output = StringVirtualFile1("c.txt", "foo") + Def.declareOutput(output) + () +}).value diff --git a/sbt-app/src/sbt-test/cache/optout/project/A.scala b/sbt-app/src/sbt-test/cache/optout/project/A.scala new file mode 100644 index 000000000..abafb08a9 --- /dev/null +++ b/sbt-app/src/sbt-test/cache/optout/project/A.scala @@ -0,0 +1,10 @@ +import sbt.* +import sbt.util.cacheLevel + +case class A() + +object CustomKeys: + @cacheLevel(include = Array.empty) + val aa = taskKey[A]("") + val map1 = taskKey[String]("") + val mapN1 = taskKey[Unit]("") diff --git a/sbt-app/src/sbt-test/cache/optout/test b/sbt-app/src/sbt-test/cache/optout/test new file mode 100644 index 000000000..ef47495e1 --- /dev/null +++ b/sbt-app/src/sbt-test/cache/optout/test @@ -0,0 +1,4 @@ +> startServer +> map1 +$ exists target/out/b1.txt +$ exists target/out/b2.txt diff --git a/sbt-app/src/sbt-test/classloader-cache/jni/build.sbt b/sbt-app/src/sbt-test/classloader-cache/jni/build.sbt index 9bd266eb7..c51ecc780 100644 --- a/sbt-app/src/sbt-test/classloader-cache/jni/build.sbt +++ b/sbt-app/src/sbt-test/classloader-cache/jni/build.sbt @@ -10,7 +10,7 @@ val wrappedTest = taskKey[Unit]("Test with modified java.library.path") def wrap(task: InputKey[Unit]): Def.Initialize[Task[Unit]] = Def.sequential(appendToLibraryPath, task.toTask(""), dropLibraryPath) -ThisBuild / turbo := true +// ThisBuild / turbo := true val root = (project 
in file(".")).settings( scalaVersion := "2.12.12", diff --git a/sbt-app/src/sbt-test/classloader-cache/resources/test b/sbt-app/src/sbt-test/classloader-cache/resources/pending similarity index 100% rename from sbt-app/src/sbt-test/classloader-cache/resources/test rename to sbt-app/src/sbt-test/classloader-cache/resources/pending diff --git a/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/build.sbt b/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/build.sbt index 538540498..8264fcba0 100644 --- a/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/build.sbt +++ b/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/build.sbt @@ -1,8 +1,24 @@ +import sbt.io.Using import xsbti.compile.TastyFiles -ThisBuild / scalaVersion := "3.0.0-M3" +ThisBuild / scalaVersion := "3.3.1" TaskKey[Unit]("check") := { assert((Compile / auxiliaryClassFiles).value == Seq(TastyFiles.instance)) assert((Test / auxiliaryClassFiles).value == Seq(TastyFiles.instance)) } + +TaskKey[Unit]("check2") := checkTastyFiles(true, true).value + +TaskKey[Unit]("check3") := checkTastyFiles(true, false).value + +def checkTastyFiles(aExists: Boolean, bExists: Boolean) = Def.task { + val p = (Compile / packageBin).value + val c = fileConverter.value + Using.jarFile(false)(c.toPath(p).toFile()): jar => + if aExists then assert(jar.getJarEntry("A.tasty") ne null) + else assert(jar.getJarEntry("A.tasty") eq null) + + if bExists then assert(jar.getJarEntry("B.tasty") ne null) + else assert(jar.getJarEntry("B.tasty") eq null) +} diff --git a/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/test b/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/test index e709a96dd..a1a2e456c 100644 --- a/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/test +++ b/sbt-app/src/sbt-test/compiler-project/scala3-tasty-management/test @@ -1,12 +1,7 @@ > check -> compile -$ exists target/scala-3.0.0-M3/classes/A.tasty -$ exists 
target/scala-3.0.0-M3/classes/B.tasty +> check2 $ delete src/main/scala/B.scala -> compile - -$ exists target/scala-3.0.0-M3/classes/A.tasty --$ exists target/scala-3.0.0-M3/classes/B.tasty +> check3 diff --git a/sbt-app/src/sbt-test/compiler-project/separate-analysis-per-scala/test b/sbt-app/src/sbt-test/compiler-project/separate-analysis-per-scala/test index 40ef2a8c8..fc6216380 100644 --- a/sbt-app/src/sbt-test/compiler-project/separate-analysis-per-scala/test +++ b/sbt-app/src/sbt-test/compiler-project/separate-analysis-per-scala/test @@ -1,3 +1,3 @@ > + compile -$ exists target/scala-2.12 -$ exists target/scala-2.13 +$ exists target/out/jvm/scala-2.12.12/foo +$ exists target/out/jvm/scala-2.13.1/foo diff --git a/sbt-app/src/sbt-test/dependency-management/artifact/build.sbt b/sbt-app/src/sbt-test/dependency-management/artifact/build.sbt index c9cec1c99..49745392f 100644 --- a/sbt-app/src/sbt-test/dependency-management/artifact/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/artifact/build.sbt @@ -28,7 +28,11 @@ lazy val root = (project in file(".")) Compile / packageBin / artifact := mainArtifact, libraryDependencies ++= (if (baseDirectory.value / "retrieve").exists then publishedID :: Nil else Nil), // needed to add a jar with a different type to the managed classpath - Compile / unmanagedClasspath ++= scalaInstance.value.libraryJars.toSeq, + Compile / unmanagedClasspath ++= { + val converter = fileConverter.value + val xs = scalaInstance.value.libraryJars.toSeq + xs.map(x => converter.toVirtualFile(x.toPath()): HashedVirtualFileRef) + }, classpathTypes := Set(tpe), // custom configuration artifacts @@ -66,7 +70,8 @@ def retrieveID = org % "test-retrieve" % "2.0" def checkTask(classpath: TaskKey[Classpath]) = Def.task { val deps = libraryDependencies.value - val cp = (Compile / classpath).value.files + given FileConverter = fileConverter.value + val cp = (Compile / classpath).value.files.map(_.toFile()) val loader = ClasspathUtilities.toLoader(cp, 
scalaInstance.value.loader) try { Class.forName("test.Test", false, loader); () } catch { case _: ClassNotFoundException | _: NoClassDefFoundError => sys.error(s"Dependency not retrieved properly: $deps, $cp") } diff --git a/sbt-app/src/sbt-test/dependency-management/auto-scala-library/build.sbt b/sbt-app/src/sbt-test/dependency-management/auto-scala-library/build.sbt index 2d2277c17..fa156a106 100644 --- a/sbt-app/src/sbt-test/dependency-management/auto-scala-library/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/auto-scala-library/build.sbt @@ -7,7 +7,7 @@ val checkScalaLibrary = TaskKey[Unit]("checkScalaLibrary") checkScalaLibrary := { val scalaLibsJars = (Compile / managedClasspath) .value - .map(_.data.getName) + .map(_.data.name) .filter(_.startsWith("scala-library")) .sorted val expectedScalaLibsJars = Seq( diff --git a/sbt-app/src/sbt-test/dependency-management/cache-resolver/pending b/sbt-app/src/sbt-test/dependency-management/cache-resolver/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/cache-resolver/pending rename to sbt-app/src/sbt-test/dependency-management/cache-resolver/test diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-circular/changes/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-circular/changes/multi.sbt index 1616498cc..46ef80548 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-circular/changes/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-circular/changes/multi.sbt @@ -54,7 +54,7 @@ lazy val root = (project in file(".")). settings(commonSettings: _*). 
settings( check := { - val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted + val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted if (!(acp contains "netty-3.2.0.Final.jar")) { sys.error("netty-3.2.0.Final not found when it should be included: " + acp.toString) } diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/multi.sbt index eaa3d983a..0b97c5ab1 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/multi.sbt @@ -70,9 +70,9 @@ lazy val root = (project in file(".")). (ThisBuild / organization) := "org.example", (ThisBuild / version) := "1.0", check := { - val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted - val bcp = (b / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted - val ccp = (c / Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "demo_2.10.jar"} + val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted + val bcp = (b / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted + val ccp = (c / Compile / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "demo_2.10.jar"} if (!(acp contains "commons-io-1.4-sources.jar")) { sys.error("commons-io-1.4-sources not found when it should be included: " + acp.toString) } @@ -90,9 +90,9 @@ lazy val root = (project in file(".")). 
"\n - b (plain) " + bcpWithoutSource.toString + "\n - c (inter-project) " + ccpWithoutSource.toString) - val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "commons-io-1.4.jar"} - val btestcp = (b / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "commons-io-1.4.jar"} - val ctestcp = (c / Test / externalDependencyClasspath).value.map {_.data.getName}.sorted filterNot { _ == "demo_2.10.jar"} filterNot { _ == "commons-io-1.4.jar"} + val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "commons-io-1.4.jar"} + val btestcp = (b / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "commons-io-1.4.jar"} + val ctestcp = (c / Test / externalDependencyClasspath).value.map {_.data.name}.sorted filterNot { _ == "demo_2.10.jar"} filterNot { _ == "commons-io-1.4.jar"} if (ctestcp contains "junit-4.13.1.jar") { sys.error("junit found when it should be excluded: " + ctestcp.toString) } diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/pending b/sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/pending rename to sbt-app/src/sbt-test/dependency-management/cached-resolution-classifier/test diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt index ed8b6cd22..9a5e0fa11 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-conflicts/multi.sbt @@ -45,7 +45,7 @@ val y2 = project.settings( ) TaskKey[Unit]("check") := { - val x1cp = (x1 / Compile / externalDependencyClasspath).value.map(_.data.getName).sorted + val x1cp = 
(x1 / Compile / externalDependencyClasspath).value.map(_.data.name).sorted def x1cpStr = x1cp.mkString("\n* ", "\n* ", "") // if (!(x1cp contains "slf4j-api-1.6.6.jar")) diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt index 8a5dc14e3..7490e7443 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-exclude/multi.sbt @@ -38,16 +38,16 @@ lazy val root = (project in file(".")). version := "1.0", updateOptions := updateOptions.value.withCachedResolution(true), check := { - val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - if (acp exists { _.data.getName contains "commons-io" }) { + val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + if (acp exists { _.data.name contains "commons-io" }) { sys.error("commons-io found when it should be excluded") } - if (acp exists { _.data.getName contains "commons-codec" }) { + if (acp exists { _.data.name contains "commons-codec" }) { sys.error("commons-codec found when it should be excluded") } // This is checking to make sure excluded graph is not getting picked up - if (!(bcp exists { _.data.getName contains "commons-io" })) { + if (!(bcp exists { _.data.name contains "commons-io" })) { sys.error("commons-io NOT found when it should NOT be excluded") } } diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-force/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-force/multi.sbt index 5b15fc6fe..4f09e7eb7 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-force/multi.sbt +++ 
b/sbt-app/src/sbt-test/dependency-management/cached-resolution-force/multi.sbt @@ -67,33 +67,33 @@ lazy val root = (project in file(".")). ThisBuild / version := "1.0", check := { // sys.error(dependencyCacheDirectory.value.toString) - val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val ccp = (c / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val dcp = (d / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} + val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val ccp = (c / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val dcp = (d / Compile / externalDependencyClasspath).value.sortBy {_.data.name} - if (!(acp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) { + if (!(acp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) { sys.error("spring-core-3.2.2 is not found on a") } - if (!(bcp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) { + if (!(bcp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) { sys.error("spring-core-3.2.2 is not found on b") } - if (!(ccp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) { + if (!(ccp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) { sys.error("spring-core-3.2.2 is not found on c") } - if (!(dcp exists {_.data.getName contains "spring-core-3.2.2.RELEASE"})) { + if (!(dcp exists {_.data.name contains "spring-core-3.2.2.RELEASE"})) { sys.error("spring-core-3.2.2 is not found on d\n" + dcp.toString) } - if (!(acp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) { + if (!(acp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) { sys.error("spring-tx-3.1.2 is not found on a") } - if (!(bcp exists {_.data.getName contains 
"spring-tx-3.1.2.RELEASE"})) { + if (!(bcp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) { sys.error("spring-tx-3.1.2 is not found on b") } - if (!(ccp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) { + if (!(ccp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) { sys.error("spring-tx-3.1.2 is not found on c") } - if (!(dcp exists {_.data.getName contains "spring-tx-3.1.2.RELEASE"})) { + if (!(dcp exists {_.data.name contains "spring-tx-3.1.2.RELEASE"})) { sys.error("spring-tx-3.1.2 is not found on d") } if (acp == bcp) () diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt index 9262d8e12..1e4757556 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-interproj/multi.sbt @@ -40,9 +40,9 @@ lazy val root = (project in file(".")). 
updateOptions := updateOptions.value.withCachedResolution(true), check := { val ur = (a / update).value - val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.getName} + val acp = (a / Compile / externalDependencyClasspath).value.map {_.data.name} val atestcp0 = (a / Test / fullClasspath).value - val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.getName} + val atestcp = (a / Test / externalDependencyClasspath).value.map {_.data.name} // This is checking to make sure interproject dependency works if (acp exists { _ contains "scalatest" }) { sys.error("scalatest found when it should NOT be included: " + acp.toString) diff --git a/sbt-app/src/sbt-test/dependency-management/cached-resolution-overrides/multi.sbt b/sbt-app/src/sbt-test/dependency-management/cached-resolution-overrides/multi.sbt index b30a35185..a7803263e 100644 --- a/sbt-app/src/sbt-test/dependency-management/cached-resolution-overrides/multi.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cached-resolution-overrides/multi.sbt @@ -43,8 +43,8 @@ lazy val b = project. lazy val root = (project in file(".")). 
settings( check := { - val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} + val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name} if (acp == bcp) () else sys.error("Different classpaths are found:" + "\n - a (overrides + cached) " + acp.toString + diff --git a/sbt-app/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt b/sbt-app/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt index c323436db..0e0d5e0f9 100644 --- a/sbt-app/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/cross-ivy-maven/build.sbt @@ -12,6 +12,6 @@ TaskKey[Unit]("check") := { def isTestJar(n: String): Boolean = (n contains "scalacheck") || (n contains "specs2") - val testLibs = cp map (_.data.getName) filter isTestJar + val testLibs = cp map (_.data.name) filter isTestJar assert(testLibs.isEmpty, s"Compile Classpath has test libs:\n * ${testLibs.mkString("\n * ")}") } \ No newline at end of file diff --git a/sbt-app/src/sbt-test/dependency-management/deliver-artifacts/pending b/sbt-app/src/sbt-test/dependency-management/deliver-artifacts/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/deliver-artifacts/pending rename to sbt-app/src/sbt-test/dependency-management/deliver-artifacts/test diff --git a/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/build.sbt b/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/build.sbt index e9e171f3a..11e675987 100644 --- a/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/build.sbt @@ -26,13 +26,13 @@ lazy val root = (project in file(".")). 
check := { (a / update).value (b / update).value - val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} - val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.getName} + val acp = (a / Compile / externalDependencyClasspath).value.sortBy {_.data.name} + val bcp = (b / Compile / externalDependencyClasspath).value.sortBy {_.data.name} - if (acp exists { _.data.getName contains "slf4j-api-1.7.5.jar" }) { + if (acp exists { _.data.name contains "slf4j-api-1.7.5.jar" }) { sys.error("slf4j-api-1.7.5.jar found when it should NOT be included: " + acp.toString) } - if (bcp exists { _.data.getName contains "dispatch-core_2.11-0.11.1.jar" }) { + if (bcp exists { _.data.name contains "dispatch-core_2.11-0.11.1.jar" }) { sys.error("dispatch-core_2.11-0.11.1.jar found when it should NOT be included: " + bcp.toString) } diff --git a/sbt-app/src/sbt-test/dependency-management/exclude-scala/build.sbt b/sbt-app/src/sbt-test/dependency-management/exclude-scala/build.sbt index 50a29e584..549a1c14d 100644 --- a/sbt-app/src/sbt-test/dependency-management/exclude-scala/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/exclude-scala/build.sbt @@ -11,13 +11,16 @@ lazy val root = (project in file(".")). 
scalaOverride := check("scala.App").value ) -def check(className: String): Def.Initialize[Task[Unit]] = (Compile / fullClasspath) map { cp => - val existing = cp.files.filter(_.getName contains "scala-library") - println("Full classpath: " + cp.mkString("\n\t", "\n\t", "")) - println("scala-library.jar: " + existing.mkString("\n\t", "\n\t", "")) - val loader = ClasspathUtilities.toLoader(existing) - Class.forName(className, false, loader) -} +def check(className: String): Def.Initialize[Task[Unit]] = + import sbt.TupleSyntax.* + (Compile / fullClasspath, fileConverter.toTaskable) mapN { (cp, c) => + given FileConverter = c + val existing = cp.files.filter(_.toFile.getName contains "scala-library") + println("Full classpath: " + cp.mkString("\n\t", "\n\t", "")) + println("scala-library.jar: " + existing.mkString("\n\t", "\n\t", "")) + val loader = ClasspathUtilities.toLoader(existing.map(_.toFile())) + Class.forName(className, false, loader) + } def dependencies(base: File) = if( ( base / "stm").exists ) ("org.scala-tools" % "scala-stm_2.8.2" % "0.6") :: Nil diff --git a/sbt-app/src/sbt-test/dependency-management/force-update-period/test b/sbt-app/src/sbt-test/dependency-management/force-update-period/pending similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/force-update-period/test rename to sbt-app/src/sbt-test/dependency-management/force-update-period/pending diff --git a/sbt-app/src/sbt-test/dependency-management/force/build.sbt b/sbt-app/src/sbt-test/dependency-management/force/build.sbt index 363063a7f..7af3315f7 100644 --- a/sbt-app/src/sbt-test/dependency-management/force/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/force/build.sbt @@ -19,7 +19,7 @@ def libraryDeps(base: File) = { def check(ver: String) = (Compile / dependencyClasspath) map { jars => val log4j = jars map (_.data) collect { - case f if f.getName contains "log4j-" => f.getName + case f if f.name contains "log4j-" => f.name } if (log4j.size != 1 || 
!log4j.head.contains(ver)) sys.error("Did not download the correct jar.") diff --git a/sbt-app/src/sbt-test/dependency-management/ivy-settings-c/build.sbt b/sbt-app/src/sbt-test/dependency-management/ivy-settings-c/build.sbt index c93bab708..293d3fa64 100644 --- a/sbt-app/src/sbt-test/dependency-management/ivy-settings-c/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/ivy-settings-c/build.sbt @@ -3,7 +3,11 @@ ThisBuild / useCoursier := false lazy val commonSettings = Seq( autoScalaLibrary := false, scalaModuleInfo := None, - (Compile / unmanagedJars) ++= (scalaInstance map (_.allJars.toSeq)).value, + (Compile / unmanagedJars) ++= { + val converter = fileConverter.value + val xs = scalaInstance.value.allJars.toSeq + xs.map(_.toPath).map(x => converter.toVirtualFile(x): HashedVirtualFileRef) + }, (packageSrc / publishArtifact) := false, (packageDoc / publishArtifact) := false, publishMavenStyle := false diff --git a/sbt-app/src/sbt-test/dependency-management/make-pom-type/build.sbt b/sbt-app/src/sbt-test/dependency-management/make-pom-type/build.sbt index 2de1a2dd4..f216d47d8 100644 --- a/sbt-app/src/sbt-test/dependency-management/make-pom-type/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/make-pom-type/build.sbt @@ -32,8 +32,9 @@ lazy val expectedInter = def checkTask(expectedDep: xml.Elem) = TaskKey[Unit]("checkPom") := { - val file = makePom.value - val pom = xml.XML.loadFile(file) + val vf = makePom.value + val converter = fileConverter.value + val pom = xml.XML.loadFile(converter.toPath(vf).toFile) val actual = pom \\ "dependencies" val expected = {expectedDep} diff --git a/sbt-app/src/sbt-test/dependency-management/make-pom/build.sbt b/sbt-app/src/sbt-test/dependency-management/make-pom/build.sbt index 82c87dbea..23e38d5a6 100644 --- a/sbt-app/src/sbt-test/dependency-management/make-pom/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/make-pom/build.sbt @@ -1,7 +1,11 @@ import scala.xml._ lazy val root = (project in file(".")) 
settings ( - readPom := (makePom map XML.loadFile).value, + readPom := { + val vf = makePom.value + val converter = fileConverter.value + XML.loadFile(converter.toPath(vf).toFile) + }, TaskKey[Unit]("checkPom") := checkPom.value, TaskKey[Unit]("checkExtra") := checkExtra.value, TaskKey[Unit]("checkVersionPlusMapping") := checkVersionPlusMapping.value, diff --git a/sbt-app/src/sbt-test/dependency-management/module-name/build.sbt b/sbt-app/src/sbt-test/dependency-management/module-name/build.sbt index 09e992ccf..edbd9b91e 100644 --- a/sbt-app/src/sbt-test/dependency-management/module-name/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/module-name/build.sbt @@ -5,7 +5,9 @@ moduleName := "asdf" crossPaths := false TaskKey[Unit]("checkName") := Def task { - val path = (Compile / packageBin).value.getAbsolutePath + val converter = fileConverter.value + val vf = (Compile / packageBin).value + val path = converter.toPath(vf).toAbsolutePath.toString val module = moduleName.value val n = name.value assert(path contains module, s"Path $path did not contain module name $module") diff --git a/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/build.sbt b/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/build.sbt index eeacc0676..9d9b2b8d3 100644 --- a/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/build.sbt @@ -1,30 +1,31 @@ libraryDependencies ++= Seq("natives-windows", "natives-linux", "natives-osx") map ( c => - "org.lwjgl.lwjgl" % "lwjgl-platform" % "2.8.2" classifier c + "org.lwjgl.lwjgl" % "lwjgl-platform" % "2.8.2" classifier c ) autoScalaLibrary := false TaskKey[Unit]("check") := ((Compile / dependencyClasspath) map { cp => - assert(cp.size == 3, "Expected 3 jars, got: " + cp.files.mkString("(", ", ", ")")) + assert(cp.size == 3, "Expected 3 jars, got: " + cp.mkString("(", ", ", ")")) }).value TaskKey[Unit]("checkPom") := { - val file 
= makePom.value - val pom = xml.XML.loadFile(file) - val actual = pom \\ "dependencies" - def depSection(classifier: String) = - - org.lwjgl.lwjgl - lwjgl-platform - 2.8.2 - {classifier} - - val sections = + val vf = makePom.value + val converter = fileConverter.value + val pom = xml.XML.loadFile(converter.toPath(vf).toFile) + val actual = pom \\ "dependencies" + def depSection(classifier: String) = + + org.lwjgl.lwjgl + lwjgl-platform + 2.8.2 + {classifier} + + val sections = {depSection("natives-windows") ++ depSection("natives-linux") ++ depSection("natives-osx")} - def dropTopElem(s:String): String = s.split("""\n""").drop(1).dropRight(1).mkString("\n") - val pp = new xml.PrettyPrinter(Int.MaxValue, 0) - val expectedString = dropTopElem(pp.formatNodes(sections)) - val actualString = dropTopElem(pp.formatNodes(actual)) - assert(expectedString == actualString, "Expected dependencies section:\n" + expectedString + "\n\nActual:\n" + actualString) + def dropTopElem(s:String): String = s.split("""\n""").drop(1).dropRight(1).mkString("\n") + val pp = new xml.PrettyPrinter(Int.MaxValue, 0) + val expectedString = dropTopElem(pp.formatNodes(sections)) + val actualString = dropTopElem(pp.formatNodes(actual)) + assert(expectedString == actualString, "Expected dependencies section:\n" + expectedString + "\n\nActual:\n" + actualString) } diff --git a/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/changes/non-mergeable.sbt b/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/changes/non-mergeable.sbt index 823ae5825..dc2018bca 100644 --- a/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/changes/non-mergeable.sbt +++ b/sbt-app/src/sbt-test/dependency-management/multiple-classifiers/changes/non-mergeable.sbt @@ -8,8 +8,8 @@ autoScalaLibrary := false TaskKey[Unit]("check") := { val cp = (Compile / externalDependencyClasspath).value val tcp = (Test / externalDependencyClasspath).value - assert(cp.size == 2, "Expected 2 jars on compile 
classpath, got: " + cp.files.mkString("(", ", ", ")")) + assert(cp.size == 2, "Expected 2 jars on compile classpath, got: " + cp.mkString("(", ", ", ")")) // this should really be 1 because of intransitive(), but Ivy doesn't handle this. // So, this test can only check that the assertion reported in #582 isn't triggered. - assert(tcp.size == 2, "Expected 2 jar on test classpath, got: " + tcp.files.mkString("(", ", ", ")")) + assert(tcp.size == 2, "Expected 2 jar on test classpath, got: " + tcp.mkString("(", ", ", ")")) } diff --git a/sbt-app/src/sbt-test/dependency-management/no-file-fails-publish/build.sbt b/sbt-app/src/sbt-test/dependency-management/no-file-fails-publish/build.sbt index 2330bcdff..c1fe3d52a 100644 --- a/sbt-app/src/sbt-test/dependency-management/no-file-fails-publish/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/no-file-fails-publish/build.sbt @@ -1,13 +1,13 @@ ThisBuild / csrCacheDirectory := (ThisBuild / baseDirectory).value / "coursier-cache" ivyPaths := IvyPaths(baseDirectory.value.toString, Some(((ThisBuild / baseDirectory).value / "ivy" / "cache").toString)) - organization := "org.example" - name := "publish-missing-test" - autoScalaLibrary := false - addArtifact( - name { n => Artifact(n, "txt", "txt") }, - baseDirectory map { _ / "topublish.txt" } + name { n => Artifact(n, "txt", "txt") }, + Def.task { + val base = baseDirectory.value + val converter = fileConverter.value + converter.toVirtualFile((base / "topublish.txt").toPath) + }, ) diff --git a/sbt-app/src/sbt-test/dependency-management/override2/build.sbt b/sbt-app/src/sbt-test/dependency-management/override2/build.sbt index 4a4cee7c6..d4f465bf8 100644 --- a/sbt-app/src/sbt-test/dependency-management/override2/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/override2/build.sbt @@ -2,11 +2,11 @@ lazy val check = taskKey[Unit]("Runs the check") lazy val root = (project in file(".")) .settings( - autoScalaLibrary := false, + autoScalaLibrary := false, 
libraryDependencies += "org.webjars.npm" % "is-odd" % "2.0.0", dependencyOverrides += "org.webjars.npm" % "is-number" % "5.0.0", check := { - val cp = (Compile / externalDependencyClasspath).value.map {_.data.getName}.sorted + val cp = (Compile / externalDependencyClasspath).value.map {_.data.name}.sorted if (!(cp contains "is-number-5.0.0.jar")) { sys.error("is-number-5.0.0 not found when it should be included: " + cp.toString) } diff --git a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/build.sbt b/sbt-app/src/sbt-test/dependency-management/pom-classpaths/build.sbt deleted file mode 100644 index 723eba908..000000000 --- a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/build.sbt +++ /dev/null @@ -1,35 +0,0 @@ -ThisBuild / useCoursier := false - -import complete._ -import complete.DefaultParsers._ - -lazy val root = (project in file(".")). - settings( - externalPom(), - scalaVersion := "2.9.0-1", - check := checkTask.evaluated, - (Provided / managedClasspath) := Classpaths.managedJars(Provided, classpathTypes.value, update.value) - ) - -def checkTask = Def.inputTask { - val result = parser.parsed - val (conf, names) = result - println("Checking: " + conf.name) - checkClasspath(conf match { - case Provided => (Provided / managedClasspath).value - case Compile => (Compile / fullClasspath).value - case Test => (Test / fullClasspath).value - case Runtime => (Runtime / fullClasspath).value - }, names.toSet) -} - -lazy val check = InputKey[Unit]("check") -def parser: Parser[(Configuration,Seq[String])] = (Space ~> token(cp(Compile) | cp(Runtime) | cp(Provided) | cp(Test))) ~ spaceDelimited("") -def cp(c: Configuration): Parser[Configuration] = c.name ^^^ c -def checkClasspath(cp: Seq[Attributed[File]], names: Set[String]) = { - val fs = cp.files filter { _.getName endsWith ".jar" } - val intersect = fs filter { f => names exists { f.getName startsWith _ } } - assert(intersect == fs, "Expected:" + seqStr(names.toSeq) + "Got: " + seqStr(fs)) - () -} 
-def seqStr(s: Seq[_]) = s.mkString("\n\t", "\n\t", "\n") diff --git a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/pom.xml b/sbt-app/src/sbt-test/dependency-management/pom-classpaths/pom.xml deleted file mode 100644 index 20162df7e..000000000 --- a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/pom.xml +++ /dev/null @@ -1,27 +0,0 @@ - - 4.0.0 - org.example - sbt-pom - 1.0-SNAPSHOT - - - org.scalatest - scalatest_2.9.0 - 1.6.1 - test - - - org.scala-lang - scala-library - 2.9.0-1 - compile - - - javax.servlet - servlet-api - 2.5 - provided - - - diff --git a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/test b/sbt-app/src/sbt-test/dependency-management/pom-classpaths/test deleted file mode 100644 index f1d2f562d..000000000 --- a/sbt-app/src/sbt-test/dependency-management/pom-classpaths/test +++ /dev/null @@ -1,3 +0,0 @@ -> check test scalatest scala-library servlet-api -> check runtime scala-library -> check compile scala-library servlet-api \ No newline at end of file diff --git a/sbt-app/src/sbt-test/dependency-management/pom-packaging/build.sbt b/sbt-app/src/sbt-test/dependency-management/pom-packaging/build.sbt index 8f19e3462..18d2b74d3 100644 --- a/sbt-app/src/sbt-test/dependency-management/pom-packaging/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/pom-packaging/build.sbt @@ -1,21 +1,18 @@ val root = project in file(".") - val subJar = project in file("subJar") - def warArtifact = (Compile / packageBin / artifact) ~= (_ withType "war" withExtension "war") val subWar = project in file("subWar") settings warArtifact - val subParent = project in file("subParent") settings ((Compile / publishArtifact) := false) val checkPom = taskKey[Unit]("") (ThisBuild / checkPom) := { - checkPackaging((subJar / makePom).value, "jar") - checkPackaging((subWar / makePom).value, "war") - checkPackaging((subParent / makePom).value, "pom") + checkPackaging((subJar / makePom).value, "jar", fileConverter.value) + 
checkPackaging((subWar / makePom).value, "war", fileConverter.value) + checkPackaging((subParent / makePom).value, "pom", fileConverter.value) } -def checkPackaging(pom: File, expected: String) = { - val packaging = (xml.XML.loadFile(pom) \\ "packaging").text +def checkPackaging(vf: xsbti.HashedVirtualFileRef, expected: String, converter: xsbti.FileConverter) = { + val packaging = (xml.XML.loadFile(converter.toPath(vf).toFile) \\ "packaging").text if (packaging != expected) - sys error s"Incorrect packaging for '$pom'. Expected '$expected', but got '$packaging'" + sys error s"Incorrect packaging for '$vf'. Expected '$expected', but got '$packaging'" } diff --git a/sbt-app/src/sbt-test/dependency-management/pom-scope/build.sbt b/sbt-app/src/sbt-test/dependency-management/pom-scope/build.sbt index a1df43ecb..310258a92 100644 --- a/sbt-app/src/sbt-test/dependency-management/pom-scope/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/pom-scope/build.sbt @@ -20,7 +20,10 @@ lazy val root = (project in file(".")). ) ) -def checkPom = makePom map { pom => +val checkPom = Def.task { + val vf = makePom.value + val converter = fileConverter.value + val pom = converter.toPath(vf).toFile val expected = Seq( ("a", None, false, None), ("b", Some("runtime"), true, None), diff --git a/sbt-app/src/sbt-test/dependency-management/pom-type/build.sbt b/sbt-app/src/sbt-test/dependency-management/pom-type/build.sbt index 5c03c5e61..2f327c51a 100644 --- a/sbt-app/src/sbt-test/dependency-management/pom-type/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/pom-type/build.sbt @@ -6,8 +6,9 @@ lazy val root = (project in file(".")). 
libraryDependencies += { ("org.scala-tools.sbinary" %% "sbinary" % "0.4.1").withSources().withJavadoc() }, libraryDependencies += { ("org.scala-sbt" % "io" % "0.13.8").intransitive() }, checkPom := { + val converter = fileConverter.value val pomFile = makePom.value - val pom = xml.XML.loadFile(pomFile) + val pom = xml.XML.loadFile(converter.toPath(pomFile).toFile) val tpe = pom \\ "type" if (tpe.nonEmpty) { sys.error("Expected no sections, got: " + tpe + " in \n\n" + pom) diff --git a/sbt-app/src/sbt-test/dependency-management/provided/build.sbt b/sbt-app/src/sbt-test/dependency-management/provided/build.sbt index bc3e7118f..e8d926fab 100644 --- a/sbt-app/src/sbt-test/dependency-management/provided/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/provided/build.sbt @@ -8,7 +8,7 @@ lazy val root = (project in file(".")). provided := (baseDirectory.value / "useProvided").exists, configuration := (if (provided.value) Provided else Compile), libraryDependencies += "javax.servlet" % "servlet-api" % "2.5" % configuration.value.name, - Provided / managedClasspath := Classpaths.managedJars(Provided, classpathTypes.value, update.value), + Provided / managedClasspath := Classpaths.managedJars(Provided, classpathTypes.value, update.value, fileConverter.value), check := { val result = ( Space ~> token(Compile.name.id | Runtime.name | Provided.name | Test.name) ~ token(Space ~> Bool) @@ -21,7 +21,8 @@ lazy val root = (project in file(".")). 
case Test.name => (Test / fullClasspath).value case _ => sys.error(s"Invalid config: $conf") } - checkServletAPI(cp.files, expected, conf) + given FileConverter = fileConverter.value + checkServletAPI(cp.files.map(_.toFile()), expected, conf) } ) diff --git a/sbt-app/src/sbt-test/dependency-management/scala3-auto-scala-library/build.sbt b/sbt-app/src/sbt-test/dependency-management/scala3-auto-scala-library/build.sbt index 9478d98dd..365c5e690 100644 --- a/sbt-app/src/sbt-test/dependency-management/scala3-auto-scala-library/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/scala3-auto-scala-library/build.sbt @@ -6,7 +6,7 @@ val checkScalaLibrary = TaskKey[Unit]("checkScalaLibrary") checkScalaLibrary := { val scalaLibsJars = (Compile / managedClasspath).value - .map(_.data.getName) + .map(_.data.name) .filter(name => name.startsWith("scala-library") || name.startsWith("scala3-library")) .sorted val expectedScalaLibsJars = Seq( diff --git a/sbt-app/src/sbt-test/dependency-management/test-artifact/pending b/sbt-app/src/sbt-test/dependency-management/test-artifact/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/test-artifact/pending rename to sbt-app/src/sbt-test/dependency-management/test-artifact/test diff --git a/sbt-app/src/sbt-test/dependency-management/transitive-excludes/build.sbt b/sbt-app/src/sbt-test/dependency-management/transitive-excludes/build.sbt index 5583686d4..766a4efe9 100644 --- a/sbt-app/src/sbt-test/dependency-management/transitive-excludes/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/transitive-excludes/build.sbt @@ -9,9 +9,9 @@ libraryDependencies += "exclude.test" % "app" % "1.0.0" val checkDependencies = taskKey[Unit]("Checks that dependencies are correct.") checkDependencies := { - val hasBadJar = (Compile / fullClasspath).value.exists { jar => jar.data.getName contains "bottom-1.0.0.jar"} - val errorJarString = (Compile / fullClasspath).value.map(_.data.getName).mkString(" * ", "\n * ", 
"") - val hasBadMiddleJar = (Compile / fullClasspath).value.exists { jar => jar.data.getName contains "middle-1.0.0.jar"} + val hasBadJar = (Compile / fullClasspath).value.exists { jar => jar.data.name contains "bottom-1.0.0.jar"} + val errorJarString = (Compile / fullClasspath).value.map(_.data.name).mkString(" * ", "\n * ", "") + val hasBadMiddleJar = (Compile / fullClasspath).value.exists { jar => jar.data.name contains "middle-1.0.0.jar"} assert(!hasBadMiddleJar, s"Failed to exclude excluded dependency on classpath!\nFound:\n$errorJarString") assert(!hasBadJar, s"Failed to exclude transitive excluded dependency on classpath!\nFound:\n$errorJarString") val modules = diff --git a/sbt-app/src/sbt-test/dependency-management/update-sbt-classifiers/test b/sbt-app/src/sbt-test/dependency-management/update-sbt-classifiers/pending similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/update-sbt-classifiers/test rename to sbt-app/src/sbt-test/dependency-management/update-sbt-classifiers/pending diff --git a/sbt-app/src/sbt-test/dependency-management/url/build.sbt b/sbt-app/src/sbt-test/dependency-management/url/build.sbt index f70617597..7f497aa55 100644 --- a/sbt-app/src/sbt-test/dependency-management/url/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/url/build.sbt @@ -15,9 +15,11 @@ lazy val root = (project in file(".")). 
) def checkClasspath(conf: Configuration) = - (conf / fullClasspath) map { cp => + import sbt.TupleSyntax.* + (conf / fullClasspath, fileConverter.toTaskable) mapN { (cp, c) => + given FileConverter = c try { - val loader = ClasspathUtilities.toLoader(cp.files) + val loader = ClasspathUtilities.toLoader(cp.files.map(_.toFile())) Class.forName("org.jsoup.Jsoup", false, loader) () } diff --git a/sbt-app/src/sbt-test/java/options/test b/sbt-app/src/sbt-test/java/options/test index 3366df891..ddefda857 100644 --- a/sbt-app/src/sbt-test/java/options/test +++ b/sbt-app/src/sbt-test/java/options/test @@ -1,4 +1,4 @@ > 'set javacOptions := Nil' > compile > 'set javacOptions ++= Seq("-source", "1.4")' --> compile \ No newline at end of file +-> compile diff --git a/sbt-app/src/sbt-test/package/lazy-name/test b/sbt-app/src/sbt-test/package/lazy-name/test index 9afdadb29..f68ea1858 100644 --- a/sbt-app/src/sbt-test/package/lazy-name/test +++ b/sbt-app/src/sbt-test/package/lazy-name/test @@ -6,17 +6,18 @@ > set name := "lazy-package-name" > set crossPaths := false +> set scalaVersion := "3.3.1" > set version := "0.1.1" > package -$ exists target/lazy-package-name-0.1.1.jar +$ exists target/out/jvm/scala-3.3.1/lazy-package-name/lazy-package-name-0.1.1.jar > clean > set version := "0.1.2" > package -$ exists target/lazy-package-name-0.1.2.jar +$ exists target/out/jvm/scala-3.3.1/lazy-package-name/lazy-package-name-0.1.2.jar > clean > set version := "0.1.3" > package -$ exists target/lazy-package-name-0.1.3.jar +$ exists target/out/jvm/scala-3.3.1/lazy-package-name/lazy-package-name-0.1.3.jar diff --git a/sbt-app/src/sbt-test/package/manifest/build.sbt b/sbt-app/src/sbt-test/package/manifest/build.sbt index 2711329b8..b0ad580d1 100644 --- a/sbt-app/src/sbt-test/package/manifest/build.sbt +++ b/sbt-app/src/sbt-test/package/manifest/build.sbt @@ -1,12 +1,10 @@ import java.util.jar.{Attributes, Manifest} import Path.makeString +scalaVersion := "2.12.18" name := "Jar Manifest Test" - 
version := "0.2" - crossPaths := false - mainClass := Some("jartest.Main") Compile / packageBin / packageOptions := { diff --git a/sbt-app/src/sbt-test/package/manifest/src/main/scala/jartest/Main.scala b/sbt-app/src/sbt-test/package/manifest/src/main/scala/jartest/Main.scala index b1c446adc..fc2630c59 100644 --- a/sbt-app/src/sbt-test/package/manifest/src/main/scala/jartest/Main.scala +++ b/sbt-app/src/sbt-test/package/manifest/src/main/scala/jartest/Main.scala @@ -1,6 +1,5 @@ package jartest -object Main -{ - def main(args: Array[String]): Unit = () +object Main { + def main(args: Array[String]): Unit = () } diff --git a/sbt-app/src/sbt-test/package/manifest/test b/sbt-app/src/sbt-test/package/manifest/test index e77e9b799..1c162e9df 100644 --- a/sbt-app/src/sbt-test/package/manifest/test +++ b/sbt-app/src/sbt-test/package/manifest/test @@ -1,4 +1,4 @@ > package -$ exists ./target/jar-manifest-test-0.2.jar -$ exec java -jar ./target/jar-manifest-test-0.2.jar -> run \ No newline at end of file +$ exists target/out/jvm/scala-2.12.18/jar-manifest-test/jar-manifest-test-0.2.jar +$ exec java -jar ./target/out/jvm/scala-2.12.18/jar-manifest-test/jar-manifest-test-0.2.jar +> run diff --git a/sbt-app/src/sbt-test/package/mappings/build.sbt b/sbt-app/src/sbt-test/package/mappings/build.sbt index 81c727e70..3cf1bf928 100644 --- a/sbt-app/src/sbt-test/package/mappings/build.sbt +++ b/sbt-app/src/sbt-test/package/mappings/build.sbt @@ -1,17 +1,22 @@ name := "Mappings Test" +scalaVersion := "3.3.1" version := "0.2" Compile / packageBin / mappings ++= { - val test = file("test") + val converter = fileConverter.value + val test = converter.toVirtualFile(file("test").toPath) Seq( test -> "test1", - test -> "test1", + // not sure why we allowed duplicates here + // test -> "test1", test -> "test2" ) } lazy val unzipPackage = taskKey[Unit]("extract jar file") unzipPackage := { - IO.unzip((Compile / packageBin).value, target.value / "extracted") + val converter = 
fileConverter.value + val p = converter.toPath((Compile / packageBin).value) + IO.unzip(p.toFile(), target.value / "extracted") } diff --git a/sbt-app/src/sbt-test/package/mappings/test b/sbt-app/src/sbt-test/package/mappings/test index f95f9bb76..a870b5d97 100644 --- a/sbt-app/src/sbt-test/package/mappings/test +++ b/sbt-app/src/sbt-test/package/mappings/test @@ -1,3 +1,3 @@ > unzipPackage -$ exists target/extracted/test2 -$ exists target/extracted/test1 +$ exists target/out/jvm/scala-3.3.1/mappings-test/extracted/test2 +$ exists target/out/jvm/scala-3.3.1/mappings-test/extracted/test1 diff --git a/sbt-app/src/sbt-test/package/resources/build.sbt b/sbt-app/src/sbt-test/package/resources/build.sbt index 51f5865e6..e6e9709e7 100644 --- a/sbt-app/src/sbt-test/package/resources/build.sbt +++ b/sbt-app/src/sbt-test/package/resources/build.sbt @@ -1,10 +1,9 @@ import Path.makeString name := "Main Resources Test" - version := "0.1" - crossPaths := false +scalaVersion := "3.3.1" packageOptions := { def manifestExtra = { diff --git a/sbt-app/src/sbt-test/package/resources/test b/sbt-app/src/sbt-test/package/resources/test index fefcded95..27ee7a65d 100644 --- a/sbt-app/src/sbt-test/package/resources/test +++ b/sbt-app/src/sbt-test/package/resources/test @@ -10,7 +10,7 @@ # This should fail because sbt should include the resource in the jar but it won't have the right # directory structure --$ exec java -jar ./target/main-resources-test-0.1.jar +-$ exec java -jar ./target/out/jvm/scala-3.3.1/main-resources-test/main-resources-test-0.1.jar # Give the resource the right directory structure $ mkdir src/main/resources/jartest @@ -27,4 +27,4 @@ $ delete src/main/resources/main_resource_test > package # This should succeed because sbt should include the resource in the jar with the right directory structure -$ exec java -jar ./target/main-resources-test-0.1.jar \ No newline at end of file +$ exec java -jar 
./target/out/jvm/scala-3.3.1/main-resources-test/main-resources-test-0.1.jar diff --git a/sbt-app/src/sbt-test/run/classpath/build.sbt b/sbt-app/src/sbt-test/run/classpath/build.sbt index acdd81418..219847e8c 100644 --- a/sbt-app/src/sbt-test/run/classpath/build.sbt +++ b/sbt-app/src/sbt-test/run/classpath/build.sbt @@ -1 +1,4 @@ -(Runtime / externalDependencyClasspath) += file("conf") \ No newline at end of file +(Runtime / externalDependencyClasspath) += { + val converter = fileConverter.value + converter.toVirtualFile(file("conf").toPath): HashedVirtualFileRef +} \ No newline at end of file diff --git a/sbt-app/src/sbt-test/run/concurrent/build.sbt b/sbt-app/src/sbt-test/run/concurrent/build.sbt index 12ec298a8..4578d405f 100644 --- a/sbt-app/src/sbt-test/run/concurrent/build.sbt +++ b/sbt-app/src/sbt-test/run/concurrent/build.sbt @@ -7,6 +7,7 @@ def runTestTask(pre: Def.Initialize[Task[Unit]]) = val cp = (Compile / fullClasspath).value val main = (Compile / mainClass).value getOrElse sys.error("No main class found") val args = baseDirectory.value.getAbsolutePath :: Nil + given FileConverter = fileConverter.value r.run(main, cp.files, args, streams.value.log).get } diff --git a/sbt-app/src/sbt-test/source-dependencies/pipelining-java/test b/sbt-app/src/sbt-test/source-dependencies/pipelining-java/disabled similarity index 100% rename from sbt-app/src/sbt-test/source-dependencies/pipelining-java/test rename to sbt-app/src/sbt-test/source-dependencies/pipelining-java/disabled diff --git a/sbt-app/src/sbt-test/source-dependencies/pipelining/test b/sbt-app/src/sbt-test/source-dependencies/pipelining/disabled similarity index 100% rename from sbt-app/src/sbt-test/source-dependencies/pipelining/test rename to sbt-app/src/sbt-test/source-dependencies/pipelining/disabled diff --git a/sbt-app/src/sbt-test/source-dependencies/replace-test-a/build.sbt b/sbt-app/src/sbt-test/source-dependencies/replace-test-a/build.sbt index 35547df68..dc343c323 100644 --- 
a/sbt-app/src/sbt-test/source-dependencies/replace-test-a/build.sbt +++ b/sbt-app/src/sbt-test/source-dependencies/replace-test-a/build.sbt @@ -1,4 +1,4 @@ -import java.net.URLClassLoader +import sbt.internal.inc.classpath.ClasspathUtilities lazy val root = (project in file(".")). settings( @@ -7,8 +7,11 @@ lazy val root = (project in file(".")). ) def checkTask(className: String) = - (Configurations.Runtime / fullClasspath) map { runClasspath => - val cp = runClasspath.map(_.data.toURI.toURL).toArray - Class.forName(className, false, new URLClassLoader(cp)) + import sbt.TupleSyntax.* + (Configurations.Runtime / fullClasspath, fileConverter) mapN { (runClasspath, c) => + given FileConverter = c + val cp = runClasspath.files + val loader = ClasspathUtilities.toLoader(cp.map(_.toFile())) + Class.forName(className, false, loader) () } diff --git a/sbt-app/src/sbt-test/tests/junit-xml-report/build.sbt b/sbt-app/src/sbt-test/tests/junit-xml-report/build.sbt index 8fae2d48f..1ecdd37ba 100644 --- a/sbt-app/src/sbt-test/tests/junit-xml-report/build.sbt +++ b/sbt-app/src/sbt-test/tests/junit-xml-report/build.sbt @@ -5,16 +5,16 @@ import Defaults._ val checkReport = taskKey[Unit]("Check the test reports") val checkNoReport = taskKey[Unit]("Check that no reports are present") -val oneSecondReportFile = "target/test-reports/TEST-a.pkg.OneSecondTest.xml" -val failingReportFile = "target/test-reports/TEST-another.pkg.FailingTest.xml" +val oneSecondReportFile = "target/out/jvm/scala-2.12.18/root/test-reports/TEST-a.pkg.OneSecondTest.xml" +val failingReportFile = "target/out/jvm/scala-2.12.18/root/test-reports/TEST-another.pkg.FailingTest.xml" -val flatSuiteReportFile = "target/test-reports/TEST-my.scalatest.MyFlatSuite.xml" -val nestedSuitesReportFile = "target/test-reports/TEST-my.scalatest.MyNestedSuites.xml" +val flatSuiteReportFile = "target/out/jvm/scala-2.12.18/root/test-reports/TEST-my.scalatest.MyFlatSuite.xml" +val nestedSuitesReportFile = 
"target/out/jvm/scala-2.12.18/root/test-reports/TEST-my.scalatest.MyNestedSuites.xml" val scalatest = "org.scalatest" %% "scalatest" % "3.0.5" val junitinterface = "com.novocode" % "junit-interface" % "0.11" -ThisBuild / scalaVersion := "2.12.12" +ThisBuild / scalaVersion := "2.12.18" lazy val root = (project in file(".")). settings( diff --git a/sbt-app/src/sbt-test/tests/source-directory-name/build.sbt b/sbt-app/src/sbt-test/tests/source-directory-name/build.sbt new file mode 100644 index 000000000..fdde0b775 --- /dev/null +++ b/sbt-app/src/sbt-test/tests/source-directory-name/build.sbt @@ -0,0 +1,8 @@ +import sbt.io.Using + +TaskKey[Unit]("check") := { + val p = (Compile / packageBin).value + val c = fileConverter.value + Using.jarFile(false)(c.toPath(p).toFile()): jar => + assert(jar.getJarEntry("ch/epfl/scala/Client.class") ne null) +} diff --git a/sbt-app/src/sbt-test/tests/source-directory-name/test b/sbt-app/src/sbt-test/tests/source-directory-name/test index 49be54930..c9f3e89a1 100644 --- a/sbt-app/src/sbt-test/tests/source-directory-name/test +++ b/sbt-app/src/sbt-test/tests/source-directory-name/test @@ -1,3 +1,3 @@ -> ++2.12.17! +> ++2.12.18! > compile -$ exists target/scala-2.12/classes/ch/epfl/scala/Client.class +> check diff --git a/sbt-app/src/sbt-test/tests/test-quick/test b/sbt-app/src/sbt-test/tests/test-quick/pending similarity index 98% rename from sbt-app/src/sbt-test/tests/test-quick/test rename to sbt-app/src/sbt-test/tests/test-quick/pending index c86f0276f..b3afce6e3 100644 --- a/sbt-app/src/sbt-test/tests/test-quick/test +++ b/sbt-app/src/sbt-test/tests/test-quick/pending @@ -9,7 +9,7 @@ $ copy-file changed/A.scala src/main/scala/A.scala > compile $ sleep 2000 # Create is run. Delete is not since it doesn't have src/main dependency. --> testQuick +> testQuick > testOnly Delete # Previous run of Create failed, re-run. 
> testQuick Create diff --git a/sbt-app/src/test/scala/sbt/RunFromSourceMain.scala b/sbt-app/src/test/scala/sbt/RunFromSourceMain.scala index b74cd9a2a..139e7c8c2 100644 --- a/sbt-app/src/test/scala/sbt/RunFromSourceMain.scala +++ b/sbt-app/src/test/scala/sbt/RunFromSourceMain.scala @@ -45,7 +45,7 @@ object RunFromSourceMain { Vector(workingDirectory.toString, scalaVersion, sbtVersion, cp.mkString(pathSeparator)) val context = LoggerContext() val log = context.logger("RunFromSourceMain.fork", None, None) - try runner.fork("sbt.RunFromSourceMain", cp, options, log) + try runner.fork("sbt.RunFromSourceMain", cp.map(_.toPath()), options, log) finally context.close() } diff --git a/util-cache/src/main/scala/sbt/internal/util/PlainVirtualFile1.scala b/util-cache/src/main/scala/sbt/internal/util/PlainVirtualFile1.scala new file mode 100644 index 000000000..ece67a279 --- /dev/null +++ b/util-cache/src/main/scala/sbt/internal/util/PlainVirtualFile1.scala @@ -0,0 +1,16 @@ +package sbt +package internal +package util + +import java.io.InputStream +import java.nio.file.{ Files, Path, Paths } +import sbt.util.{ Digest, HashUtil } +import xsbti.{ BasicVirtualFileRef, FileConverter, PathBasedFile, VirtualFileRef, VirtualFile } + +class PlainVirtualFile1(path: Path, id: String) extends BasicVirtualFileRef(id) with PathBasedFile: + override def contentHash: Long = HashUtil.farmHash(path) + override def contentHashStr: String = Digest.sha256Hash(input()).toString() + override def name(): String = path.getFileName.toString + override def input(): InputStream = Files.newInputStream(path) + override def toPath: Path = path +end PlainVirtualFile1 diff --git a/util-cache/src/main/scala/sbt/internal/util/StringVirtualFile1.scala b/util-cache/src/main/scala/sbt/internal/util/StringVirtualFile1.scala new file mode 100644 index 000000000..7315677c5 --- /dev/null +++ b/util-cache/src/main/scala/sbt/internal/util/StringVirtualFile1.scala @@ -0,0 +1,14 @@ +package sbt.internal.util + +import 
java.io.{ ByteArrayInputStream, InputStream } +import sbt.util.{ Digest, HashUtil } +import xsbti.{ BasicVirtualFileRef, VirtualFile } + +case class StringVirtualFile1(path: String, content: String) + extends BasicVirtualFileRef(path) + with VirtualFile: + override def contentHash: Long = HashUtil.farmHash(content.getBytes("UTF-8")) + override def contentHashStr: String = Digest.sha256Hash(input).toString() + override def input: InputStream = new ByteArrayInputStream(content.getBytes("UTF-8")) + override def toString: String = s"StringVirtualFile1($path, )" +end StringVirtualFile1 diff --git a/util-cache/src/main/scala/sbt/util/ActionCache.scala b/util-cache/src/main/scala/sbt/util/ActionCache.scala new file mode 100644 index 000000000..2f19010d6 --- /dev/null +++ b/util-cache/src/main/scala/sbt/util/ActionCache.scala @@ -0,0 +1,82 @@ +package sbt.util + +import scala.reflect.ClassTag +import scala.annotation.{ meta, StaticAnnotation } +import sjsonnew.{ HashWriter, JsonFormat } +import sjsonnew.support.murmurhash.Hasher +import xsbti.VirtualFile +import java.nio.file.Path +import scala.quoted.{ Expr, FromExpr, ToExpr, Quotes } + +object ActionCache: + /** + * This is a key function that drives remote caching. + * This is intended to be called from the cached task macro for the most part. + * + * - key: This represents the input key for this action, typically consists + * of all the input into the action. For the purpose of caching, + * all we need from the input is to generate some hash value. + * - codeContentHash: This hash represents the Scala code of the task. + * Even if the input tasks are the same, the code part needs to be tracked. + * - extraHash: Reserved for later, which we might use to invalidate the cache. + * - tags: Tags to track cache level. + * - action: The actual action to be cached. + * - config: The configuration that's used to store where the cache backends are. 
+ */ + def cache[I: HashWriter, O: JsonFormat: ClassTag]( + key: I, + codeContentHash: Digest, + extraHash: Digest, + tags: List[CacheLevelTag], + )( + action: I => (O, Seq[VirtualFile]) + )( + config: BuildWideCacheConfiguration + ): O = + val input = + Digest.sha256Hash(codeContentHash, extraHash, Digest.dummy(Hasher.hashUnsafe[I](key))) + val store = config.store + val result = store + .get[O](input) + .getOrElse: + val (newResult, outputs) = action(key) + store.put[O](input, newResult, outputs) + // run the side effect to sync the output files + store.syncBlobs(result.outputFiles, config.outputDirectory) + result.value +end ActionCache + +class BuildWideCacheConfiguration( + val store: ActionCacheStore, + val outputDirectory: Path, +): + override def toString(): String = + s"BuildWideCacheConfiguration(store = $store, outputDirectory = $outputDirectory)" +end BuildWideCacheConfiguration + +@meta.getter +class cacheLevel( + include: Array[CacheLevelTag], +) extends StaticAnnotation + +enum CacheLevelTag: + case Local + case Remote +end CacheLevelTag + +object CacheLevelTag: + private[sbt] val all: Array[CacheLevelTag] = Array(CacheLevelTag.Local, CacheLevelTag.Remote) + + given CacheLevelTagToExpr: ToExpr[CacheLevelTag] with + def apply(tag: CacheLevelTag)(using Quotes): Expr[CacheLevelTag] = + tag match + case CacheLevelTag.Local => '{ CacheLevelTag.Local } + case CacheLevelTag.Remote => '{ CacheLevelTag.Remote } + + given CacheLevelTagFromExpr: FromExpr[CacheLevelTag] with + def unapply(expr: Expr[CacheLevelTag])(using Quotes): Option[CacheLevelTag] = + expr match + case '{ CacheLevelTag.Local } => Some(CacheLevelTag.Local) + case '{ CacheLevelTag.Remote } => Some(CacheLevelTag.Remote) + case _ => None +end CacheLevelTag diff --git a/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala new file mode 100644 index 000000000..dffc3a03c --- /dev/null +++ 
b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala @@ -0,0 +1,198 @@ +package sbt.util + +import java.io.InputStream +import java.nio.file.{ Files, Path } +import sjsonnew.* +import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser } +import sjsonnew.shaded.scalajson.ast.unsafe.JValue + +import scala.collection.mutable +import scala.reflect.ClassTag +import scala.util.control.NonFatal +import sbt.internal.util.PlainVirtualFile1 +import sbt.io.IO +import sbt.io.syntax.* +import xsbti.{ HashedVirtualFileRef, PathBasedFile, VirtualFile, VirtualFileRef } +import sbt.nio.file.FileAttributes + +/** + * An abstraction of a remote or local cache store. + */ +trait ActionCacheStore: + /** + * Put a value and blobs to the cache store for later retrieval, + * based on the `actionDigest`. + */ + def put[A1: ClassTag: JsonFormat]( + actionDigest: Digest, + value: A1, + blobs: Seq[VirtualFile], + ): ActionResult[A1] + + /** + * Get the value for the key from the cache store. + */ + def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] + + /** + * Put VirtualFile blobs to the cache store for later retrieval. + */ + def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] + + /** + * Get blobs from the cache store. + */ + def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] + + /** + * Materialize blobs to the output directory. 
+ */ + def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] +end ActionCacheStore + +class AggregateActionCacheStore(stores: Seq[ActionCacheStore]) extends ActionCacheStore: + extension [A1](xs: Seq[A1]) + // unlike collectFirst this accepts A1 => Option[A2] + inline def collectFirst1[A2](f: A1 => Option[A2]): Option[A2] = + xs.foldLeft(Option.empty[A2]): (res, x) => + res.orElse(f(x)) + + // unlike collectFirst this accepts A1 => Seq[A2] + inline def collectFirst2[A2](f: A1 => Seq[A2]): Seq[A2] = + xs.foldLeft(Seq.empty[A2]): (res, x) => + if res.isEmpty then f(x) else res + + override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] = + stores.collectFirst1(_.get[A1](input)) + + override def put[A1: ClassTag: JsonFormat]( + actionDigest: Digest, + value: A1, + blobs: Seq[VirtualFile], + ): ActionResult[A1] = + (stores + .foldLeft(Option.empty[ActionResult[A1]]): (res, store) => + // put the value into all stores + val v = store.put[A1](actionDigest, value, blobs) + res.orElse(Some(v)) + ) + .get + + override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] = + stores.foldLeft(Seq.empty[HashedVirtualFileRef]): (res, store) => + // put the blobs in all stores + val xs = store.putBlobs(blobs) + if res.isEmpty then xs else res + + override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] = + stores.collectFirst2(_.getBlobs(refs)) + + override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] = + stores.collectFirst2(_.syncBlobs(refs, outputDirectory)) +end AggregateActionCacheStore + +object AggregateActionCacheStore: + lazy val empty: AggregateActionCacheStore = AggregateActionCacheStore(Nil) +end AggregateActionCacheStore + +class InMemoryActionCacheStore extends ActionCacheStore: + private val underlying: mutable.Map[Digest, JValue] = mutable.Map.empty + + override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] = + 
underlying + .get(input) + .map: j => + Converter.fromJsonUnsafe[ActionResult[A1]](j) + + override def put[A1: ClassTag: JsonFormat]( + key: Digest, + value: A1, + blobs: Seq[VirtualFile], + ): ActionResult[A1] = + val refs = putBlobs(blobs) + val v = ActionResult(value, refs) + val json = Converter.toJsonUnsafe(v) + underlying(key) = json + v + + override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] = + blobs.map: (b: VirtualFile) => + (b: HashedVirtualFileRef) + + // we won't keep the blobs in-memory so return Nil + override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] = + Nil + + // we won't keep the blobs in-memory so return Nil + // to implement this correctly, we'd have to grab the content from the original file + override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] = + Nil + + override def toString(): String = + underlying.toString() +end InMemoryActionCacheStore + +class DiskActionCacheStore(base: Path) extends ActionCacheStore: + lazy val casBase: Path = { + val dir = base.resolve("cas") + IO.createDirectory(dir.toFile) + dir + } + + lazy val acBase: Path = { + val dir = base.resolve("ac") + IO.createDirectory(dir.toFile) + dir + } + + override def get[A1: ClassTag: JsonFormat](input: Digest): Option[ActionResult[A1]] = + val acFile = acBase.toFile / input.toString + if acFile.exists then + val str = IO.read(acFile) + val json = Parser.parseUnsafe(str) + try + val value = Converter.fromJsonUnsafe[ActionResult[A1]](json) + Some(value) + catch case NonFatal(_) => None + else None + + override def put[A1: ClassTag: JsonFormat]( + key: Digest, + value: A1, + blobs: Seq[VirtualFile], + ): ActionResult[A1] = + val acFile = acBase.toFile / key.toString + val refs = putBlobs(blobs) + val v = ActionResult(value, refs) + val json = Converter.toJsonUnsafe(v) + IO.write(acFile, CompactPrinter(json)) + v + + override def putBlobs(blobs: Seq[VirtualFile]): Seq[HashedVirtualFileRef] = + 
blobs.map: (b: VirtualFile) => + val outFile = casBase.toFile / Digest(b.contentHashStr).toString + IO.transfer(b.input, outFile) + (b: HashedVirtualFileRef) + + override def getBlobs(refs: Seq[HashedVirtualFileRef]): Seq[VirtualFile] = + refs.flatMap: r => + val casFile = casBase.toFile / Digest(r.contentHashStr).toString + if casFile.exists then + r match + case p: PathBasedFile => Some(p) + case _ => None + else None + + override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] = + refs.flatMap: r => + val casFile = casBase.toFile / Digest(r.contentHashStr).toString + if casFile.exists then + val shortPath = + if r.id.startsWith("${OUT}/") then r.id.drop(7) + else r.id + val outPath = outputDirectory.resolve(shortPath) + Files.createDirectories(outPath.getParent()) + if outPath.toFile().exists() then IO.delete(outPath.toFile()) + Some(Files.createSymbolicLink(outPath, casFile.toPath)) + else None +end DiskActionCacheStore diff --git a/util-cache/src/main/scala/sbt/util/ActionResult.scala b/util-cache/src/main/scala/sbt/util/ActionResult.scala new file mode 100644 index 000000000..da55a654f --- /dev/null +++ b/util-cache/src/main/scala/sbt/util/ActionResult.scala @@ -0,0 +1,37 @@ +package sbt.util + +import scala.reflect.ClassTag +import sjsonnew.* +import xsbti.HashedVirtualFileRef + +/** + * An action result represents a result from executing a task. + * In addition to the value typically represented in the return type + * of the task, action value tracks the file output side effect. 
+ */ +class ActionResult[A1](a: A1, outs: Seq[HashedVirtualFileRef]): + def value: A1 = a + def outputFiles: Seq[HashedVirtualFileRef] = outs + override def equals(o: Any): Boolean = + o match { + case o: ActionResult[a] => this.value == o.value && this.outputFiles == o.outputFiles + case _ => false + } + override def hashCode(): Int = (a, outs).## + override def toString(): String = s"ActionResult($a, $outs)" +end ActionResult + +object ActionResult: + import CacheImplicits.* + + given [A1: ClassTag: JsonFormat] + : IsoLList.Aux[ActionResult[A1], A1 :*: Vector[HashedVirtualFileRef] :*: LNil] = + LList.iso( + { (v: ActionResult[A1]) => + ("value", v.value) :*: ("outputFiles", v.outputFiles.toVector) :*: LNil + }, + { (in: A1 :*: Vector[HashedVirtualFileRef] :*: LNil) => + ActionResult(in.head, in.tail.head) + } + ) +end ActionResult diff --git a/util-cache/src/main/scala/sbt/util/BasicCacheImplicits.scala b/util-cache/src/main/scala/sbt/util/BasicCacheImplicits.scala index f0931a4fb..53e2773e9 100644 --- a/util-cache/src/main/scala/sbt/util/BasicCacheImplicits.scala +++ b/util-cache/src/main/scala/sbt/util/BasicCacheImplicits.scala @@ -7,7 +7,8 @@ package sbt.util -import sjsonnew.{ BasicJsonProtocol, JsonFormat } +import sjsonnew.{ BasicJsonProtocol, IsoString, JsonFormat } +import xsbti.{ HashedVirtualFileRef, VirtualFileRef } trait BasicCacheImplicits { self: BasicJsonProtocol => @@ -22,4 +23,22 @@ trait BasicCacheImplicits { self: BasicJsonProtocol => def singleton[T](t: T): SingletonCache[T] = SingletonCache.basicSingletonCache(asSingleton(t)) + + /** + * A string representation of HashedVirtualFileRef, delimited by `>`. 
+ */ + def hashedVirtualFileRefToStr(ref: HashedVirtualFileRef): String = + s"${ref.id}>${ref.contentHashStr}" + + def strToHashedVirtualFileRef(s: String): HashedVirtualFileRef = + s.split(">").toList match { + case path :: hash :: Nil => HashedVirtualFileRef.of(path, hash) + case _ => throw new RuntimeException(s"invalid HashedVirtualFileRefIsoString $s") + } + + implicit lazy val virtualFileRefIsoString: IsoString[VirtualFileRef] = + IsoString.iso(_.id, VirtualFileRef.of) + + implicit lazy val hashedVirtualFileRefIsoString: IsoString[HashedVirtualFileRef] = + IsoString.iso(hashedVirtualFileRefToStr, strToHashedVirtualFileRef) } diff --git a/util-cache/src/main/scala/sbt/util/Digest.scala b/util-cache/src/main/scala/sbt/util/Digest.scala new file mode 100644 index 000000000..22cae2f63 --- /dev/null +++ b/util-cache/src/main/scala/sbt/util/Digest.scala @@ -0,0 +1,92 @@ +package sbt.util + +import sjsonnew.IsoString +import sbt.io.Hash +import java.io.{ BufferedInputStream, InputStream } +import java.nio.ByteBuffer +import java.security.{ DigestInputStream, MessageDigest } + +opaque type Digest = String + +object Digest: + private val sha256_upper = "SHA-256" + + extension (d: Digest) def toBytes: Array[Byte] = parse(d) + + def apply(s: String): Digest = + validateString(s) + s + + def apply(algo: String, bytes: Array[Byte]): Digest = + algo + "-" + toHexString(bytes) + + // used to wrap a Long value as a fake Digest, which will + // later be hashed using sha256 anyway. 
+ def dummy(value: Long): Digest = + apply("murmur3", longsToBytes(Array(0L, value))) + + lazy val zero: Digest = dummy(0L) + + def sha256Hash(bytes: Array[Byte]): Digest = + apply("sha256", hashBytes(sha256_upper, bytes)) + + def sha256Hash(longs: Array[Long]): Digest = + apply("sha256", hashBytes(sha256_upper, longs)) + + def sha256Hash(input: InputStream): Digest = + apply("sha256", hashBytes(sha256_upper, input)) + + def sha256Hash(digests: Digest*): Digest = + sha256Hash(digests.toSeq.map(_.toBytes).flatten.toArray[Byte]) + + private def hashBytes(algo: String, bytes: Array[Byte]): Array[Byte] = + val digest = MessageDigest.getInstance(algo) + digest.digest(bytes) + + private def hashBytes(algo: String, longs: Array[Long]): Array[Byte] = + hashBytes(algo, longsToBytes(longs)) + + private def hashBytes(algo: String, input: InputStream): Array[Byte] = + val BufferSize = 8192 + val bis = BufferedInputStream(input) + val digest = MessageDigest.getInstance(algo) + try + val dis = DigestInputStream(bis, digest) + val buffer = new Array[Byte](BufferSize) + while dis.read(buffer) >= 0 do () + dis.close() + digest.digest + finally bis.close() + + private def validateString(s: String): Unit = + parse(s) + () + + private def parse(s: String): Array[Byte] = + val tokens = s.split("-").toList + tokens match + case "murmur3" :: value :: Nil => parseHex(value, 128) + case "md5" :: value :: Nil => parseHex(value, 128) + case "sha1" :: value :: Nil => parseHex(value, 160) + case "sha256" :: value :: Nil => parseHex(value, 256) + case "sha384" :: value :: Nil => parseHex(value, 384) + case "sha512" :: value :: Nil => parseHex(value, 512) + case _ => throw IllegalArgumentException(s"unexpected digest: $s") + + private def parseHex(value: String, expectedBytes: Int): Array[Byte] = + val bs = Hash.fromHex(value) + require(bs.length == expectedBytes / 8, s"expected $expectedBytes, but found a digest $value") + bs + + private def toHexString(bytes: Array[Byte]): String = + val sb = 
new StringBuilder + for b <- bytes do sb.append(f"${b & 0xff}%02x") + sb.toString + + private def longsToBytes(longs: Array[Long]): Array[Byte] = + val buffer = ByteBuffer.allocate(longs.length * java.lang.Long.BYTES) + for l <- longs do buffer.putLong(l) + buffer.array() + + given IsoString[Digest] = IsoString.iso(x => x, s => s) +end Digest diff --git a/util-cache/src/main/scala/sbt/util/HashUtil.scala b/util-cache/src/main/scala/sbt/util/HashUtil.scala new file mode 100644 index 000000000..c1466c2ef --- /dev/null +++ b/util-cache/src/main/scala/sbt/util/HashUtil.scala @@ -0,0 +1,25 @@ +package sbt.util + +import java.nio.file.{ Files, Path } +import net.openhft.hashing.LongHashFunction +import scala.util.Try + +object HashUtil: + private[sbt] def farmHash(bytes: Array[Byte]): Long = + LongHashFunction.farmNa().hashBytes(bytes) + + private[sbt] def farmHash(path: Path): Long = + import sbt.io.Hash + // allocating many byte arrays for large files may lead to OOME + // but it is more efficient for small files + val largeFileLimit = 10 * 1024 * 1024 + + if Files.size(path) < largeFileLimit then farmHash(Files.readAllBytes(path)) + else farmHash(Hash(path.toFile)) + + private[sbt] def farmHashStr(path: Path): String = + "farm64-" + farmHash(path).toHexString + + private[sbt] def toFarmHashString(digest: Long): String = + s"farm64-${digest.toHexString}" +end HashUtil diff --git a/util-cache/src/main/scala/sbt/util/PathHashWriters.scala b/util-cache/src/main/scala/sbt/util/PathHashWriters.scala new file mode 100644 index 000000000..36728c4f8 --- /dev/null +++ b/util-cache/src/main/scala/sbt/util/PathHashWriters.scala @@ -0,0 +1,41 @@ +package sbt.util + +import java.io.File +import java.nio.file.Path +import sjsonnew.{ Builder, HashWriter, JsonWriter } +import StringStrings.StringString +import xsbti.{ HashedVirtualFileRef, VirtualFile } + +trait PathHashWriters: + given stringStringLike[A](using conv: Conversion[A, StringString]): HashWriter[A] with + def 
write[J](obj: A, builder: Builder[J]): Unit = + val ev = summon[HashWriter[StringString]] + ev.write(conv(obj), builder) +end PathHashWriters + +object PathHashWriters extends PathHashWriters + +// Use opaque type to define HashWriter instance +object StringStrings: + opaque type StringString = (String, String) + object StringString: + def apply(first: String, second: String): StringString = (first, second) + + given Conversion[HashedVirtualFileRef, StringString] = + (x: HashedVirtualFileRef) => StringString(x.id, x.contentHashStr) + given Conversion[File, StringString] = + (x: File) => StringString(x.toString(), HashUtil.farmHashStr(x.toPath())) + given Conversion[Path, StringString] = + (x: Path) => StringString(x.toString(), HashUtil.farmHashStr(x)) + given Conversion[VirtualFile, StringString] = + (x: VirtualFile) => StringString(x.id, s"farm64-${x.contentHash.toHexString}") + + given HashWriter[StringString] = new HashWriter[StringString]: + def write[J](obj: StringString, builder: Builder[J]): Unit = + builder.beginObject() + builder.addFieldName("first") + builder.writeString(obj._1) + builder.addFieldName("second") + builder.writeString(obj._2) + builder.endObject() +end StringStrings diff --git a/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala b/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala new file mode 100644 index 000000000..e2a5ca56e --- /dev/null +++ b/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala @@ -0,0 +1,90 @@ +package sbt.util + +import sbt.internal.util.StringVirtualFile1 +import sbt.io.IO +import sbt.io.syntax.* +import verify.BasicTestSuite +import java.nio.file.Paths +import xsbti.VirtualFile + +object ActionCacheTest extends BasicTestSuite: + val tags = CacheLevelTag.all.toList + + test("Disk cache can hold a blob"): + withDiskCache(testHoldBlob) + + def testHoldBlob(cache: ActionCacheStore): Unit = + val in = StringVirtualFile1("a.txt", "foo") + val hashRefs = cache.putBlobs(in :: Nil) + assert(hashRefs.size 
== 1) + IO.withTemporaryDirectory: tempDir => + val actual = cache.syncBlobs(hashRefs, tempDir.toPath()).head + assert(actual.getFileName().toString() == "a.txt") + + test("In-memory cache can hold action value"): + withInMemoryCache(testActionCacheBasic) + + test("Disk cache can hold action value"): + withDiskCache(testActionCacheBasic) + + def testActionCacheBasic(cache: ActionCacheStore): Unit = + import sjsonnew.BasicJsonProtocol.* + var called = 0 + val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) => + called += 1 + (a + b, Nil) + } + IO.withTemporaryDirectory: (tempDir) => + val config = BuildWideCacheConfiguration(cache, tempDir.toPath()) + val v1 = + ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config) + assert(v1 == 2) + val v2 = + ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config) + assert(v2 == 2) + // check that the action has been invoked only once + assert(called == 1) + + test("Disk cache can hold action value with blob"): + withDiskCache(testActionCacheWithBlob) + + def testActionCacheWithBlob(cache: ActionCacheStore): Unit = + import sjsonnew.BasicJsonProtocol.* + var called = 0 + val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) => + called += 1 + val out = StringVirtualFile1("a.txt", (a + b).toString) + (a + b, Seq(out)) + } + IO.withTemporaryDirectory: (tempDir) => + val config = BuildWideCacheConfiguration(cache, tempDir.toPath()) + val v1 = + ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config) + assert(v1 == 2) + // ActionResult only contains the reference to the files. + // To retrieve them, separately call readBlobs or syncBlobs. 
+ val file1 = tempDir / "a.txt" + assert(file1.exists()) + val content = IO.read(file1) + assert(content == "2") + + val v2 = + ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config) + assert(v2 == 2) + // check that the action has been invoked only once + assert(called == 1) + + def withInMemoryCache(f: InMemoryActionCacheStore => Unit): Unit = + val cache = InMemoryActionCacheStore() + f(cache) + + def withDiskCache(f: DiskActionCacheStore => Unit): Unit = + IO.withTemporaryDirectory( + { tempDir0 => + val tempDir = tempDir0.toPath + val cache = DiskActionCacheStore(tempDir) + f(cache) + }, + keepDirectory = false + ) +end ActionCacheTest diff --git a/util-cache/src/test/scala/sbt/util/DigestTest.scala b/util-cache/src/test/scala/sbt/util/DigestTest.scala new file mode 100644 index 000000000..477ade642 --- /dev/null +++ b/util-cache/src/test/scala/sbt/util/DigestTest.scala @@ -0,0 +1,42 @@ +package sbt.util + +object DigestTest extends verify.BasicTestSuite: + test("murmur3") { + val d = Digest("murmur3-00000000000000000000000000000000") + val dummy = Digest.dummy(0L) + assert(d == dummy) + } + + test("md5") { + val d = Digest("md5-d41d8cd98f00b204e9800998ecf8427e") + } + + test("sha1") { + val d = Digest("sha1-da39a3ee5e6b4b0d3255bfef95601890afd80709") + } + + test("sha256") { + val hashOfNull = Digest.sha256Hash(Array[Byte]()) + val d = Digest("sha256-e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") + assert(hashOfNull == d) + } + + test("sha384") { + val d = Digest( + "sha384-38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b" + ) + } + + test("sha512") { + val d = Digest( + "sha512-cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" + ) + } + + test("digest composition") { + val dummy1 = Digest.dummy(0L) + val dummy2 = Digest.dummy(0L) + val expected = 
Digest("sha256-66687aadf862bd776c8fc18b8e9f8e20089714856ee233b3902a591d0d5f2925") + assert(Digest.sha256Hash(dummy1, dummy2) == expected) + } +end DigestTest diff --git a/util-cache/src/test/scala/sbt/util/HasherTest.scala b/util-cache/src/test/scala/sbt/util/HasherTest.scala new file mode 100644 index 000000000..15e069591 --- /dev/null +++ b/util-cache/src/test/scala/sbt/util/HasherTest.scala @@ -0,0 +1,50 @@ +package sbt.util + +import java.io.File +import sbt.internal.util.StringVirtualFile1 +import sjsonnew.BasicJsonProtocol +import sjsonnew.support.murmurhash.Hasher +import verify.BasicTestSuite +import xsbti.{ BasicVirtualFileRef, HashedVirtualFileRef, VirtualFile } + +object HasherTest extends BasicTestSuite: + import BasicJsonProtocol.implicitHashWriter + + final val blankContentHash = -7286425919675154353L + val blankContentHashStr = "farm64-9ae16a3b2f90404f" + final val blankATxtHash = 1166939303L + + test("The IntJsonFormat should convert an Int to an int hash") { + import BasicJsonProtocol.given + val actual = Hasher.hashUnsafe[Int](1) + assert(actual == 1527037976) + } + + test("StringString hashing from the implicit scope") { + import StringStrings.StringString + val x = StringString("a.txt", blankContentHashStr) + val actual = Hasher.hashUnsafe(x) + assert(actual == blankATxtHash) + } + + test("HashedVirtualFileRef") { + import PathHashWriters.given + val x = HashedVirtualFileRef.of("a.txt", blankContentHashStr) + val actual = Hasher.hashUnsafe(x) + assert(actual == blankATxtHash) + } + + test("VirtualFile hash") { + import PathHashWriters.given + val x = StringVirtualFile1("a.txt", "") + val actual = Hasher.hashUnsafe(x) + assert(actual == blankATxtHash) + } + + test("tuple") { + import BasicJsonProtocol.given + val x = (1, 1) + val actual = Hasher.hashUnsafe(x) + assert(actual == 1975280389) + } +end HasherTest diff --git a/util-collection/src/main/scala/sbt/internal/util/Attributes.scala 
b/util-collection/src/main/scala/sbt/internal/util/Attributes.scala index 65bea142e..bf38f3972 100644 --- a/util-collection/src/main/scala/sbt/internal/util/Attributes.scala +++ b/util-collection/src/main/scala/sbt/internal/util/Attributes.scala @@ -10,15 +10,12 @@ package sbt.internal.util import Types._ import scala.reflect.ClassTag import sbt.util.OptJsonWriter +import sjsonnew.* // T must be invariant to work properly. // Because it is sealed and the only instances go through AttributeKey.apply, // a single AttributeKey instance cannot conform to AttributeKey[T] for different Ts -/** - * A key in an [[AttributeMap]] that constrains its associated value to be of type `T`. The key is - * uniquely defined by its `label` and type `T`, represented at runtime by `manifest`. - */ sealed trait AttributeKey[A]: /** The runtime evidence for `A`. */ @@ -52,6 +49,11 @@ sealed trait AttributeKey[A]: end AttributeKey +opaque type StringAttributeKey = String +object StringAttributeKey: + def apply(s: String): StringAttributeKey = s +end StringAttributeKey + private[sbt] abstract class SharedAttributeKey[A] extends AttributeKey[A]: override final def toString = label override final def hashCode = label.hashCode @@ -256,6 +258,11 @@ private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any]) override def toString = entries.mkString("(", ", ", ")") } +/** + * An immutable map where both key and value are String. + */ +type StringAttributeMap = scala.collection.immutable.Map[StringAttributeKey, String] + // type inference required less generality /** A map entry where `key` is constrained to only be associated with a fixed value of type `T`. */ final case class AttributeEntry[T](key: AttributeKey[T], value: T) { @@ -263,22 +270,19 @@ final case class AttributeEntry[T](key: AttributeKey[T], value: T) { } /** Associates a `metadata` map with `data`. 
*/ -final case class Attributed[D](data: D)(val metadata: AttributeMap) { - +final case class Attributed[A1](data: A1)(val metadata: StringAttributeMap): /** Retrieves the associated value of `key` from the metadata. */ - def get[T](key: AttributeKey[T]): Option[T] = metadata.get(key) + def get(key: StringAttributeKey): Option[String] = metadata.get(key) /** Defines a mapping `key -> value` in the metadata. */ - def put[T](key: AttributeKey[T], value: T): Attributed[D] = - Attributed(data)(metadata.put(key, value)) + def put(key: StringAttributeKey, value: String): Attributed[A1] = + Attributed(data)(metadata.updated(key, value)) /** Transforms the data by applying `f`. */ - def map[T](f: D => T): Attributed[T] = Attributed(f(data))(metadata) - -} - -object Attributed { + def map[A2](f: A1 => A2): Attributed[A2] = Attributed(f(data))(metadata) +end Attributed +object Attributed: /** Extracts the underlying data from the sequence `in`. */ def data[T](in: Seq[Attributed[T]]): Seq[T] = in.map(_.data) @@ -286,6 +290,26 @@ object Attributed { def blankSeq[T](in: Seq[T]): Seq[Attributed[T]] = in map blank /** Associates an empty metadata map with `data`. */ - def blank[T](data: T): Attributed[T] = Attributed(data)(AttributeMap.empty) + def blank[T](data: T): Attributed[T] = Attributed(data)(Map.empty) -} + import sjsonnew.BasicJsonProtocol.* + given JsonFormat[StringAttributeMap] = projectFormat( + (m: StringAttributeMap) => + m.toSeq.map: entry => + (entry._1.toString, entry._2), + (entries: Seq[(String, String)]) => + Map((entries.map: entry => + (StringAttributeKey(entry._1), entry._2)): _*), + ) + + given [A1: ClassTag: JsonFormat] + : IsoLList.Aux[Attributed[A1], A1 :*: StringAttributeMap :*: LNil] = + LList.iso( + { (a: Attributed[A1]) => + ("data", a.data) :*: ("metadata", a.metadata) :*: LNil + }, + { (in: A1 :*: StringAttributeMap :*: LNil) => + Attributed(in.head)(in.tail.head) + } + ) +end Attributed