From 0005705eb6bba9c1c48ab03fb9b69a7c411ae0a0 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:33:31 +0100 Subject: [PATCH 01/31] Fix += on sourceGenerators --- main-settings/src/main/scala/sbt/Def.scala | 2 +- main-settings/src/main/scala/sbt/Structure.scala | 2 +- main-settings/src/main/scala/sbt/std/TaskMacro.scala | 7 ++++--- sbt-app/src/sbt-test/actions/clean-managed/build.sbt | 2 ++ sbt-app/src/sbt-test/actions/clean-managed/pending | 6 ------ sbt-app/src/sbt-test/actions/clean-managed/test | 6 ++++++ sbt-app/src/sbt-test/actions/generator/build.sbt | 2 +- sbt-app/src/sbt-test/actions/generator/pending | 2 -- sbt-app/src/sbt-test/actions/generator/test | 2 ++ 9 files changed, 17 insertions(+), 14 deletions(-) delete mode 100644 sbt-app/src/sbt-test/actions/clean-managed/pending create mode 100644 sbt-app/src/sbt-test/actions/clean-managed/test delete mode 100644 sbt-app/src/sbt-test/actions/generator/pending create mode 100644 sbt-app/src/sbt-test/actions/generator/test diff --git a/main-settings/src/main/scala/sbt/Def.scala b/main-settings/src/main/scala/sbt/Def.scala index 6b05794d5..739c2f7f7 100644 --- a/main-settings/src/main/scala/sbt/Def.scala +++ b/main-settings/src/main/scala/sbt/Def.scala @@ -319,7 +319,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: inline def value: A1 = InputWrapper.`wrapInitTask_\u2603\u2603`[A1](in) /** - * This treats the `Initailize[Task[A]]` as a setting that returns the Task value, + * This treats the `Initialize[Task[A]]` as a setting that returns the Task value, * instead of evaluating the task. */ inline def taskValue: Task[A1] = InputWrapper.`wrapInit_\u2603\u2603`[Task[A1]](in) diff --git a/main-settings/src/main/scala/sbt/Structure.scala b/main-settings/src/main/scala/sbt/Structure.scala index 3e507179e..5539a155c 100644 --- a/main-settings/src/main/scala/sbt/Structure.scala +++ b/main-settings/src/main/scala/sbt/Structure.scala @@ -82,7 +82,7 @@ sealed abstract class SettingKey[A1] final inline def :=(inline v: A1): Setting[A1] = ${ TaskMacro.settingAssignMacroImpl('this, 'v) } - final inline def +=[A2](inline v: A2)(using Append.Value[A1, A2]): Setting[A1] = + final inline def +=[A2](inline v: A2)(using inline ev: Append.Value[A1, A2]): Setting[A1] = ${ TaskMacro.settingAppend1Impl[A1, A2]('this, 'v) } final inline def append1[A2](v: Initialize[A2])(using diff --git a/main-settings/src/main/scala/sbt/std/TaskMacro.scala b/main-settings/src/main/scala/sbt/std/TaskMacro.scala index a5fcaf8d5..3bcf57f5e 100644 --- a/main-settings/src/main/scala/sbt/std/TaskMacro.scala +++ b/main-settings/src/main/scala/sbt/std/TaskMacro.scala @@ -93,7 +93,7 @@ object TaskMacro: '{ InputWrapper.`wrapInitTask_\u2603\u2603`[Option[A1]](Previous.runtime[A1]($t)($ev)) } - case _ => report.errorAndAbort(s"JsonFormat[${Type.of[A1]}] missing") + case _ => report.errorAndAbort(s"JsonFormat[${Type.show[A1]}] missing") /** Implementation of := macro for settings. 
*/ def settingAssignMacroImpl[A1: Type](rec: Expr[Scoped.DefinableSetting[A1]], v: Expr[A1])(using @@ -147,7 +147,7 @@ object TaskMacro: $rec.+=($v2.taskValue)(using $ev) } case _ => - report.errorAndAbort(s"Append.Value[${Type.of[A1]}, ${Type.of[Task[a]]}] missing") + report.errorAndAbort(s"Append.Value[${Type.show[A1]}, ${Type.show[Task[a]]}] missing") case _ => Expr.summon[Append.Value[A1, A2]] match case Some(ev) => @@ -155,7 +155,8 @@ object TaskMacro: '{ $rec.append1[A2]($init)(using $ev) } - case _ => report.errorAndAbort(s"Append.Value[${Type.of[A1]}, ${Type.of[A2]}] missing") + case _ => + report.errorAndAbort(s"Append.Value[${Type.show[A1]}, ${Type.show[A2]}] missing") /* private[this] def transformMacroImpl[A](using qctx: Quotes)(init: Expr[A])( diff --git a/sbt-app/src/sbt-test/actions/clean-managed/build.sbt b/sbt-app/src/sbt-test/actions/clean-managed/build.sbt index 1505d55f0..83eb19f2f 100644 --- a/sbt-app/src/sbt-test/actions/clean-managed/build.sbt +++ b/sbt-app/src/sbt-test/actions/clean-managed/build.sbt @@ -1,5 +1,7 @@ import sbt.nio.file.Glob +name := "clean-managed" +scalaVersion := "3.3.1" Compile / sourceGenerators += { Def.task { val files = Seq(sourceManaged.value / "foo.txt", sourceManaged.value / "bar.txt") diff --git a/sbt-app/src/sbt-test/actions/clean-managed/pending b/sbt-app/src/sbt-test/actions/clean-managed/pending deleted file mode 100644 index f6fd6ce8f..000000000 --- a/sbt-app/src/sbt-test/actions/clean-managed/pending +++ /dev/null @@ -1,6 +0,0 @@ -> compile -$ exists target/scala-2.12/src_managed/foo.txt target/scala-2.12/src_managed/bar.txt - -> clean -$ absent target/scala-2.12/src_managed/foo.txt -$ exists target/scala-2.12/src_managed/bar.txt diff --git a/sbt-app/src/sbt-test/actions/clean-managed/test b/sbt-app/src/sbt-test/actions/clean-managed/test new file mode 100644 index 000000000..f9cf78153 --- /dev/null +++ b/sbt-app/src/sbt-test/actions/clean-managed/test @@ -0,0 +1,6 @@ +> compile +$ exists target/out/jvm/scala-3.3.1/clean-managed/src_managed/foo.txt target/out/jvm/scala-3.3.1/clean-managed/src_managed/bar.txt + +> clean +$ absent target/out/jvm/scala-3.3.1/clean-managed/src_managed/foo.txt +$ exists target/out/jvm/scala-3.3.1/clean-managed/src_managed/bar.txt diff --git a/sbt-app/src/sbt-test/actions/generator/build.sbt b/sbt-app/src/sbt-test/actions/generator/build.sbt index 56dbe41e1..5d88c6637 100644 --- a/sbt-app/src/sbt-test/actions/generator/build.sbt +++ b/sbt-app/src/sbt-test/actions/generator/build.sbt @@ -10,5 +10,5 @@ lazy val root = (project in file(".")) file :: Nil }, Compile / sourceGenerators += buildInfo, - Compile / sourceGenerators += Def.task { Nil }, + Compile / sourceGenerators += Def.task { Seq.empty[File] }, ) diff --git a/sbt-app/src/sbt-test/actions/generator/pending b/sbt-app/src/sbt-test/actions/generator/pending deleted file mode 100644 index b7ff4b2e4..000000000 --- a/sbt-app/src/sbt-test/actions/generator/pending +++ /dev/null @@ -1,2 +0,0 @@ -> compile -$ exists target/scala-2.12/src_managed/BuildInfo.scala diff --git a/sbt-app/src/sbt-test/actions/generator/test b/sbt-app/src/sbt-test/actions/generator/test new file mode 100644 index 000000000..b568c3497 --- /dev/null +++ b/sbt-app/src/sbt-test/actions/generator/test @@ -0,0 +1,2 @@ +> compile +$ exists target/out/jvm/scala-2.12.12/root/src_managed/BuildInfo.scala From 84b933802e0b1a2c1176d01c2799bff793375fb2 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:35:01 +0100 Subject: [PATCH 02/31] Fix compile-clean partially There is 
still no auto-definition of clean on tasks that return Seq[Path] --- .../sbt-test/actions/compile-clean/build.sbt | 3 ++- .../sbt-test/actions/compile-clean/pending | 22 ------------------- .../src/sbt-test/actions/compile-clean/test | 22 +++++++++++++++++++ 3 files changed, 24 insertions(+), 23 deletions(-) delete mode 100644 sbt-app/src/sbt-test/actions/compile-clean/pending create mode 100644 sbt-app/src/sbt-test/actions/compile-clean/test diff --git a/sbt-app/src/sbt-test/actions/compile-clean/build.sbt b/sbt-app/src/sbt-test/actions/compile-clean/build.sbt index 75c6a9b44..e6774e270 100644 --- a/sbt-app/src/sbt-test/actions/compile-clean/build.sbt +++ b/sbt-app/src/sbt-test/actions/compile-clean/build.sbt @@ -1,5 +1,6 @@ import sbt.nio.file.Glob -ThisBuild / scalaVersion := "2.12.17" +name := "compile-clean" +scalaVersion := "2.12.17" Compile / cleanKeepGlobs += Glob((Compile / compile / classDirectory).value, "X.class") diff --git a/sbt-app/src/sbt-test/actions/compile-clean/pending b/sbt-app/src/sbt-test/actions/compile-clean/pending deleted file mode 100644 index 2e805ffd0..000000000 --- a/sbt-app/src/sbt-test/actions/compile-clean/pending +++ /dev/null @@ -1,22 +0,0 @@ -$ touch target/cant-touch-this - -> Test/compile -$ exists target/scala-2.12/classes/A.class -$ exists target/scala-2.12/test-classes/B.class - -> Test/clean -$ exists target/cant-touch-this -# it should clean only compile classes -$ exists target/scala-2.12/classes/A.class -$ exists target/scala-2.12/classes/X.class -$ absent target/scala-2.12/test-classes/B.class - -# compiling everything again, but now cleaning only compile classes -> Test/compile -> Compile/clean -$ exists target/cant-touch-this -# it should clean only compile classes -$ absent target/scala-2.12/classes/A.class -$ exists target/scala-2.12/test-classes/B.class -# and X has to be kept, because of the cleanKeepFiles override -$ exists target/scala-2.12/classes/X.class diff --git a/sbt-app/src/sbt-test/actions/compile-clean/test b/sbt-app/src/sbt-test/actions/compile-clean/test new file mode 100644 index 000000000..a1289b6b1 --- /dev/null +++ b/sbt-app/src/sbt-test/actions/compile-clean/test @@ -0,0 +1,22 @@ +$ touch target/cant-touch-this + +> Test/products +$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class +$ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class + +> Test/clean +$ exists target/cant-touch-this +# TODO it should clean only test classes +# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class +# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class +$ absent target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class + +# compiling everything again, but now cleaning only compile classes +> Test/products +> Compile/clean +$ exists target/cant-touch-this +# TODO it should clean only compile classes +$ absent target/out/jvm/scala-2.12.17/compile-clean/classes/A.class +# $ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class +# TODO and X has to be kept, because of the cleanKeepFiles override +# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class From d001e3053028d1adf5314c6d23ec89931dce134b Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:35:18 +0100 Subject: [PATCH 03/31] Fix compile-time-only --- sbt-app/src/sbt-test/actions/compile-time-only/build.sbt | 4 +--- .../src/sbt-test/actions/compile-time-only/{pending => test} | 0 2 files changed, 1 insertion(+), 3 deletions(-) rename 
sbt-app/src/sbt-test/actions/compile-time-only/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/actions/compile-time-only/build.sbt b/sbt-app/src/sbt-test/actions/compile-time-only/build.sbt index 108396ca6..e4ebcd167 100644 --- a/sbt-app/src/sbt-test/actions/compile-time-only/build.sbt +++ b/sbt-app/src/sbt-test/actions/compile-time-only/build.sbt @@ -1,8 +1,6 @@ -ThisBuild / libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always - +ThisBuild / evictionErrorLevel := Level.Info libraryDependencies += "org.scala-sbt" % "sbt" % sbtVersion.value - lazy val expectErrorNotCrash = taskKey[Unit]("Ensures that sbt properly set types on Trees so that the compiler doesn't crash on a bad reference to .value, but gives a proper error instead.") expectErrorNotCrash := { diff --git a/sbt-app/src/sbt-test/actions/compile-time-only/pending b/sbt-app/src/sbt-test/actions/compile-time-only/test similarity index 100% rename from sbt-app/src/sbt-test/actions/compile-time-only/pending rename to sbt-app/src/sbt-test/actions/compile-time-only/test From ab9f29590e57c9ae8aa4bd3a5db2418c99c1f017 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:37:02 +0100 Subject: [PATCH 04/31] fix cross-advanced --- sbt-app/src/sbt-test/actions/cross-advanced/test | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/sbt-app/src/sbt-test/actions/cross-advanced/test b/sbt-app/src/sbt-test/actions/cross-advanced/test index d4cb7fb0f..a85909358 100644 --- a/sbt-app/src/sbt-test/actions/cross-advanced/test +++ b/sbt-app/src/sbt-test/actions/cross-advanced/test @@ -19,9 +19,9 @@ ## for command cross building you do need crossScalaVerions on root > set root/crossScalaVersions := Seq("2.12.17", "2.13.1") > + build -# $ exists foo/target/scala-2.12 -# $ exists foo/target/scala-2.13 -# $ exists bar/target/scala-2.12 -# $ exists bar/target/scala-2.13 -# $ exists client/target/scala-2.12 -# $ exists client/target/scala-2.13 +$ exists target/out/jvm/scala-2.12.17/foo +$ exists target/out/jvm/scala-2.13.1/foo +$ exists target/out/jvm/scala-2.12.17/bar +$ exists target/out/jvm/scala-2.13.1/bar +$ exists target/out/jvm/scala-2.12.17/client +$ exists target/out/jvm/scala-2.13.1/client From e2c7a294f8f6010b46467ecb17901ffe7b9d83a4 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:37:10 +0100 Subject: [PATCH 05/31] minor change --- main/src/main/scala/sbt/Defaults.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index b78b94a63..b8e999ccd 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -4815,8 +4815,8 @@ trait BuildExtra extends BuildCommon with DefExtra { baseArguments: String* ): Vector[Setting[_]] = { Vector( - scoped := (Def - .input((s: State) => Def.spaceDelimited()) + scoped := Def + .input(_ => Def.spaceDelimited()) .flatMapTask { result => initScoped( scoped.scopedKey, @@ -4829,7 +4829,7 @@ trait BuildExtra extends BuildCommon with DefExtra { r.run(mainClass, cp.files, baseArguments ++ args, s.log).get } } - }) + } .evaluated ) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value) } From 654d35d1badc1bbcd91ff11aea8472f3b8a0a4b6 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:37:41 +0100 Subject: [PATCH 06/31] Fix triggeredBy and runBefore --- .../sbt/internal/EvaluateConfigurations.scala | 31 +++++++------------ 
.../src/main/scala/sbt/std/Instances.scala | 17 +++++----- .../src/sbt-test/actions/task-map/build.sbt | 6 ++-- sbt-app/src/sbt-test/actions/task-map/pending | 7 ----- sbt-app/src/sbt-test/actions/task-map/test | 7 +++++ .../main/scala/sbt/internal/util/INode.scala | 15 ++++----- .../src/main/scala/sbt/util/Applicative.scala | 2 ++ 7 files changed, 40 insertions(+), 45 deletions(-) delete mode 100644 sbt-app/src/sbt-test/actions/task-map/pending create mode 100644 sbt-app/src/sbt-test/actions/task-map/test diff --git a/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala b/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala index b74e4b831..d18e3ec64 100644 --- a/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala +++ b/buildfile/src/main/scala/sbt/internal/EvaluateConfigurations.scala @@ -398,27 +398,18 @@ object Index { def triggers(ss: Settings[Scope]): Triggers = { val runBefore = new TriggerMap val triggeredBy = new TriggerMap - ss.data.values foreach ( - _.entries foreach { - case AttributeEntry(_, value: Task[?]) => - val as = value.info.attributes - update(runBefore, value, as.get(Def.runBefore.asInstanceOf)) - update(triggeredBy, value, as.get(Def.triggeredBy.asInstanceOf)) - case _ => () - } - ) - val onComplete = (GlobalScope / Def.onComplete) get ss getOrElse (() => ()) + for + a <- ss.data.values + case AttributeEntry(_, base: Task[?]) <- a.entries + do + def update(map: TriggerMap, key: AttributeKey[Seq[Task[?]]]): Unit = + base.info.attributes.get(key).getOrElse(Seq.empty).foreach { task => + map(task) = base +: map.getOrElse(task, Nil) + } + update(runBefore, Def.runBefore) + update(triggeredBy, Def.triggeredBy) + val onComplete = (GlobalScope / Def.onComplete).get(ss).getOrElse(() => ()) new Triggers(runBefore, triggeredBy, map => { onComplete(); map }) } - private[this] def update( - map: TriggerMap, - base: Task[?], - tasksOpt: Option[Seq[Task[?]]] - ): Unit = - for { - tasks <- tasksOpt - task <- tasks - } - map(task) = base +: map.getOrElse(task, Nil) } diff --git a/main-settings/src/main/scala/sbt/std/Instances.scala b/main-settings/src/main/scala/sbt/std/Instances.scala index 223372d65..8d3d0c964 100644 --- a/main-settings/src/main/scala/sbt/std/Instances.scala +++ b/main-settings/src/main/scala/sbt/std/Instances.scala @@ -25,13 +25,6 @@ object InitializeInstance: Def.flatMap[A1, A2](fa)(f) end InitializeInstance -private[std] object ComposeInstance: - import InitializeInstance.initializeMonad - val InitInstance = summon[Applicative[Initialize]] - val F1F2: Applicative[[a] =>> Initialize[Task[a]]] = - summon[Applicative[[a] =>> Initialize[Task[a]]]] -end ComposeInstance - object ParserInstance: type F1[x] = State => Parser[x] // import sbt.internal.util.Classes.Applicative @@ -59,11 +52,17 @@ object FullInstance: KeyRanks.DTask ) - given Monad[Initialize] = InitializeInstance.initializeMonad - val F1F2: Applicative[[a] =>> Initialize[Task[a]]] = ComposeInstance.F1F2 + val F1F2: Applicative[[a] =>> Initialize[Task[a]]] = Applicative.given_Applicative_F1(using + InitializeInstance.initializeMonad, + Task.taskMonad + ) given initializeTaskMonad: Monad[[a] =>> Initialize[Task[a]]] with type F[x] = Initialize[Task[x]] override def pure[A1](x: () => A1): Initialize[Task[A1]] = F1F2.pure(x) + + override def map[A1, A2](fa: Initialize[Task[A1]])(f: A1 => A2): Initialize[Task[A2]] = + F1F2.map(fa)(f) + override def ap[A1, A2](ff: Initialize[Task[A1 => A2]])( fa: Initialize[Task[A1]] ): Initialize[Task[A2]] = diff --git 
a/sbt-app/src/sbt-test/actions/task-map/build.sbt b/sbt-app/src/sbt-test/actions/task-map/build.sbt index b642bcf02..72b1d8047 100644 --- a/sbt-app/src/sbt-test/actions/task-map/build.sbt +++ b/sbt-app/src/sbt-test/actions/task-map/build.sbt @@ -4,6 +4,8 @@ val taskB = taskKey[File]("") val taskE = taskKey[File]("") val taskF = taskKey[File]("") +scalaVersion := "3.3.1" +name := "task-map" taskA := touch(target.value / "a") taskB := touch(target.value / "b") @@ -14,13 +16,13 @@ taskF := touch(target.value / "f") // means "a" will be triggered by "b" // said differently, invoking "b" will run "b" and then run "a" -taskA := (taskA triggeredBy taskB).value +taskA := taskA.triggeredBy(taskB).value // e <<= e runBefore f // means "e" will be run before running "f" // said differently, invoking "f" will run "e" and then run "f" -taskE := (taskE runBefore taskF).value +taskE := taskE.runBefore(taskF).value // test utils def touch(f: File): File = { IO.touch(f); f } diff --git a/sbt-app/src/sbt-test/actions/task-map/pending b/sbt-app/src/sbt-test/actions/task-map/pending deleted file mode 100644 index 3a88d40a7..000000000 --- a/sbt-app/src/sbt-test/actions/task-map/pending +++ /dev/null @@ -1,7 +0,0 @@ -> taskB -$ exists target/b -$ exists target/a - -> taskF -$ exists target/e -$ exists target/f diff --git a/sbt-app/src/sbt-test/actions/task-map/test b/sbt-app/src/sbt-test/actions/task-map/test new file mode 100644 index 000000000..f22910038 --- /dev/null +++ b/sbt-app/src/sbt-test/actions/task-map/test @@ -0,0 +1,7 @@ +> taskB +$ exists target/out/jvm/scala-3.3.1/task-map/b +$ exists target/out/jvm/scala-3.3.1/task-map/a + +> taskF +$ exists target/out/jvm/scala-3.3.1/task-map/e +$ exists target/out/jvm/scala-3.3.1/task-map/f diff --git a/util-collection/src/main/scala/sbt/internal/util/INode.scala b/util-collection/src/main/scala/sbt/internal/util/INode.scala index a81a223f6..6e647f4ab 100644 --- a/util-collection/src/main/scala/sbt/internal/util/INode.scala +++ b/util-collection/src/main/scala/sbt/internal/util/INode.scala @@ -40,7 +40,7 @@ abstract class EvaluateSettings[ScopeType]: case k: Keyed[s, A] => single(getStatic(k.scopedKey), k.transform) case u: Uniform[s, A] => UniformNode(u.inputs.map(transform[s]), u.f) case a: Apply[k, A] => - MixedNode[k, A](TupleMapExtension.transform(a.inputs) { transform }, a.f) + MixedNode[k, A](TupleMapExtension.transform(a.inputs)(transform), a.f) case b: Bind[s, A] => BindNode[s, A](transform(b.in), x => transform(b.f(x))) case v: Value[A] => constant(v.value) case v: ValidationCapture[a] => strictConstant(v.key: A) @@ -89,8 +89,7 @@ abstract class EvaluateSettings[ScopeType]: private[this] def submit(work: => Unit): Unit = startWork() - // new Runnable { def run = if (!cancel.get()) run0(work) } - executor.execute(() => if !cancel.get() then run0(work) else ()) + executor.execute(() => if !cancel.get() then run0(work)) private[this] def run0(work: => Unit): Unit = try { @@ -102,7 +101,6 @@ abstract class EvaluateSettings[ScopeType]: private[this] def workComplete(): Unit = if running.decrementAndGet() == 0 then complete.put(None) - else () private[this] sealed abstract class INode[A1]: private[this] var state: EvaluationState = New @@ -116,9 +114,12 @@ abstract class EvaluateSettings[ScopeType]: keyString private[this] def keyString = - (static.toSeq.flatMap { case (key, value) => - if (value eq this) init.showFullKey.show(key) :: Nil else List.empty[String] - }).headOption getOrElse "non-static" + static.toSeq + .flatMap { case (key, value) => + if 
(value eq this) init.showFullKey.show(key) :: Nil else Nil + } + .headOption + .getOrElse("non-static") final def get: A1 = synchronized { assert(value != null, toString + " not evaluated") diff --git a/util-collection/src/main/scala/sbt/util/Applicative.scala b/util-collection/src/main/scala/sbt/util/Applicative.scala index 088ee3559..16d558d46 100644 --- a/util-collection/src/main/scala/sbt/util/Applicative.scala +++ b/util-collection/src/main/scala/sbt/util/Applicative.scala @@ -23,6 +23,8 @@ object Applicative: val F1 = summon[Applicative[F1]] val F2 = summon[Applicative[F2]] override def pure[A1](x: () => A1): F1[F2[A1]] = F1.pure(() => F2.pure(x)) + override def map[A1, A2](fa: F1[F2[A1]])(f: A1 => A2): F1[F2[A2]] = + F1.map(fa)(f2 => F2.map(f2)(f)) override def ap[A1, A2](f1f2f: F1[F2[A1 => A2]])(f1f2a: F1[F2[A1]]): F1[F2[A2]] = F1.ap(F1.map(f1f2f) { (f2f: F2[A1 => A2]) => (f2a: F2[A1]) => F2.ap(f2f)(f2a) })(f1f2a) From 85943bc33b6957174c054a469b04a666e9f61203 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 14:38:03 +0100 Subject: [PATCH 07/31] fix src-dep-plugin --- .../compiler-project/src-dep-plugin/build.sbt | 11 ++++++----- .../compiler-project/src-dep-plugin/def/build.sbt | 1 + .../compiler-project/src-dep-plugin/{pending => test} | 0 3 files changed, 7 insertions(+), 5 deletions(-) rename sbt-app/src/sbt-test/compiler-project/src-dep-plugin/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/build.sbt b/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/build.sbt index ef1a97259..fe8b5f5c7 100644 --- a/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/build.sbt +++ b/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/build.sbt @@ -1,5 +1,6 @@ -import Configurations.{CompilerPlugin => CPlugin} - -lazy val use = project.dependsOn(RootProject(file("def")) % CPlugin).settings( - autoCompilerPlugins := true -) +lazy val use = project + .dependsOn(RootProject(file("def")) % Configurations.CompilerPlugin) + .settings( + scalaVersion := "2.12.17", + autoCompilerPlugins := true + ) diff --git a/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/def/build.sbt b/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/def/build.sbt index 0830ec789..5b1499023 100644 --- a/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/def/build.sbt +++ b/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/def/build.sbt @@ -4,6 +4,7 @@ name := "demo-compiler-plugin" version := "0.1" +scalaVersion := "2.12.17" libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided" exportJars := true \ No newline at end of file diff --git a/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/pending b/sbt-app/src/sbt-test/compiler-project/src-dep-plugin/test similarity index 100% rename from sbt-app/src/sbt-test/compiler-project/src-dep-plugin/pending rename to sbt-app/src/sbt-test/compiler-project/src-dep-plugin/test From 3d3b347c8fc0af63204ca8353ed21bd298bf31ff Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:01:57 +0100 Subject: [PATCH 08/31] Fix pom-advanced --- .../dependency-management/pom-advanced/build.sbt | 6 ++++-- .../dependency-management/pom-advanced/pending | 10 ---------- .../sbt-test/dependency-management/pom-advanced/test | 10 ++++++++++ 3 files changed, 14 insertions(+), 12 deletions(-) delete mode 100644 sbt-app/src/sbt-test/dependency-management/pom-advanced/pending create mode 100644 sbt-app/src/sbt-test/dependency-management/pom-advanced/test diff --git 
a/sbt-app/src/sbt-test/dependency-management/pom-advanced/build.sbt b/sbt-app/src/sbt-test/dependency-management/pom-advanced/build.sbt index 2da229089..15ed8b529 100644 --- a/sbt-app/src/sbt-test/dependency-management/pom-advanced/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/pom-advanced/build.sbt @@ -6,15 +6,17 @@ lazy val root = (project in file(".")). settings( resolvers ++= Seq(local, Resolver.sonatypeRepo("releases"), Resolver.sonatypeRepo("snapshots")), InputKey[Unit]("checkPom") := { + val converter = fileConverter.value val result = spaceDelimited("").parsed - checkPomRepositories(makePom.value, result, streams.value) + val pomFile = converter.toPath(makePom.value) + checkPomRepositories(pomFile.toFile, result, streams.value) }, makePomConfiguration := { val conf = makePomConfiguration.value conf .withFilterRepositories(pomIncludeRepository(baseDirectory.value, conf.filterRepositories)) }, - ivyPaths := baseDirectory( dir => IvyPaths(dir, Some(dir / "ivy-home"))).value + ivyPaths := baseDirectory(dir => IvyPaths(dir.toString, Some((dir / "ivy-home").toString))).value ) val local = "local-maven-repo" at "file://" + (Path.userHome / ".m2" /"repository").absolutePath diff --git a/sbt-app/src/sbt-test/dependency-management/pom-advanced/pending b/sbt-app/src/sbt-test/dependency-management/pom-advanced/pending deleted file mode 100644 index d639e4ae6..000000000 --- a/sbt-app/src/sbt-test/dependency-management/pom-advanced/pending +++ /dev/null @@ -1,10 +0,0 @@ -# write the default pom. The only repositories should be Scala Tools Releases and Snapshots -> checkPom https://scala-ci.typesafe.com/artifactory/scala-integration/ https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/ https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ - -# include file:// repositories. The generated repositories section should include the local Maven repository as well -$ touch repo.all -> checkPom https://scala-ci.typesafe.com/artifactory/scala-integration/ https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/ https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ file://*.m2/repository/ - -$ delete repo.all -$ touch repo.none -> checkPom diff --git a/sbt-app/src/sbt-test/dependency-management/pom-advanced/test b/sbt-app/src/sbt-test/dependency-management/pom-advanced/test new file mode 100644 index 000000000..28688bcc7 --- /dev/null +++ b/sbt-app/src/sbt-test/dependency-management/pom-advanced/test @@ -0,0 +1,10 @@ +# write the default pom. The only repositories should be Scala Tools Releases and Snapshots +> checkPom https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ + +# include file:// repositories. 
The generated repositories section should include the local Maven repository as well +$ touch repo.all +> checkPom https://oss.sonatype.org/service/local/repositories/releases/content/ https://oss.sonatype.org/content/repositories/snapshots/ file://*.m2/repository/ + +$ delete repo.all +$ touch repo.none +> checkPom From 5c2b95980b71b9f362045428a221a6b69b18d2d6 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:14:12 +0100 Subject: [PATCH 09/31] Fix exclude-dependencies and exclude-dependencies2 --- sbt-app/src/main/scala/sbt/Import.scala | 4 ++++ .../exclude-dependencies/{pending => test} | 0 .../exclude-dependencies2/{pending => test} | 0 3 files changed, 4 insertions(+) rename sbt-app/src/sbt-test/dependency-management/exclude-dependencies/{pending => test} (100%) rename sbt-app/src/sbt-test/dependency-management/exclude-dependencies2/{pending => test} (100%) diff --git a/sbt-app/src/main/scala/sbt/Import.scala b/sbt-app/src/main/scala/sbt/Import.scala index 48ce25a28..563819660 100644 --- a/sbt-app/src/main/scala/sbt/Import.scala +++ b/sbt-app/src/main/scala/sbt/Import.scala @@ -295,6 +295,10 @@ trait Import { type IvyScala = sbt.librarymanagement.ScalaModuleInfo val JCenterRepository = sbt.librarymanagement.Resolver.JCenterRepository val JavaNet2Repository = sbt.librarymanagement.Resolver.JavaNet2Repository + import sbt.librarymanagement.{ InclExclRule, DependencyBuilders } + given Conversion[String, InclExclRule] = InclExclRule.stringToExclusionRule + given Conversion[DependencyBuilders.OrganizationArtifactName, InclExclRule] = + InclExclRule.organizationArtifactNameToExclusionRule // todo: fix // val License = sbt.librarymanagement.License diff --git a/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/pending b/sbt-app/src/sbt-test/dependency-management/exclude-dependencies/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/exclude-dependencies/pending rename to sbt-app/src/sbt-test/dependency-management/exclude-dependencies/test diff --git a/sbt-app/src/sbt-test/dependency-management/exclude-dependencies2/pending b/sbt-app/src/sbt-test/dependency-management/exclude-dependencies2/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/exclude-dependencies2/pending rename to sbt-app/src/sbt-test/dependency-management/exclude-dependencies2/test From 9c425ea5814ccb3bf58feea8d9fdb8a805c176dd Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:15:58 +0100 Subject: [PATCH 10/31] Fix version-interval --- .../dependency-management/version-interval/build.sbt | 8 +------- .../version-interval/{pending => test} | 0 2 files changed, 1 insertion(+), 7 deletions(-) rename sbt-app/src/sbt-test/dependency-management/version-interval/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/dependency-management/version-interval/build.sbt b/sbt-app/src/sbt-test/dependency-management/version-interval/build.sbt index efeaf236e..c3af2dd11 100644 --- a/sbt-app/src/sbt-test/dependency-management/version-interval/build.sbt +++ b/sbt-app/src/sbt-test/dependency-management/version-interval/build.sbt @@ -1,23 +1,18 @@ libraryDependencies += "org.json4s" %% "json4s-native" % "[3.3.0,3.5.0)" - +scalaVersion := "2.12.17" lazy val actualVersionCheck = taskKey[Unit]("") - actualVersionCheck := { - val log = streams.value.log - val configReport = update.value .configuration(Compile) .getOrElse { sys.error("compile configuration not found in update report") } - val modules = configReport .modules 
.map(_.module) - assert(modules.nonEmpty) assert(modules.exists(_.name.startsWith("json4s-native"))) @@ -25,7 +20,6 @@ actualVersionCheck := { val v = m.revision v.contains("[") || v.contains("]") || v.contains("(") || v.contains(")") } - if (wrongModules.nonEmpty) { log.error("Found unexpected intervals in revisions") for (m <- wrongModules) diff --git a/sbt-app/src/sbt-test/dependency-management/version-interval/pending b/sbt-app/src/sbt-test/dependency-management/version-interval/test similarity index 100% rename from sbt-app/src/sbt-test/dependency-management/version-interval/pending rename to sbt-app/src/sbt-test/dependency-management/version-interval/test From 222410009b6c8bab08e30fe6fb468338eb3e929c Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:17:07 +0100 Subject: [PATCH 11/31] fix no-scala-tool --- main/src/main/scala/sbt/Defaults.scala | 3 ++- sbt-app/src/sbt-test/java/no-scala-tool/{pending => test} | 0 2 files changed, 2 insertions(+), 1 deletion(-) rename sbt-app/src/sbt-test/java/no-scala-tool/{pending => test} (100%) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index b8e999ccd..b52fca223 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -724,7 +724,8 @@ object Defaults extends BuildCommon { clean := clean.dependsOnTask(cleanIvy).value, scalaCompilerBridgeBinaryJar := Def.settingDyn { val sv = scalaVersion.value - if (ScalaArtifacts.isScala3(sv)) fetchBridgeBinaryJarTask(sv) + val managed = managedScalaInstance.value + if (ScalaArtifacts.isScala3(sv) && managed) fetchBridgeBinaryJarTask(sv) else Def.task[Option[File]](None) }.value, scalaCompilerBridgeSource := ZincLmUtil.getDefaultBridgeSourceModule(scalaVersion.value), diff --git a/sbt-app/src/sbt-test/java/no-scala-tool/pending b/sbt-app/src/sbt-test/java/no-scala-tool/test similarity index 100% rename from sbt-app/src/sbt-test/java/no-scala-tool/pending rename to sbt-app/src/sbt-test/java/no-scala-tool/test From 6e44d9df2ab6ab3dbcf9fed1c0668c2e701b5731 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:20:06 +0100 Subject: [PATCH 12/31] fix auto-plugins-default-requires-jvmplugin --- .../auto-plugins-default-requires-jvmplugin/build.sbt | 4 ++-- .../project/TestP.scala | 4 ++-- .../auto-plugins-default-requires-jvmplugin/{pending => test} | 0 3 files changed, 4 insertions(+), 4 deletions(-) rename sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/build.sbt b/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/build.sbt index a6062b24c..6d3062486 100644 --- a/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/build.sbt +++ b/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/build.sbt @@ -1,7 +1,7 @@ val test123 = project in file(".") enablePlugins TestP settings( Compile / resourceGenerators += Def.task { - streams.value.log info "resource generated in settings" - Nil + streams.value.log.info("resource generated in settings") + Seq.empty[File] } ) diff --git a/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/project/TestP.scala b/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/project/TestP.scala index 7b4cad897..a1c75e45b 100644 --- a/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/project/TestP.scala +++ 
b/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/project/TestP.scala @@ -3,8 +3,8 @@ import sbt._, Keys._ object TestP extends AutoPlugin { override def projectSettings: Seq[Setting[_]] = Seq( Compile / resourceGenerators += Def.task { - streams.value.log info "resource generated in plugin" - Nil + streams.value.log.info("resource generated in plugin") + Seq.empty[File] } ) } diff --git a/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/pending b/sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/test similarity index 100% rename from sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/pending rename to sbt-app/src/sbt-test/project1/auto-plugins-default-requires-jvmplugin/test From c88a341021e4f03ab17aa994de4b32fbcbf3d2ff Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:24:24 +0100 Subject: [PATCH 13/31] Fix binary-plugin --- .../sbt-test/project1/binary-plugin/changes/define/build.sbt | 3 +++ sbt-app/src/sbt-test/project1/binary-plugin/{pending => test} | 0 2 files changed, 3 insertions(+) rename sbt-app/src/sbt-test/project1/binary-plugin/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/project1/binary-plugin/changes/define/build.sbt b/sbt-app/src/sbt-test/project1/binary-plugin/changes/define/build.sbt index f8a8d32b8..74ac4143d 100644 --- a/sbt-app/src/sbt-test/project1/binary-plugin/changes/define/build.sbt +++ b/sbt-app/src/sbt-test/project1/binary-plugin/changes/define/build.sbt @@ -1,3 +1,6 @@ sbtPlugin := true name := "demo-plugin" + +// TODO fix doc task +Compile / doc / sources := Seq.empty diff --git a/sbt-app/src/sbt-test/project1/binary-plugin/pending b/sbt-app/src/sbt-test/project1/binary-plugin/test similarity index 100% rename from sbt-app/src/sbt-test/project1/binary-plugin/pending rename to sbt-app/src/sbt-test/project1/binary-plugin/test From a92b97503251c32de20deddf88dd71f4b38cfd3c Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 26 Mar 2024 16:31:13 +0100 Subject: [PATCH 14/31] fix derived --- sbt-app/src/sbt-test/project1/derived/{pending => test} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sbt-app/src/sbt-test/project1/derived/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/project1/derived/pending b/sbt-app/src/sbt-test/project1/derived/test similarity index 100% rename from sbt-app/src/sbt-test/project1/derived/pending rename to sbt-app/src/sbt-test/project1/derived/test From 4a946a68cde88a92f5aa1620170a5ab7ff5aa2e3 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 10:37:44 +0100 Subject: [PATCH 15/31] Fix project1/extra --- .../java/sbt/internal/MetaBuildLoader.java | 60 +++++++++---------- sbt-app/src/sbt-test/project1/extra/build.sbt | 17 +++--- .../sbt-test/project1/extra/{pending => test} | 0 3 files changed, 36 insertions(+), 41 deletions(-) rename sbt-app/src/sbt-test/project1/extra/{pending => test} (100%) diff --git a/main/src/main/java/sbt/internal/MetaBuildLoader.java b/main/src/main/java/sbt/internal/MetaBuildLoader.java index 05cef0f02..f8ce8ceea 100644 --- a/main/src/main/java/sbt/internal/MetaBuildLoader.java +++ b/main/src/main/java/sbt/internal/MetaBuildLoader.java @@ -81,41 +81,27 @@ public final class MetaBuildLoader extends URLClassLoader { jnaJars); final Pattern pattern = Pattern.compile(fullPattern); final File[] cp = appProvider.mainClasspath(); - final URL[] interfaceURLs = new URL[3]; - final URL[] jlineURLs = new URL[7]; + final Set interfaceFiles = new 
LinkedHashSet<>(); + final Set jlineFiles = new LinkedHashSet<>(); final File[] extra = appProvider.id().classpathExtra() == null ? new File[0] : appProvider.id().classpathExtra(); final Set bottomClasspath = new LinkedHashSet<>(); - { - int interfaceIndex = 0; - int jlineIndex = 0; - for (final File file : cp) { - final String name = file.getName(); - if ((name.contains("test-interface") - || name.contains("compiler-interface") - || name.contains("util-interface")) - && pattern.matcher(name).find()) { - interfaceURLs[interfaceIndex] = file.toURI().toURL(); - interfaceIndex += 1; - } else if (pattern.matcher(name).find()) { - jlineURLs[jlineIndex] = file.toURI().toURL(); - jlineIndex += 1; - } else { - bottomClasspath.add(file); - } - } - for (final File file : extra) { + for (final File file : cp) { + final String name = file.getName(); + if ((name.contains("test-interface") + || name.contains("compiler-interface") + || name.contains("util-interface")) + && pattern.matcher(name).find()) { + interfaceFiles.add(file); + } else if (pattern.matcher(name).find()) { + jlineFiles.add(file); + } else { bottomClasspath.add(file); } } - final URL[] rest = new URL[bottomClasspath.size()]; - { - int i = 0; - for (final File file : bottomClasspath) { - rest[i] = file.toURI().toURL(); - i += 1; - } + for (final File file : extra) { + bottomClasspath.add(file); } final ScalaProvider scalaProvider = appProvider.scalaProvider(); ClassLoader topLoader = scalaProvider.launcher().topLoader(); @@ -148,8 +134,9 @@ public final class MetaBuildLoader extends URLClassLoader { } }; - final SbtInterfaceLoader interfaceLoader = new SbtInterfaceLoader(interfaceURLs, topLoader); - final JLineLoader jlineLoader = new JLineLoader(jlineURLs, interfaceLoader); + final SbtInterfaceLoader interfaceLoader = + new SbtInterfaceLoader(toURLArray(interfaceFiles), topLoader); + final JLineLoader jlineLoader = new JLineLoader(toURLArray(jlineFiles), interfaceLoader); final File[] siJars = scalaProvider.jars(); final URL[] lib = new URL[1]; int scalaRestCount = siJars.length - 1; @@ -175,6 +162,17 @@ public final class MetaBuildLoader extends URLClassLoader { assert lib[0] != null : "no scala-library.jar"; final ScalaLibraryClassLoader libraryLoader = new ScalaLibraryClassLoader(lib, jlineLoader); final FullScalaLoader fullScalaLoader = new FullScalaLoader(scalaRest, libraryLoader); - return new MetaBuildLoader(rest, fullScalaLoader, libraryLoader, interfaceLoader, jlineLoader); + return new MetaBuildLoader( + toURLArray(bottomClasspath), fullScalaLoader, libraryLoader, interfaceLoader, jlineLoader); + } + + private static URL[] toURLArray(Set files) throws java.net.MalformedURLException { + URL[] urls = new URL[files.size()]; + int i = 0; + for (final File file : files) { + urls[i] = file.toURI().toURL(); + i += 1; + } + return urls; } } diff --git a/sbt-app/src/sbt-test/project1/extra/build.sbt b/sbt-app/src/sbt-test/project1/extra/build.sbt index ecc1b85e9..6c6e89e57 100644 --- a/sbt-app/src/sbt-test/project1/extra/build.sbt +++ b/sbt-app/src/sbt-test/project1/extra/build.sbt @@ -24,22 +24,19 @@ def checkExtra = s } -def addExtra1(s: State, extra: Seq[File]): State = - { - val cs = s.configuration.provider.components() - val copied = cs.addToComponent("extra", extra.toArray) - if(copied) s.reload else s - } +def addExtra1(s: State, extra: Seq[File]): State = { + val cs = s.configuration.provider.components() + val copied = cs.addToComponent("extra", extra.toArray) + if(copied) s.reload else s +} def addExtra2(s: State, extra: 
Seq[File]): State = { val reload = State.defaultReload(s) val currentID = reload.app val currentExtra = currentID.classpathExtra val newExtra = (currentExtra ++ extra).distinct - if(newExtra.length == currentExtra.length) - s - else - { + if(newExtra.length == currentExtra.length) s + else { val newID = ApplicationID(currentID).copy(extra = extra) s.setNext(new State.Return(reload.copy(app = newID))) } diff --git a/sbt-app/src/sbt-test/project1/extra/pending b/sbt-app/src/sbt-test/project1/extra/test similarity index 100% rename from sbt-app/src/sbt-test/project1/extra/pending rename to sbt-app/src/sbt-test/project1/extra/test From d9b024cf917d9f0476adec7b8dc4d82cedfc16ea Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 14:01:04 +0100 Subject: [PATCH 16/31] fix project1/ignore-hidden-build-files --- main/src/main/scala/sbt/internal/Load.scala | 3 +-- .../project1/ignore-hidden-build-files/{pending => test} | 0 2 files changed, 1 insertion(+), 2 deletions(-) rename sbt-app/src/sbt-test/project1/ignore-hidden-build-files/{pending => test} (100%) diff --git a/main/src/main/scala/sbt/internal/Load.scala b/main/src/main/scala/sbt/internal/Load.scala index 2efafdfbd..d1f387409 100755 --- a/main/src/main/scala/sbt/internal/Load.scala +++ b/main/src/main/scala/sbt/internal/Load.scala @@ -1217,6 +1217,7 @@ private[sbt] object Load { // Default sbt files to read, if needed lazy val defaultSbtFiles = configurationSources(projectBase) + .filterNot(_.isHidden) .map(_.getAbsoluteFile().toPath) .map(converter.toVirtualFile) lazy val sbtFiles = defaultSbtFiles ++ extraSbtFiles @@ -1264,11 +1265,9 @@ private[sbt] object Load { // case sf: SbtFiles => // sf.files // .map(f => IO.resolve(projectBase, f)) - // .filterNot(_.isHidden) // .map(_.toPath) case sf: DefaultSbtFiles => sbtFiles.filter(sf.include) - // .filterNot(_.isHidden) // .map(_.toPath) case q: Sequence => q.sequence.foldLeft(Seq.empty[VirtualFile]) { (b, add) => diff --git a/sbt-app/src/sbt-test/project1/ignore-hidden-build-files/pending b/sbt-app/src/sbt-test/project1/ignore-hidden-build-files/test similarity index 100% rename from sbt-app/src/sbt-test/project1/ignore-hidden-build-files/pending rename to sbt-app/src/sbt-test/project1/ignore-hidden-build-files/test From 2139a6a203abe6e0b8ae7c2dcc8bc893e4f78bce Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 14:09:30 +0100 Subject: [PATCH 17/31] enable run/fork-loader --- .../src/sbt-test/run/fork-loader/CheckLoader.scala | 12 ++++++------ .../run/fork-loader/src/test/scala/TestForked.scala | 10 +++++----- .../src/sbt-test/run/fork-loader/{disabled => test} | 0 3 files changed, 11 insertions(+), 11 deletions(-) rename sbt-app/src/sbt-test/run/fork-loader/{disabled => test} (100%) diff --git a/sbt-app/src/sbt-test/run/fork-loader/CheckLoader.scala b/sbt-app/src/sbt-test/run/fork-loader/CheckLoader.scala index a90d11af2..926358332 100644 --- a/sbt-app/src/sbt-test/run/fork-loader/CheckLoader.scala +++ b/sbt-app/src/sbt-test/run/fork-loader/CheckLoader.scala @@ -1,8 +1,8 @@ object CheckLoader { - def main(args: Array[String]): Unit = apply() - def apply(): Unit = { - val loader = getClass.getClassLoader - val appLoader = ClassLoader.getSystemClassLoader - assert(loader eq appLoader, "Application classes not loaded in the system class loader") - } + def main(args: Array[String]): Unit = apply() + def apply(): Unit = { + val loader = getClass.getClassLoader + val appLoader = ClassLoader.getSystemClassLoader + assert(loader eq appLoader, "Application classes 
not loaded in the system class loader") + } } diff --git a/sbt-app/src/sbt-test/run/fork-loader/src/test/scala/TestForked.scala b/sbt-app/src/sbt-test/run/fork-loader/src/test/scala/TestForked.scala index cb1b191dc..4c785ce42 100644 --- a/sbt-app/src/sbt-test/run/fork-loader/src/test/scala/TestForked.scala +++ b/sbt-app/src/sbt-test/run/fork-loader/src/test/scala/TestForked.scala @@ -1,8 +1,8 @@ import org.scalacheck._ object TestForked extends Properties("Forked loader") { - property("Loaded from application loader") = Prop.secure { - CheckLoader() - true - } -} \ No newline at end of file + property("Loaded from application loader") = Prop.secure { + CheckLoader() + true + } +} diff --git a/sbt-app/src/sbt-test/run/fork-loader/disabled b/sbt-app/src/sbt-test/run/fork-loader/test similarity index 100% rename from sbt-app/src/sbt-test/run/fork-loader/disabled rename to sbt-app/src/sbt-test/run/fork-loader/test From 3c04787777e4426d083518c52f48471f8cb78488 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 14:11:48 +0100 Subject: [PATCH 18/31] fix run/non-local-main --- sbt-app/src/sbt-test/run/non-local-main/build.sbt | 2 ++ sbt-app/src/sbt-test/run/non-local-main/{pending => test} | 0 2 files changed, 2 insertions(+) rename sbt-app/src/sbt-test/run/non-local-main/{pending => test} (100%) diff --git a/sbt-app/src/sbt-test/run/non-local-main/build.sbt b/sbt-app/src/sbt-test/run/non-local-main/build.sbt index d7f046b6e..68cd3d83d 100644 --- a/sbt-app/src/sbt-test/run/non-local-main/build.sbt +++ b/sbt-app/src/sbt-test/run/non-local-main/build.sbt @@ -1,3 +1,5 @@ +ThisBuild / scalaVersion := "2.12.17" + lazy val main = project.settings( organization := "org.scala-sbt.testsuite.example", name := "has-main", diff --git a/sbt-app/src/sbt-test/run/non-local-main/pending b/sbt-app/src/sbt-test/run/non-local-main/test similarity index 100% rename from sbt-app/src/sbt-test/run/non-local-main/pending rename to sbt-app/src/sbt-test/run/non-local-main/test From debc5a39a7c823f3b386d89e246615ee4fe35601 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 14:15:03 +0100 Subject: [PATCH 19/31] enable run/spawn-exit --- sbt-app/src/sbt-test/run/spawn-exit/disabled | 1 - sbt-app/src/sbt-test/run/spawn-exit/test | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 sbt-app/src/sbt-test/run/spawn-exit/disabled create mode 100644 sbt-app/src/sbt-test/run/spawn-exit/test diff --git a/sbt-app/src/sbt-test/run/spawn-exit/disabled b/sbt-app/src/sbt-test/run/spawn-exit/disabled deleted file mode 100644 index 9e4c2371c..000000000 --- a/sbt-app/src/sbt-test/run/spawn-exit/disabled +++ /dev/null @@ -1 +0,0 @@ -> run \ No newline at end of file diff --git a/sbt-app/src/sbt-test/run/spawn-exit/test b/sbt-app/src/sbt-test/run/spawn-exit/test new file mode 100644 index 000000000..62ea636c1 --- /dev/null +++ b/sbt-app/src/sbt-test/run/spawn-exit/test @@ -0,0 +1 @@ +> run From ed12ebc5ccf225904067bfd0d6a033ef5c54fcf5 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 14:40:45 +0100 Subject: [PATCH 20/31] fix test/scala-instance-classloader --- .../tests/scala-instance-classloader/build.sbt | 15 ++++++++------- .../tests/scala-instance-classloader/pending | 1 - .../tests/scala-instance-classloader/test | 1 + 3 files changed, 9 insertions(+), 8 deletions(-) delete mode 100644 sbt-app/src/sbt-test/tests/scala-instance-classloader/pending create mode 100644 sbt-app/src/sbt-test/tests/scala-instance-classloader/test diff --git 
a/sbt-app/src/sbt-test/tests/scala-instance-classloader/build.sbt b/sbt-app/src/sbt-test/tests/scala-instance-classloader/build.sbt index 6dde312bb..ce5ebab94 100644 --- a/sbt-app/src/sbt-test/tests/scala-instance-classloader/build.sbt +++ b/sbt-app/src/sbt-test/tests/scala-instance-classloader/build.sbt @@ -11,19 +11,20 @@ lazy val root = (project in file(".")) libraryDependencies += { "org.scala-lang" % "scala-compiler" % scalaVersion.value % OtherScala.name }, - OtherScala / managedClasspath := Classpaths.managedJars(OtherScala, classpathTypes.value, update.value), + OtherScala / managedClasspath := + Classpaths.managedJars(OtherScala, classpathTypes.value, update.value, fileConverter.value), // Hack in the scala instance scalaInstance := { - val rawJars = (OtherScala / managedClasspath).value.map(_.data) + val converter = fileConverter.value + val rawJars = (OtherScala / managedClasspath).value.map(c => converter.toPath(c.data).toFile) val scalaHome = (target.value / "scala-home") - def removeVersion(name: String): String = - name.replaceAll("\\-2.12.11", "") + val sv = scalaVersion.value + def removeVersion(name: String): String = name.replaceAll(s"\\-$sv", "") for(jar <- rawJars) { - val tjar = scalaHome / s"lib/${removeVersion(jar.getName)}" - IO.copyFile(jar, tjar) + IO.copyFile(jar, scalaHome / s"lib" / removeVersion(jar.getName)) } - IO.listFiles(scalaHome).foreach(f => System.err.println(s" * $f}")) + IO.listFiles(scalaHome / "lib").foreach(f => System.err.println(s" * $f")) ScalaInstance(scalaHome, appConfiguration.value.provider.scalaProvider.launcher) }, diff --git a/sbt-app/src/sbt-test/tests/scala-instance-classloader/pending b/sbt-app/src/sbt-test/tests/scala-instance-classloader/pending deleted file mode 100644 index a270b7b48..000000000 --- a/sbt-app/src/sbt-test/tests/scala-instance-classloader/pending +++ /dev/null @@ -1 +0,0 @@ -> test \ No newline at end of file diff --git a/sbt-app/src/sbt-test/tests/scala-instance-classloader/test b/sbt-app/src/sbt-test/tests/scala-instance-classloader/test new file mode 100644 index 000000000..dfffb838b --- /dev/null +++ b/sbt-app/src/sbt-test/tests/scala-instance-classloader/test @@ -0,0 +1 @@ +> test From 129b38f55b541f3ca215edd660bc7f5c89284df9 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 15:05:29 +0100 Subject: [PATCH 21/31] Fix tests/serial --- main/src/main/scala/sbt/ProjectExtra.scala | 17 +++++++++-------- sbt-app/src/sbt-test/tests/serial/build.sbt | 12 +++--------- .../src/sbt-test/tests/serial/{pending => test} | 0 3 files changed, 12 insertions(+), 17 deletions(-) rename sbt-app/src/sbt-test/tests/serial/{pending => test} (100%) diff --git a/main/src/main/scala/sbt/ProjectExtra.scala b/main/src/main/scala/sbt/ProjectExtra.scala index b7c956adc..08f845816 100755 --- a/main/src/main/scala/sbt/ProjectExtra.scala +++ b/main/src/main/scala/sbt/ProjectExtra.scala @@ -56,6 +56,7 @@ import sjsonnew.JsonFormat import scala.annotation.targetName import scala.concurrent.{ Await, TimeoutException } import scala.concurrent.duration.* +import ClasspathDep.* /* sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject { @@ -637,13 +638,12 @@ trait ProjectExtra extends Scoped.Syntax: given classpathDependency[A](using Conversion[A, ProjectReference] ): Conversion[A, ClasspathDep[ProjectReference]] = - (a: A) => ClasspathDep.ClasspathDependency(a, None) + (a: A) => ClasspathDependency(a, None) extension (p: ProjectReference) - def %(conf: Configuration): 
ClasspathDep.ClasspathDependency = %(conf.name) + def %(conf: Configuration): ClasspathDependency = %(conf.name) @targetName("percentString") - def %(conf: String): ClasspathDep.ClasspathDependency = - ClasspathDep.ClasspathDependency(p, Some(conf)) + def %(conf: String): ClasspathDependency = ClasspathDependency(p, Some(conf)) extension [A1](in: Def.Initialize[Task[A1]]) def updateState(f: (State, A1) => State): Def.Initialize[Task[A1]] = @@ -712,12 +712,13 @@ trait ProjectExtra extends Scoped.Syntax: p: T )(implicit ev: T => ProjectReference): Constructor = new Constructor(p) - - implicit def classpathDependency[T]( - p: T - )(implicit ev: T => ProjectReference): ClasspathDependency = ClasspathDependency(p, None) */ + implicit def classpathDependency[T](p: T)(implicit + ev: T => ProjectReference + ): ClasspathDependency = + ClasspathDependency(ev(p), None) + // Duplicated with Structure // These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project). diff --git a/sbt-app/src/sbt-test/tests/serial/build.sbt b/sbt-app/src/sbt-test/tests/serial/build.sbt index 11e892129..5f999ecb6 100644 --- a/sbt-app/src/sbt-test/tests/serial/build.sbt +++ b/sbt-app/src/sbt-test/tests/serial/build.sbt @@ -10,20 +10,14 @@ val commonSettings = Seq( lazy val root = (project in file(".")) .aggregate(sub1, sub2) - .settings( - commonSettings - ) + .settings(commonSettings) lazy val rootRef = LocalProject("root") lazy val sub1 = project .dependsOn(rootRef) - .settings( - commonSettings - ) + .settings(commonSettings) lazy val sub2 = project .dependsOn(rootRef) - .settings( - commonSettings - ) + .settings(commonSettings) diff --git a/sbt-app/src/sbt-test/tests/serial/pending b/sbt-app/src/sbt-test/tests/serial/test similarity index 100% rename from sbt-app/src/sbt-test/tests/serial/pending rename to sbt-app/src/sbt-test/tests/serial/test From 52fd2b442773eca176b01f121ab0df4fef71629c Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Wed, 27 Mar 2024 16:18:16 +0100 Subject: [PATCH 22/31] mute unused nowarn --- project/HouseRulesPlugin.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/HouseRulesPlugin.scala b/project/HouseRulesPlugin.scala index 9657a6e5b..3314c25fd 100644 --- a/project/HouseRulesPlugin.scala +++ b/project/HouseRulesPlugin.scala @@ -32,7 +32,7 @@ object HouseRulesPlugin extends AutoPlugin { scalacOptions ++= "-Ywarn-unused-import".ifScala2x(v => 11 <= v && v <= 12).value.toList, scalacOptions ++= { scalaPartV.value match { - case Some((3, _)) => Seq("-Wunused:imports,implicits,nowarn") + case Some((3, _)) => Seq("-Wunused:imports,implicits") // ,nowarn case Some((2, _)) => Seq("-Ywarn-unused:-privates,-locals,-explicits") case _ => Seq.empty } From b0f3cb0a8e9beb520684c8ec516d772b081cbdf0 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 28 Mar 2024 11:59:55 +0100 Subject: [PATCH 23/31] Fix actions/completions When loading a scripted test, sbt creates a jar file and loads it. The path of the jar file is the same for all the batched tests. We must prevent the JDK from caching this jar file to force a reload after each test. Otherwise sbt tries to load the auto-plugins of a previous test and fails. 
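As background for the fix below: URLConnection exposes an instance-level setDefaultUseCaches that updates the JVM-wide default, so constructing any throwaway connection is enough to turn JAR URL caching off for the whole process. A minimal Scala sketch of that idea (the class and object names here are illustrative, not part of this patch):

    import java.net.{ URL, URLConnection }
    import java.nio.file.Paths

    // Illustrative no-op connection: it exists only to reach the instance-level
    // setDefaultUseCaches, which changes the process-wide default.
    final class NoopUrlConnection(url: URL) extends URLConnection(url) {
      override def connect(): Unit = ()
    }

    object DisableJarUrlCaching {
      def apply(): Unit = {
        val conn = new NoopUrlConnection(Paths.get(".").toUri.toURL)
        // From now on, fresh jar:file: connections re-read the jar from disk
        // instead of reusing a cached JarFile for the same path.
        conn.setDefaultUseCaches(false)
      }
    }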
--- .../java/sbt/internal/XMainConfiguration.java | 26 ++++++++++++++++++- main/src/main/scala/sbt/EvaluateTask.scala | 3 +-- .../scala/sbt/internal/PluginDiscovery.scala | 7 ++--- 3 files changed, 30 insertions(+), 6 deletions(-) diff --git a/main/src/main/java/sbt/internal/XMainConfiguration.java b/main/src/main/java/sbt/internal/XMainConfiguration.java index d6cf69af2..674d39008 100644 --- a/main/src/main/java/sbt/internal/XMainConfiguration.java +++ b/main/src/main/java/sbt/internal/XMainConfiguration.java @@ -8,10 +8,14 @@ package sbt.internal; import java.io.File; +import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; +import java.net.MalformedURLException; import java.net.URL; +import java.net.URLConnection; +import java.nio.file.Paths; import xsbti.*; /** @@ -25,6 +29,10 @@ public class XMainConfiguration { public xsbti.MainResult run(String moduleName, xsbti.AppConfiguration configuration) throws Throwable { try { + boolean isScripted = Boolean.parseBoolean(System.getProperty("sbt.scripted")); + // in batch scripted tests, we disable caching of JAR URL connections to avoid interference + // between tests + if (isScripted) disableCachingOfURLConnections(); ClassLoader topLoader = configuration.provider().scalaProvider().launcher().topLoader(); xsbti.AppConfiguration updatedConfiguration = null; try { @@ -56,7 +64,7 @@ public class XMainConfiguration { clw.getMethod("warmup").invoke(clw.getField("MODULE$").get(null)); return (xsbti.MainResult) runMethod.invoke(instance, updatedConfiguration); } catch (InvocationTargetException e) { - // This propogates xsbti.FullReload to the launcher + // This propagates xsbti.FullReload to the launcher throw e.getCause(); } } catch (ReflectiveOperationException e) { @@ -104,6 +112,22 @@ public class XMainConfiguration { } } + private class FakeURLConnection extends URLConnection { + public FakeURLConnection(URL url) { + super(url); + } + + public void connect() throws IOException {} + } + + private void disableCachingOfURLConnections() { + try { + URLConnection conn = new FakeURLConnection(Paths.get(".").toUri().toURL()); + conn.setDefaultUseCaches(false); + } catch (MalformedURLException e) { + } + } + /* * Replaces the AppProvider.loader method with a new loader that puts the sbt test interface * jar ahead of the rest of the sbt classpath in the classloading hierarchy. 
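Aside, for readers following the classloader comment above: the general layering trick is to put the jars that must win in a parent loader and everything else in a child, since parent-first delegation resolves classes from the parent before the child's own URLs. A rough sketch with made-up names (not sbt's actual internal loaders):

    import java.io.File
    import java.net.{ URL, URLClassLoader }

    object LayeredLoaders {
      private def urls(files: Seq[File]): Array[URL] = files.map(_.toURI.toURL).toArray

      // Classes in `prioritized` shadow same-named classes in `rest`, because
      // URLClassLoader delegates to its parent before searching its own URLs.
      def layered(prioritized: Seq[File], rest: Seq[File], top: ClassLoader): ClassLoader = {
        val parent = new URLClassLoader(urls(prioritized), top)
        new URLClassLoader(urls(rest), parent)
      }
    }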
diff --git a/main/src/main/scala/sbt/EvaluateTask.scala b/main/src/main/scala/sbt/EvaluateTask.scala index 8c8f42a66..06e5b9105 100644 --- a/main/src/main/scala/sbt/EvaluateTask.scala +++ b/main/src/main/scala/sbt/EvaluateTask.scala @@ -337,10 +337,9 @@ object EvaluateTask { def evalPluginDef(pluginDef: BuildStructure, state: State): PluginData = { val root = ProjectRef(pluginDef.root, Load.getRootProject(pluginDef.units)(pluginDef.root)) - val pluginKey = pluginData val config = extractedTaskConfig(Project.extract(state), pluginDef, state) val evaluated = - apply(pluginDef, ScopedKey(pluginKey.scope, pluginKey.key), state, root, config) + apply(pluginDef, ScopedKey(pluginData.scope, pluginData.key), state, root, config) val (newS, result) = evaluated getOrElse sys.error( "Plugin data does not exist for plugin definition at " + pluginDef.root ) diff --git a/main/src/main/scala/sbt/internal/PluginDiscovery.scala b/main/src/main/scala/sbt/internal/PluginDiscovery.scala index 79f3aba11..35afdd3fb 100644 --- a/main/src/main/scala/sbt/internal/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/internal/PluginDiscovery.scala @@ -136,9 +136,10 @@ object PluginDiscovery: .getResources(resourceName) .asScala .toSeq - .filter(onClasspath(classpath, converter)) flatMap { u => - IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty) - } + .filter(onClasspath(classpath, converter)) + .flatMap { u => + IO.readLinesURL(u).map(_.trim).filter(!_.isEmpty) + } /** Returns `true` if `url` is an entry in `classpath`. */ def onClasspath(classpath: Def.Classpath, converter: FileConverter)(url: URL): Boolean = From 1c03463e190f7e4e89c11df97a421b160260e543 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 4 Apr 2024 12:28:52 +0200 Subject: [PATCH 24/31] Fix test-quick --- main/src/main/scala/sbt/Defaults.scala | 43 ++++++++----------- main/src/main/scala/sbt/RemoteCache.scala | 28 +++++------- .../src/sbt-test/tests/test-quick/build.sbt | 2 + .../tests/test-quick/{pending => test} | 2 +- 4 files changed, 33 insertions(+), 42 deletions(-) rename sbt-app/src/sbt-test/tests/test-quick/{pending => test} (98%) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index b52fca223..1caf725f4 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -1427,40 +1427,35 @@ object Defaults extends BuildCommon { Def.task { val cp = (test / fullClasspath).value val s = (test / streams).value - val ans: Seq[Analysis] = cp + val analyses: Seq[Analysis] = cp .flatMap(_.metadata.get(Keys.analysis)) .map: str => RemoteCache.getCachedAnalysis(str).asInstanceOf[Analysis] val succeeded = TestStatus.read(succeededFile(s.cacheDirectory)) val stamps = collection.mutable.Map.empty[String, Long] - def stamp(dep: String): Long = { - val stamps = for (a <- ans) yield intlStamp(dep, a, Set.empty) - if (stamps.isEmpty) Long.MinValue - else stamps.max - } - def intlStamp(c: String, analysis: Analysis, s: Set[String]): Long = { - if (s contains c) Long.MinValue + def stamp(dep: String): Option[Long] = + analyses.flatMap(internalStamp(dep, _, Set.empty)).maxOption + def internalStamp(c: String, analysis: Analysis, alreadySeen: Set[String]): Option[Long] = { + if (alreadySeen.contains(c)) None else - stamps.getOrElse( - c, { - val x = { - import analysis.{ apis, relations } - relations.internalClassDeps(c).map(intlStamp(_, analysis, s + c)) ++ - relations.externalDeps(c).map(stamp) ++ - relations.productClassName.reverse(c).flatMap { pc => - 
apis.internal.get(pc).map(_.compilationTimestamp) - } + Long.MinValue - }.max - if (x != Long.MinValue) { - stamps(c) = x - } - x + def computeAndStoreStamp: Option[Long] = { + import analysis.{ apis, relations } + val internalDeps = relations + .internalClassDeps(c) + .flatMap(internalStamp(_, analysis, alreadySeen + c)) + val externalDeps = relations.externalDeps(c).flatMap(stamp) + val classStamps = relations.productClassName.reverse(c).flatMap { pc => + apis.internal.get(pc).map(_.compilationTimestamp) } - ) + val maxStamp = (internalDeps ++ externalDeps ++ classStamps).maxOption + maxStamp.foreach(maxStamp => stamps(c) = maxStamp) + maxStamp + } + stamps.get(c).orElse(computeAndStoreStamp) } def noSuccessYet(test: String) = succeeded.get(test) match { case None => true - case Some(ts) => stamps.synchronized(stamp(test)) > ts + case Some(ts) => stamps.synchronized(stamp(test)).exists(_ > ts) } args => for (filter <- selectedFilter(args)) diff --git a/main/src/main/scala/sbt/RemoteCache.scala b/main/src/main/scala/sbt/RemoteCache.scala index 91801f920..a4c593a89 100644 --- a/main/src/main/scala/sbt/RemoteCache.scala +++ b/main/src/main/scala/sbt/RemoteCache.scala @@ -22,16 +22,10 @@ import sbt.ProjectExtra.* import sbt.ScopeFilter.Make._ import sbt.SlashSyntax0._ import sbt.coursierint.LMCoursier -import sbt.internal.inc.{ - CompileOutput, - FileAnalysisStore, - HashUtil, - JarUtils, - MappedFileConverter -} +import sbt.internal.inc.{ CompileOutput, HashUtil, JarUtils, MappedFileConverter } import sbt.internal.librarymanagement._ import sbt.internal.remotecache._ -import sbt.internal.inc.Analysis +import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler } import sbt.io.IO import sbt.io.syntax._ import sbt.librarymanagement._ @@ -50,7 +44,7 @@ import sbt.util.{ } import sjsonnew.JsonFormat import xsbti.{ HashedVirtualFileRef, VirtualFileRef } -import xsbti.compile.{ AnalysisContents, CompileAnalysis, MiniSetup, MiniOptions } +import xsbti.compile.{ AnalysisContents, AnalysisStore, CompileAnalysis, MiniSetup, MiniOptions } import scala.collection.mutable @@ -83,13 +77,10 @@ object RemoteCache { private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis = analysisStore.getOrElseUpdate( ref, { - val vfs = cacheStore.getBlobs(ref :: Nil) - if vfs.nonEmpty then - val outputDirectory = Def.cacheConfiguration.outputDirectory - cacheStore.syncBlobs(vfs, outputDirectory).headOption match - case Some(file) => FileAnalysisStore.binary(file.toFile()).get.get.getAnalysis - case None => Analysis.empty - else Analysis.empty + val outputDirectory = Def.cacheConfiguration.outputDirectory + cacheStore.syncBlobs(ref :: Nil, outputDirectory).headOption match + case Some(file) => analysisStore(file).get.get.getAnalysis + case None => Analysis.empty } ) @@ -106,7 +97,7 @@ object RemoteCache { false, Array() ) - FileAnalysisStore.binary(file).set(AnalysisContents.create(analysis, setup)) + analysisStore(file.toPath).set(AnalysisContents.create(analysis, setup)) val vf = tempConverter.toVirtualFile(file.toPath) val refs = cacheStore.putBlobs(vf :: Nil) refs.headOption match @@ -115,6 +106,9 @@ object RemoteCache { Some(ref) case None => None + private def analysisStore(file: Path): AnalysisStore = + MixedAnalyzingCompiler.staticCachedStore(file, true) + private[sbt] def artifactToStr(art: Artifact): String = { import LibraryManagementCodec._ import sjsonnew.support.scalajson.unsafe._ diff --git a/sbt-app/src/sbt-test/tests/test-quick/build.sbt 
b/sbt-app/src/sbt-test/tests/test-quick/build.sbt index 9fd396d27..9153720ef 100644 --- a/sbt-app/src/sbt-test/tests/test-quick/build.sbt +++ b/sbt-app/src/sbt-test/tests/test-quick/build.sbt @@ -1,3 +1,5 @@ +Global / cacheStores := Seq.empty + val scalatest = "org.scalatest" %% "scalatest" % "3.0.5" ThisBuild / scalaVersion := "2.12.12" diff --git a/sbt-app/src/sbt-test/tests/test-quick/pending b/sbt-app/src/sbt-test/tests/test-quick/test similarity index 98% rename from sbt-app/src/sbt-test/tests/test-quick/pending rename to sbt-app/src/sbt-test/tests/test-quick/test index b3afce6e3..c86f0276f 100644 --- a/sbt-app/src/sbt-test/tests/test-quick/pending +++ b/sbt-app/src/sbt-test/tests/test-quick/test @@ -9,7 +9,7 @@ $ copy-file changed/A.scala src/main/scala/A.scala > compile $ sleep 2000 # Create is run. Delete is not since it doesn't have src/main dependency. -> testQuick +-> testQuick > testOnly Delete # Previous run of Create failed, re-run. > testQuick Create From 85fcb014f0ed34b822ae0841fee506b064cbafe1 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Thu, 4 Apr 2024 15:16:10 +0200 Subject: [PATCH 25/31] Fix data race to remote cache on classpath analyses The attributed classpath should point to the existing analysis file in the target folder to avoid data racing to the remote cache. --- main-settings/src/main/scala/sbt/Def.scala | 2 +- main/src/main/scala/sbt/Defaults.scala | 82 +++++++++---------- main/src/main/scala/sbt/RemoteCache.scala | 58 ++----------- .../main/scala/sbt/internal/BuildDef.scala | 12 +-- .../scala/sbt/internal/ClasspathImpl.scala | 69 +++++++--------- .../scala/sbt/internal/PluginDiscovery.scala | 2 +- .../sbt/internal/server/Definition.scala | 2 +- .../scala/sbt/util/ActionCacheStore.scala | 2 +- 8 files changed, 83 insertions(+), 146 deletions(-) diff --git a/main-settings/src/main/scala/sbt/Def.scala b/main-settings/src/main/scala/sbt/Def.scala index 739c2f7f7..89b6c9083 100644 --- a/main-settings/src/main/scala/sbt/Def.scala +++ b/main-settings/src/main/scala/sbt/Def.scala @@ -229,7 +229,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits: import language.experimental.macros - // These are here, as opposed to RemoteCahe, since we need them from TaskMacro etc + // These are here, as opposed to RemoteCache, since we need them from TaskMacro etc private[sbt] var _cacheStore: ActionCacheStore = InMemoryActionCacheStore() def cacheStore: ActionCacheStore = _cacheStore private[sbt] var _outputDirectory: Option[Path] = None diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index 1caf725f4..ee10744d2 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -102,6 +102,7 @@ import sbt.SlashSyntax0._ import sbt.internal.inc.{ Analysis, AnalyzingCompiler, + FileAnalysisStore, ManagedLoggedReporter, MixedAnalyzingCompiler, ScalaInstance @@ -143,22 +144,17 @@ object Defaults extends BuildCommon { def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app) - def extractAnalysis[A1](a: Attributed[A1]): (A1, CompileAnalysis) = - ( - a.data, - a.metadata.get(Keys.analysis) match - case Some(ref) => RemoteCache.getCachedAnalysis(ref) - case None => Analysis.Empty - ) - - def analysisMap[T](cp: Seq[Attributed[T]]): T => Option[CompileAnalysis] = { - val m = (for { - a <- cp - ref <- a.metadata.get(Keys.analysis) - an = RemoteCache.getCachedAnalysis(ref) - } yield (a.data, an)).toMap - m.get(_) - } + private[sbt] def extractAnalysis( 
+ metadata: StringAttributeMap, + converter: FileConverter + ): Option[CompileAnalysis] = + def asBinary(file: File) = FileAnalysisStore.binary(file).get.asScala + def asText(file: File) = FileAnalysisStore.text(file).get.asScala + for + ref <- metadata.get(Keys.analysis) + file = converter.toPath(VirtualFileRef.of(ref)).toFile + content <- asBinary(file).orElse(asText(file)) + yield content.getAnalysis private[sbt] def globalDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = Def.defaultSettings(inScope(GlobalScope)(ss)) @@ -1427,10 +1423,10 @@ object Defaults extends BuildCommon { Def.task { val cp = (test / fullClasspath).value val s = (test / streams).value - val analyses: Seq[Analysis] = cp - .flatMap(_.metadata.get(Keys.analysis)) - .map: str => - RemoteCache.getCachedAnalysis(str).asInstanceOf[Analysis] + val converter = fileConverter.value + val analyses = cp + .flatMap(a => extractAnalysis(a.metadata, converter)) + .collect { case analysis: Analysis => analysis } val succeeded = TestStatus.read(succeededFile(s.cacheDirectory)) val stamps = collection.mutable.Map.empty[String, Long] def stamp(dep: String): Option[Long] = @@ -2396,7 +2392,7 @@ object Defaults extends BuildCommon { val _ = compileIncremental.value val exportP = exportPipelining.value // Save analysis midway if pipelining is enabled - val store = analysisStore + val store = analysisStore(compileAnalysisFile) val contents = store.unsafeGet() if (exportP) { // this stores the eary analysis (again) in case the subproject contains a macro @@ -2421,7 +2417,7 @@ object Defaults extends BuildCommon { .debug(s"${name.value}: compileEarly: blocking on earlyOutputPing") earlyOutputPing.await.value }) { - val store = earlyAnalysisStore + val store = analysisStore(earlyCompileAnalysisFile) store.get.toOption match { case Some(contents) => contents.getAnalysis case _ => Analysis.empty @@ -2433,7 +2429,7 @@ object Defaults extends BuildCommon { def compileTask: Initialize[Task[CompileAnalysis]] = Def.task { val setup: Setup = compileIncSetup.value - val store = analysisStore + val store = analysisStore(compileAnalysisFile) val c = fileConverter.value // TODO - expose bytecode manipulation phase. val analysisResult: CompileResult = manipulateBytecode.value @@ -2456,7 +2452,7 @@ object Defaults extends BuildCommon { val bspTask = (compile / bspCompileTask).value val result = cachedCompileIncrementalTask.result.value val reporter = (compile / bspReporter).value - val store = analysisStore + val store = analysisStore(compileAnalysisFile) val ci = (compile / compileInputs).value result match case Result.Value(res) => @@ -2489,7 +2485,7 @@ object Defaults extends BuildCommon { val ci2 = (compile / compileInputs2).value val ping = (TaskZero / earlyOutputPing).value val setup: Setup = (TaskZero / compileIncSetup).value - val store = analysisStore + val store = analysisStore(compileAnalysisFile) val c = fileConverter.value // TODO - Should readAnalysis + saveAnalysis be scoped by the compile task too? 
val analysisResult = Retry(compileIncrementalTaskImpl(bspTask, s, ci, ping)) @@ -2570,17 +2566,22 @@ object Defaults extends BuildCommon { def compileIncSetupTask = Def.task { val cp = dependencyPicklePath.value + val converter = fileConverter.value + val cachedAnalysisMap: Map[VirtualFile, CompileAnalysis] = ( + for + attributed <- cp + analysis <- extractAnalysis(attributed.metadata, converter) + yield (converter.toVirtualFile(attributed.data), analysis) + ).toMap + val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass = + Keys.classpathEntryDefinesClassVF.value val lookup = new PerClasspathEntryLookup: - private val cachedAnalysisMap: VirtualFile => Option[CompileAnalysis] = - analysisMap(cp) - private val cachedPerEntryDefinesClassLookup: VirtualFile => DefinesClass = - Keys.classpathEntryDefinesClassVF.value override def analysis(classpathEntry: VirtualFile): Optional[CompileAnalysis] = - cachedAnalysisMap(classpathEntry).toOptional + cachedAnalysisMap.get(classpathEntry).toOptional override def definesClass(classpathEntry: VirtualFile): DefinesClass = cachedPerEntryDefinesClassLookup(classpathEntry) val extra = extraIncOptions.value.map(t2) - val store = earlyAnalysisStore + val store = analysisStore(earlyCompileAnalysisFile) val eaOpt = if exportPipelining.value then Some(store) else None Setup.of( lookup, @@ -2685,7 +2686,7 @@ object Defaults extends BuildCommon { def compileAnalysisSettings: Seq[Setting[_]] = Seq( previousCompile := { val setup = compileIncSetup.value - val store = analysisStore + val store = analysisStore(compileAnalysisFile) val prev = store.get().toOption match { case Some(contents) => val analysis = Option(contents.getAnalysis).toOptional @@ -2697,17 +2698,11 @@ object Defaults extends BuildCommon { } ) - private inline def analysisStore: AnalysisStore = { - val setup = compileIncSetup.value - val useBinary = enableBinaryCompileAnalysis.value - MixedAnalyzingCompiler.staticCachedStore(setup.cacheFile.toPath, !useBinary) - } - - private inline def earlyAnalysisStore: AnalysisStore = { - val earlyAnalysisPath = earlyCompileAnalysisFile.value.toPath - val useBinary = enableBinaryCompileAnalysis.value - MixedAnalyzingCompiler.staticCachedStore(earlyAnalysisPath, !useBinary) - } + private inline def analysisStore(inline analysisFile: TaskKey[File]): AnalysisStore = + MixedAnalyzingCompiler.staticCachedStore( + analysisFile.value.toPath, + !enableBinaryCompileAnalysis.value + ) def printWarningsTask: Initialize[Task[Unit]] = Def.task { @@ -4232,7 +4227,6 @@ object Classpaths { new RawRepository(resolver, resolver.getName) } - def analyzed[T](data: T, analysis: CompileAnalysis) = ClasspathImpl.analyzed[T](data, analysis) def makeProducts: Initialize[Task[Seq[File]]] = Def.task { val c = fileConverter.value Def.unit(copyResources.value) diff --git a/main/src/main/scala/sbt/RemoteCache.scala b/main/src/main/scala/sbt/RemoteCache.scala index a4c593a89..f2581004a 100644 --- a/main/src/main/scala/sbt/RemoteCache.scala +++ b/main/src/main/scala/sbt/RemoteCache.scala @@ -22,10 +22,9 @@ import sbt.ProjectExtra.* import sbt.ScopeFilter.Make._ import sbt.SlashSyntax0._ import sbt.coursierint.LMCoursier -import sbt.internal.inc.{ CompileOutput, HashUtil, JarUtils, MappedFileConverter } +import sbt.internal.inc.{ HashUtil, JarUtils } import sbt.internal.librarymanagement._ import sbt.internal.remotecache._ -import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler } import sbt.io.IO import sbt.io.syntax._ import sbt.librarymanagement._ @@ -35,16 +34,10 @@ import 
sbt.nio.FileStamp import sbt.nio.Keys.{ inputFileStamps, outputFileStamps } import sbt.std.TaskExtra._ import sbt.util.InterfaceUtil.toOption -import sbt.util.{ - ActionCacheStore, - AggregateActionCacheStore, - CacheImplicits, - DiskActionCacheStore, - Logger -} +import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, DiskActionCacheStore, Logger } import sjsonnew.JsonFormat import xsbti.{ HashedVirtualFileRef, VirtualFileRef } -import xsbti.compile.{ AnalysisContents, AnalysisStore, CompileAnalysis, MiniSetup, MiniOptions } +import xsbti.compile.CompileAnalysis import scala.collection.mutable @@ -72,43 +65,6 @@ object RemoteCache { val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath() Def._cacheStore = DiskActionCacheStore(tempDiskCache) - private[sbt] def getCachedAnalysis(ref: String): CompileAnalysis = - getCachedAnalysis(CacheImplicits.strToHashedVirtualFileRef(ref)) - private[sbt] def getCachedAnalysis(ref: HashedVirtualFileRef): CompileAnalysis = - analysisStore.getOrElseUpdate( - ref, { - val outputDirectory = Def.cacheConfiguration.outputDirectory - cacheStore.syncBlobs(ref :: Nil, outputDirectory).headOption match - case Some(file) => analysisStore(file).get.get.getAnalysis - case None => Analysis.empty - } - ) - - private[sbt] val tempConverter: MappedFileConverter = MappedFileConverter.empty - private[sbt] def postAnalysis(analysis: CompileAnalysis): Option[HashedVirtualFileRef] = - IO.withTemporaryFile("analysis", ".tmp", true): file => - val output = CompileOutput.empty - val option = MiniOptions.of(Array(), Array(), Array()) - val setup = MiniSetup.of( - output, - option, - "", - xsbti.compile.CompileOrder.Mixed, - false, - Array() - ) - analysisStore(file.toPath).set(AnalysisContents.create(analysis, setup)) - val vf = tempConverter.toVirtualFile(file.toPath) - val refs = cacheStore.putBlobs(vf :: Nil) - refs.headOption match - case Some(ref) => - analysisStore(ref) = analysis - Some(ref) - case None => None - - private def analysisStore(file: Path): AnalysisStore = - MixedAnalyzingCompiler.staticCachedStore(file, true) - private[sbt] def artifactToStr(art: Artifact): String = { import LibraryManagementCodec._ import sjsonnew.support.scalajson.unsafe._ @@ -556,10 +512,10 @@ object RemoteCache { key: SettingKey[A], pkgTasks: Seq[TaskKey[HashedVirtualFileRef]] ): Def.Initialize[Seq[A]] = - (Classpaths.forallIn(key, pkgTasks) zipWith - Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks))(_ zip _ collect { case (a, true) => - a - }) + Classpaths + .forallIn(key, pkgTasks) + .zipWith(Classpaths.forallIn(pushRemoteCacheArtifact, pkgTasks)) + .apply(_.zip(_).collect { case (a, true) => a }) private def extractHash(inputs: Seq[(Path, FileStamp)]): Vector[String] = inputs.toVector map { case (_, stamp0) => diff --git a/main/src/main/scala/sbt/internal/BuildDef.scala b/main/src/main/scala/sbt/internal/BuildDef.scala index 785db3e30..6085b780e 100644 --- a/main/src/main/scala/sbt/internal/BuildDef.scala +++ b/main/src/main/scala/sbt/internal/BuildDef.scala @@ -15,6 +15,7 @@ import Def.Setting import sbt.io.Hash import sbt.internal.util.Attributed import sbt.internal.inc.ReflectUtilities +import xsbti.FileConverter trait BuildDef { def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects @@ -72,10 +73,9 @@ private[sbt] object BuildDef { autoGeneratedProject := true ) - def analyzed(in: Seq[Attributed[_]]): Seq[xsbti.compile.CompileAnalysis] = - in.flatMap: a => - a.metadata - .get(Keys.analysis) - .map: str => - 
RemoteCache.getCachedAnalysis(str) + def analyzed( + in: Seq[Attributed[_]], + converter: FileConverter + ): Seq[xsbti.compile.CompileAnalysis] = + in.flatMap(a => Defaults.extractAnalysis(a.metadata, converter)) } diff --git a/main/src/main/scala/sbt/internal/ClasspathImpl.scala b/main/src/main/scala/sbt/internal/ClasspathImpl.scala index 7e2f81dd9..ff1aabcdf 100644 --- a/main/src/main/scala/sbt/internal/ClasspathImpl.scala +++ b/main/src/main/scala/sbt/internal/ClasspathImpl.scala @@ -15,15 +15,13 @@ import sbt.Keys._ import sbt.nio.Keys._ import sbt.nio.file.{ Glob, RecursiveGlob } import sbt.Def.Initialize -import sbt.internal.inc.Analysis -import sbt.internal.inc.JavaInterfaceUtil._ import sbt.internal.util.{ Attributed, Dag, Settings } import sbt.librarymanagement.{ Configuration, TrackLevel } import sbt.librarymanagement.Configurations.names import sbt.std.TaskExtra._ import sbt.util._ import scala.jdk.CollectionConverters.* -import xsbti.{ HashedVirtualFileRef, VirtualFileRef } +import xsbti.{ HashedVirtualFileRef, VirtualFile, VirtualFileRef } import xsbti.compile.CompileAnalysis private[sbt] object ClasspathImpl { @@ -38,10 +36,13 @@ private[sbt] object ClasspathImpl { val config = configuration.value val products = pickleProducts.value val analysis = compileEarly.value - val xs = products map { _ -> analysis } + val converter = fileConverter.value + val analysisFile = converter.toVirtualFile(earlyCompileAnalysisFile.value.toPath) + + val xs = products.map(_ -> analysis) for (f, analysis) <- xs yield APIMappings - .store(analyzed(f, analysis), apiURL.value) + .store(analyzed(f, analysisFile), apiURL.value) .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) .put(Keys.configurationStr, config.name) else exportedProducts.value @@ -55,7 +56,7 @@ private[sbt] object ClasspathImpl { val config = configuration.value for (f, analysis) <- trackedExportedProductsImplTask(track).value yield APIMappings - .store(analyzed[HashedVirtualFileRef](f, analysis), apiURL.value) + .store(analyzed(f, analysis), apiURL.value) .put(Keys.artifactStr, RemoteCache.artifactToStr(art)) .put(Keys.moduleIDStr, Classpaths.moduleIdJsonKeyFormat.write(module)) .put(Keys.configurationStr, config.name) @@ -67,7 +68,6 @@ private[sbt] object ClasspathImpl { val art = (packageBin / artifact).value val module = projectID.value val config = configuration.value - val converter = fileConverter.value for (f, analysis) <- trackedJarProductsImplTask(track).value yield APIMappings .store(analyzed(f, analysis), apiURL.value) @@ -78,7 +78,7 @@ private[sbt] object ClasspathImpl { private[this] def trackedExportedProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] = Def.taskIf { if { val _ = (packageBin / dynamicDependency).value @@ -89,44 +89,38 @@ private[sbt] object ClasspathImpl { private[this] def trackedNonJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = - (Def + ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] = + Def .task { val dirs = productDirectories.value val view = fileTreeView.value (TrackLevel.intersection(track, exportToInternal.value), dirs, view) - }) + } .flatMapTask { case (TrackLevel.TrackAlways, _, _) => Def.task { val converter = fileConverter.value - val a = compile.value - products.value - .map { x => converter.toVirtualFile(x.toPath()) } - .map { (_, a) } + val analysisFile = 
converter.toVirtualFile(compileAnalysisFile.value.toPath) + products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile)) } case (TrackLevel.TrackIfMissing, dirs, view) if view.list(dirs.map(Glob(_, RecursiveGlob / "*.class"))).isEmpty => Def.task { val converter = fileConverter.value - val a = compile.value - products.value - .map { x => converter.toVirtualFile(x.toPath()) } - .map { (_, a) } + val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath) + products.value.map(x => (converter.toVirtualFile(x.toPath()), analysisFile)) } case (_, dirs, _) => Def.task { val converter = fileConverter.value - val analysis = previousCompile.value.analysis.toOption.getOrElse(Analysis.empty) - dirs - .map { x => converter.toVirtualFile(x.toPath()) } - .map(_ -> analysis) + val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath) + dirs.map { x => (converter.toVirtualFile(x.toPath()), analysisFile) } } } private[this] def trackedJarProductsImplTask( track: TrackLevel - ): Initialize[Task[Seq[(HashedVirtualFileRef, CompileAnalysis)]]] = + ): Initialize[Task[Seq[(HashedVirtualFileRef, VirtualFile)]]] = (Def .task { val converter = fileConverter.value @@ -137,23 +131,21 @@ private[sbt] object ClasspathImpl { .flatMapTask { case (TrackLevel.TrackAlways, _, _) => Def.task { - Seq((packageBin.value, compile.value)) + val converter = fileConverter.value + val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath) + Seq((packageBin.value, analysisFile)) } case (TrackLevel.TrackIfMissing, _, jar) if !jar.toFile().exists => Def.task { - Seq((packageBin.value, compile.value)) + val converter = fileConverter.value + val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath) + Seq((packageBin.value, analysisFile)) } case (_, vf, _) => Def.task { val converter = fileConverter.value - val analysisOpt = previousCompile.value.analysis.toOption - Seq(vf).map(converter.toPath).map(converter.toVirtualFile).map { x => - ( - x, - if (analysisOpt.isDefined) analysisOpt.get - else Analysis.empty - ) - } + val analysisFile = converter.toVirtualFile(compileAnalysisFile.value.toPath) + Seq(vf).map { x => (converter.toVirtualFile(x), analysisFile) } } } @@ -351,13 +343,8 @@ private[sbt] object ClasspathImpl { (tasks.toSeq.join).map(_.flatten.distinct) } - def analyzed[A](data: A, analysis: CompileAnalysis) = - RemoteCache.postAnalysis(analysis) match - case Some(ref) => - Attributed - .blank(data) - .put(Keys.analysis, CacheImplicits.hashedVirtualFileRefToStr(ref)) - case None => Attributed.blank(data) + def analyzed[A](data: A, analysisFile: VirtualFile): Attributed[A] = + Attributed.blank(data).put(Keys.analysis, analysisFile.id) def interSort( projectRef: ProjectRef, diff --git a/main/src/main/scala/sbt/internal/PluginDiscovery.scala b/main/src/main/scala/sbt/internal/PluginDiscovery.scala index 35afdd3fb..4bbae320b 100644 --- a/main/src/main/scala/sbt/internal/PluginDiscovery.scala +++ b/main/src/main/scala/sbt/internal/PluginDiscovery.scala @@ -105,7 +105,7 @@ object PluginDiscovery: ): Seq[String] = ( binaryModuleNames(classpath, converter, loader, resourceName) ++ - (analyzed(classpath) flatMap (a => sourceModuleNames(a, subclasses: _*))) + analyzed(classpath, converter).flatMap(a => sourceModuleNames(a, subclasses: _*)) ).distinct /** Discovers top-level modules in `analysis` that inherit from any of `subclasses`. 
*/ diff --git a/main/src/main/scala/sbt/internal/server/Definition.scala b/main/src/main/scala/sbt/internal/server/Definition.scala index 6514027fa..4b92e29b5 100644 --- a/main/src/main/scala/sbt/internal/server/Definition.scala +++ b/main/src/main/scala/sbt/internal/server/Definition.scala @@ -199,7 +199,7 @@ private[sbt] object Definition { } def collectAnalysesTask = Def.task { - val cacheFile: String = compileIncSetup.value.cacheFile.getAbsolutePath + val cacheFile: String = compileAnalysisFile.value.getAbsolutePath val useBinary = enableBinaryCompileAnalysis.value val s = state.value s.log.debug(s"analysis location ${cacheFile -> useBinary}") diff --git a/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala index c1bf2ef23..adaf2804b 100644 --- a/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala +++ b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala @@ -13,7 +13,7 @@ import sbt.io.syntax.* import xsbti.{ HashedVirtualFileRef, PathBasedFile, VirtualFile } /** - * An abstration of a remote or local cache store. + * An abstraction of a remote or local cache store. */ trait ActionCacheStore: /** From e1cf43c6bd15b059e20d58d14b7b2e5c31f03dac Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Mon, 8 Apr 2024 15:53:12 +0200 Subject: [PATCH 26/31] Add jar file to compileOutputs --- main/src/main/scala/sbt/Defaults.scala | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala index ee10744d2..f28d95b57 100644 --- a/main/src/main/scala/sbt/Defaults.scala +++ b/main/src/main/scala/sbt/Defaults.scala @@ -899,11 +899,9 @@ object Defaults extends BuildCommon { compileOutputs := { import scala.jdk.CollectionConverters.* val c = fileConverter.value - val classFiles = - manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala - (classFiles.toSeq map { x => - c.toPath(x) - }) :+ compileAnalysisFile.value.toPath + val (_, jarFile) = compileIncremental.value + val classFiles = compile.value.readStamps.getAllProductStamps.keySet.asScala + classFiles.toSeq.map(c.toPath) :+ compileAnalysisFile.value.toPath :+ c.toPath(jarFile) }, compileOutputs := compileOutputs.triggeredBy(compile).value, tastyFiles := Def.taskIf { From ab1aa6d001a7bcaaa0da7ff9b0516689871fb2cb Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Mon, 8 Apr 2024 16:27:35 +0200 Subject: [PATCH 27/31] Fix todos in actions/compile-clean --- .../src/main/scala/sbt/std/KeyMacro.scala | 86 +++++++----------- main/src/main/scala/sbt/ScopedKeyData.scala | 1 + main/src/main/scala/sbt/internal/Clean.scala | 6 ++ main/src/main/scala/sbt/nio/FileStamp.scala | 91 ++++++++----------- main/src/main/scala/sbt/nio/Settings.scala | 84 ++++++++++------- .../src/sbt-test/actions/compile-clean/test | 11 +-- .../scala/sbt/internal/util/Attributes.scala | 2 + 7 files changed, 135 insertions(+), 146 deletions(-) diff --git a/main-settings/src/main/scala/sbt/std/KeyMacro.scala b/main-settings/src/main/scala/sbt/std/KeyMacro.scala index 2d93c047e..500ce4c25 100644 --- a/main-settings/src/main/scala/sbt/std/KeyMacro.scala +++ b/main-settings/src/main/scala/sbt/std/KeyMacro.scala @@ -12,70 +12,54 @@ import java.io.File import scala.quoted.* import scala.reflect.ClassTag -import sbt.util.OptJsonWriter +import sbt.util.{ NoJsonWriter, OptJsonWriter } +import sbt.internal.util.{ AttributeKey, KeyTag } private[sbt] object KeyMacro: - def settingKeyImpl[A1: Type]( - 
description: Expr[String] - )(using qctx: Quotes): Expr[SettingKey[A1]] = - keyImpl2[A1, SettingKey[A1]]("settingKey") { (name, mf, ojw) => - val n = Expr(name) - '{ - SettingKey[A1]($n, $description)($mf, $ojw) - } - } + def settingKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[SettingKey[A1]] = + val name = definingValName(errorMsg("settingKey")) + val tag = '{ KeyTag.Setting[A1](${ summonRuntimeClass[A1] }) } + val ojw = Expr + .summon[OptJsonWriter[A1]] + .getOrElse(errorAndAbort(s"OptJsonWriter[A] not found for ${Type.show[A1]}")) + '{ SettingKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, $ojw)) } - def taskKeyImpl[A1: Type](description: Expr[String])(using qctx: Quotes): Expr[TaskKey[A1]] = - keyImpl[A1, TaskKey[A1]]("taskKey") { (name, mf) => - val n = Expr(name) - '{ - TaskKey[A1]($n, $description)($mf) - } - } + def taskKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[TaskKey[A1]] = + val name = definingValName(errorMsg("taskKey")) + val tag: Expr[KeyTag[Task[A1]]] = Type.of[A1] match + case '[Seq[a]] => + '{ KeyTag.SeqTask(${ summonRuntimeClass[a] }) } + case _ => '{ KeyTag.Task(${ summonRuntimeClass[A1] }) } + '{ TaskKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, NoJsonWriter())) } - def inputKeyImpl[A1: Type](description: Expr[String])(using qctx: Quotes): Expr[InputKey[A1]] = - keyImpl[A1, InputKey[A1]]("inputKey") { (name, mf) => - val n = Expr(name) - '{ - InputKey[A1]($n, $description)($mf) - } - } + def inputKeyImpl[A1: Type](description: Expr[String])(using Quotes): Expr[InputKey[A1]] = + val name = definingValName(errorMsg("inputTaskKey")) + val tag: Expr[KeyTag[InputTask[A1]]] = '{ KeyTag.InputTask(${ summonRuntimeClass[A1] }) } + '{ InputKey(AttributeKey($name, $description, Int.MaxValue)(using $tag, NoJsonWriter())) } - private def keyImpl[A1: Type, A2: Type](methodName: String)( - f: (String, Expr[ClassTag[A1]]) => Expr[A2] - )(using qctx: Quotes): Expr[A2] = - val tpe = summon[Type[A1]] - f( - definingValName(errorMsg(methodName)), - Expr.summon[ClassTag[A1]].getOrElse(sys.error("ClassTag[A] not found for $tpe")) - ) + def projectImpl(using Quotes): Expr[Project] = + val name = definingValName(errorMsg2) + '{ Project($name, new File($name)) } - private def keyImpl2[A1: Type, A2: Type](methodName: String)( - f: (String, Expr[ClassTag[A1]], Expr[OptJsonWriter[A1]]) => Expr[A2] - )(using qctx: Quotes): Expr[A2] = - val tpe = summon[Type[A1]] - f( - definingValName(errorMsg(methodName)), - Expr.summon[ClassTag[A1]].getOrElse(sys.error("ClassTag[A] not found for $tpe")), - Expr.summon[OptJsonWriter[A1]].getOrElse(sys.error("OptJsonWriter[A] not found for $tpe")), - ) + private def summonRuntimeClass[A: Type](using Quotes): Expr[Class[?]] = + val classTag = Expr + .summon[ClassTag[A]] + .getOrElse(errorAndAbort(s"ClassTag[${Type.show[A]}] not found")) + '{ $classTag.runtimeClass } - def projectImpl(using qctx: Quotes): Expr[Project] = - val name = Expr(definingValName(errorMsg2("project"))) - '{ - Project($name, new File($name)) - } + private def errorAndAbort(msg: String)(using q: Quotes): Nothing = + q.reflect.report.errorAndAbort(msg) private def errorMsg(methodName: String): String = s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`.""" - private def errorMsg2(methodName: String): String = - s"""$methodName must be directly assigned to a val, such as `val x = ($methodName in file("core"))`.""" + private def errorMsg2: String = + """project must be 
directly assigned to a val, such as `val x = project.in(file("core"))`.""" - private def definingValName(errorMsg: String)(using qctx: Quotes): String = + private def definingValName(errorMsg: String)(using qctx: Quotes): Expr[String] = val term = enclosingTerm - if term.isValDef then term.name - else sys.error(errorMsg) + if term.isValDef then Expr(term.name) + else errorAndAbort(errorMsg) def enclosingTerm(using qctx: Quotes) = import qctx.reflect._ diff --git a/main/src/main/scala/sbt/ScopedKeyData.scala b/main/src/main/scala/sbt/ScopedKeyData.scala index 76bafd558..c17701c9d 100644 --- a/main/src/main/scala/sbt/ScopedKeyData.scala +++ b/main/src/main/scala/sbt/ScopedKeyData.scala @@ -21,6 +21,7 @@ final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) { def description: String = key.tag match case KeyTag.Task(typeArg) => s"Task: $typeArg" + case KeyTag.SeqTask(typeArg) => s"Task: Seq[$typeArg]" case KeyTag.InputTask(typeArg) => s"Input task: $typeArg" case KeyTag.Setting(typeArg) => s"Setting: $typeArg = $value" } diff --git a/main/src/main/scala/sbt/internal/Clean.scala b/main/src/main/scala/sbt/internal/Clean.scala index 15febbddb..e766a678a 100644 --- a/main/src/main/scala/sbt/internal/Clean.scala +++ b/main/src/main/scala/sbt/internal/Clean.scala @@ -24,6 +24,7 @@ import sbt.nio.file.Glob.{ GlobOps } import sbt.util.Level import sjsonnew.JsonFormat import scala.annotation.nowarn +import xsbti.{ PathBasedFile, VirtualFileRef } private[sbt] object Clean { @@ -142,8 +143,13 @@ private[sbt] object Clean { private[sbt] object ToSeqPath: given identitySeqPath: ToSeqPath[Seq[Path]] = identity[Seq[Path]](_) given seqFile: ToSeqPath[Seq[File]] = _.map(_.toPath) + given virtualFileRefSeq: ToSeqPath[Seq[VirtualFileRef]] = + _.collect { case f: PathBasedFile => f.toPath } given path: ToSeqPath[Path] = _ :: Nil given file: ToSeqPath[File] = _.toPath :: Nil + given virtualFileRef: ToSeqPath[VirtualFileRef] = + case f: PathBasedFile => Seq(f.toPath) + case _ => Nil end ToSeqPath private[this] implicit class ToSeqPathOps[T](val t: T) extends AnyVal { diff --git a/main/src/main/scala/sbt/nio/FileStamp.scala b/main/src/main/scala/sbt/nio/FileStamp.scala index 36d78d9c2..3146f92f5 100644 --- a/main/src/main/scala/sbt/nio/FileStamp.scala +++ b/main/src/main/scala/sbt/nio/FileStamp.scala @@ -16,6 +16,7 @@ import sbt.io.IO import sbt.nio.file.FileAttributes import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError } import xsbti.compile.analysis.{ Stamp => XStamp } +import xsbti.VirtualFileRef /** * A trait that indicates what file stamping implementation should be used to track the state of @@ -102,65 +103,49 @@ object FileStamp { private[sbt] final case class Error(exception: IOException) extends FileStamp object Formats { - implicit val seqPathJsonFormatter: JsonFormat[Seq[Path]] = new JsonFormat[Seq[Path]] { - override def write[J](obj: Seq[Path], builder: Builder[J]): Unit = { - builder.beginArray() - obj.foreach { path => - builder.writeString(path.toString) + implicit val seqPathJsonFormatter: JsonFormat[Seq[Path]] = + asStringArray(_.toString, Paths.get(_)) + implicit val seqFileJsonFormatter: JsonFormat[Seq[File]] = + asStringArray(_.toString, new File(_)) + implicit val seqVirtualFileRefJsonFormatter: JsonFormat[Seq[VirtualFileRef]] = + asStringArray(_.id, VirtualFileRef.of) + + implicit val fileJsonFormatter: JsonFormat[File] = fromSeqJsonFormat[File] + implicit val pathJsonFormatter: JsonFormat[Path] = fromSeqJsonFormat[Path] + implicit val 
virtualFileRefJsonFormatter: JsonFormat[VirtualFileRef] = + fromSeqJsonFormat[VirtualFileRef] + + private def asStringArray[T](toStr: T => String, fromStr: String => T): JsonFormat[Seq[T]] = + new JsonFormat[Seq[T]] { + override def write[J](obj: Seq[T], builder: Builder[J]): Unit = { + builder.beginArray() + obj.foreach { x => builder.writeString(toStr(x)) } + builder.endArray() } - builder.endArray() + + override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[T] = + jsOpt match { + case Some(js) => + val size = unbuilder.beginArray(js) + val res = (1 to size) map { _ => + fromStr(unbuilder.readString(unbuilder.nextElement)) + } + unbuilder.endArray() + res + case None => + deserializationError("Expected JsArray but found None") + } } - override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[Path] = - jsOpt match { - case Some(js) => - val size = unbuilder.beginArray(js) - val res = (1 to size) map { _ => - Paths.get(unbuilder.readString(unbuilder.nextElement)) - } - unbuilder.endArray() - res - case None => - deserializationError("Expected JsArray but found None") - } - } + private def fromSeqJsonFormat[T](using seqJsonFormat: JsonFormat[Seq[T]]): JsonFormat[T] = + new JsonFormat[T] { + override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): T = + seqJsonFormat.read(jsOpt, unbuilder).head - implicit val seqFileJsonFormatter: JsonFormat[Seq[File]] = new JsonFormat[Seq[File]] { - override def write[J](obj: Seq[File], builder: Builder[J]): Unit = { - builder.beginArray() - obj.foreach { file => - builder.writeString(file.toString) - } - builder.endArray() + override def write[J](obj: T, builder: Builder[J]): Unit = + seqJsonFormat.write(obj :: Nil, builder) } - override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Seq[File] = - jsOpt match { - case Some(js) => - val size = unbuilder.beginArray(js) - val res = (1 to size) map { _ => - new File(unbuilder.readString(unbuilder.nextElement)) - } - unbuilder.endArray() - res - case None => - deserializationError("Expected JsArray but found None") - } - } - implicit val fileJsonFormatter: JsonFormat[File] = new JsonFormat[File] { - override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): File = - seqFileJsonFormatter.read(jsOpt, unbuilder).head - - override def write[J](obj: File, builder: Builder[J]): Unit = - seqFileJsonFormatter.write(obj :: Nil, builder) - } - implicit val pathJsonFormatter: JsonFormat[Path] = new JsonFormat[Path] { - override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Path = - seqPathJsonFormatter.read(jsOpt, unbuilder).head - - override def write[J](obj: Path, builder: Builder[J]): Unit = - seqPathJsonFormatter.write(obj :: Nil, builder) - } implicit val seqPathFileStampJsonFormatter: JsonFormat[Seq[(Path, FileStamp)]] = new JsonFormat[Seq[(Path, FileStamp)]] { override def write[J](obj: Seq[(Path, FileStamp)], builder: Builder[J]): Unit = { diff --git a/main/src/main/scala/sbt/nio/Settings.scala b/main/src/main/scala/sbt/nio/Settings.scala index c39a4b909..f2e7488d6 100644 --- a/main/src/main/scala/sbt/nio/Settings.scala +++ b/main/src/main/scala/sbt/nio/Settings.scala @@ -25,6 +25,8 @@ import sjsonnew.JsonFormat import scala.annotation.nowarn import scala.collection.immutable.VectorBuilder +import java.io.File +import xsbti.VirtualFileRef private[sbt] object Settings { private[sbt] def inject(transformed: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = { @@ -68,45 +70,55 @@ private[sbt] object Settings { setting: Def.Setting[_], fileOutputScopes: Set[Scope] ): 
List[Def.Setting[_]] = { - val attributeKey = setting.key.key - attributeKey.tag match { + setting.key.key.tag match { case tag: KeyTag.Task[?] => - def default: List[Def.Setting[_]] = { - val scope = setting.key.scope.copy(task = Select(attributeKey)) - if (fileOutputScopes.contains(scope)) { - val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key - val scopedKey = Keys.dynamicFileOutputs in (sk.scope in sk.key) - addTaskDefinition { - val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil)) - Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos) - } :: allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(scope) :: Nil - } else Nil - } - def mkSetting[T: JsonFormat: ToSeqPath]: List[Def.Setting[_]] = { - val sk = setting.asInstanceOf[Def.Setting[Task[T]]].key - val taskKey = TaskKey(sk.key) in sk.scope - // We create a previous reference so that clean automatically works without the - // user having to explicitly call previous anywhere. - val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) => - t.map(implicitly[ToSeqPath[T]].apply) - } - val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key) - addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) :: - outputsAndStamps(taskKey) - } - if seqClass.isAssignableFrom(tag.typeArg) then - // TODO fix this: maybe using the taskKey macro to convey the information - // t.typeArguments match { - // case p :: Nil if pathClass.isAssignableFrom(p.runtimeClass) => mkSetting[Seq[Path]] - // case _ => default - // } - default - else if pathClass.isAssignableFrom(tag.typeArg) then mkSetting[Path] - else default + if pathClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[Path](setting) + else if fileClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[File](setting) + else if virtualFileRefClass.isAssignableFrom(tag.typeArg) then + addOutputAndStampTasks[VirtualFileRef](setting) + else addDefaultTasks(setting, fileOutputScopes) + case tag: KeyTag.SeqTask[?] => + if pathClass.isAssignableFrom(tag.typeArg) then addOutputAndStampTasks[Seq[Path]](setting) + else if fileClass.isAssignableFrom(tag.typeArg) then + addOutputAndStampTasks[Seq[File]](setting) + else if virtualFileRefClass.isAssignableFrom(tag.typeArg) then + addOutputAndStampTasks[Seq[VirtualFileRef]](setting) + else addDefaultTasks(setting, fileOutputScopes) case _ => Nil } } + @nowarn + private def addDefaultTasks( + setting: Def.Setting[_], + fileOutputScopes: Set[Scope] + ): List[Def.Setting[_]] = { + val scope = setting.key.scope.copy(task = Select(setting.key.key)) + if (fileOutputScopes.contains(scope)) { + val sk = setting.asInstanceOf[Def.Setting[Task[Any]]].key + val scopedKey = Keys.dynamicFileOutputs in (sk.scope in sk.key) + val init: Def.Initialize[Task[Seq[Path]]] = sk(_.map(_ => Nil)) + addTaskDefinition(Def.setting[Task[Seq[Path]]](scopedKey, init, setting.pos)) :: + allOutputPathsImpl(scope) :: outputFileStampsImpl(scope) :: cleanImpl(scope) :: Nil + } else Nil + } + + @nowarn + private def addOutputAndStampTasks[T: JsonFormat: ToSeqPath]( + setting: Def.Setting[_] + ): List[Def.Setting[_]] = { + val sk = setting.asInstanceOf[Def.Setting[Task[T]]].key + val taskKey = TaskKey(sk.key) in sk.scope + // We create a previous reference so that clean automatically works without the + // user having to explicitly call previous anywhere. 
+ val init = Previous.runtime(taskKey).zip(taskKey) { case (_, t) => + t.map(implicitly[ToSeqPath[T]].apply) + } + val key = Def.ScopedKey(taskKey.scope in taskKey.key, Keys.dynamicFileOutputs.key) + addTaskDefinition(Def.setting[Task[Seq[Path]]](key, init, setting.pos)) :: + outputsAndStamps(taskKey) + } + private[sbt] val inject: Def.ScopedKey[_] => Seq[Def.Setting[_]] = scopedKey => scopedKey.key match { case transitiveDynamicInputs.key => @@ -161,7 +173,9 @@ private[sbt] object Settings { } private[this] val seqClass = classOf[Seq[_]] - private[this] val pathClass = classOf[java.nio.file.Path] + private[this] val pathClass = classOf[Path] + private val fileClass = classOf[File] + private val virtualFileRefClass = classOf[VirtualFileRef] /** * Returns all of the paths for the regular files described by a glob. Directories and hidden diff --git a/sbt-app/src/sbt-test/actions/compile-clean/test b/sbt-app/src/sbt-test/actions/compile-clean/test index a1289b6b1..14bce965a 100644 --- a/sbt-app/src/sbt-test/actions/compile-clean/test +++ b/sbt-app/src/sbt-test/actions/compile-clean/test @@ -6,17 +6,14 @@ $ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class > Test/clean $ exists target/cant-touch-this -# TODO it should clean only test classes -# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class -# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class +$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class +$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class $ absent target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class # compiling everything again, but now cleaning only compile classes > Test/products > Compile/clean $ exists target/cant-touch-this -# TODO it should clean only compile classes $ absent target/out/jvm/scala-2.12.17/compile-clean/classes/A.class -# $ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class -# TODO and X has to be kept, because of the cleanKeepFiles override -# $ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class +$ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class +$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class diff --git a/util-collection/src/main/scala/sbt/internal/util/Attributes.scala b/util-collection/src/main/scala/sbt/internal/util/Attributes.scala index 48b0f0567..eaf34b8f1 100644 --- a/util-collection/src/main/scala/sbt/internal/util/Attributes.scala +++ b/util-collection/src/main/scala/sbt/internal/util/Attributes.scala @@ -15,11 +15,13 @@ import sjsonnew.* enum KeyTag[A]: case Setting[A](typeArg: Class[?]) extends KeyTag[A] case Task[A](typeArg: Class[?]) extends KeyTag[A] + case SeqTask[A](typeArg: Class[?]) extends KeyTag[A] case InputTask[A](typeArg: Class[?]) extends KeyTag[A] override def toString: String = this match case Setting(typeArg) => typeArg.toString case Task(typeArg) => s"Task[$typeArg]" + case SeqTask(typeArg) => s"Task[Seq[$typeArg]]" case InputTask(typeArg) => s"InputTask[$typeArg]" def typeArg: Class[?] 
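
To make the effect of the new SeqTask tag concrete, here is a hypothetical
build.sbt fragment of the kind this wiring targets; the generateDocs key and
paths are invented for illustration and are not part of the patch. With the
SeqTask case in KeyMacro and the Seq[File] ToSeqPath/JsonFormat instances
above, a key declared as taskKey[Seq[File]] is now routed to
addOutputAndStampTasks instead of the old TODO fallback, so (per the comment
in that method) its outputs get a previous reference and take part in output
stamping and automatic clean:

    // hypothetical generator task returning Seq[File]; with this patch its key
    // carries KeyTag.SeqTask(classOf[File]) and is picked up by the
    // file-output wiring in nio/Settings.scala
    val generateDocs = taskKey[Seq[File]]("writes a generated doc file under target")

    generateDocs := {
      val out = target.value / "docs" / "index.txt"
      sbt.io.IO.write(out, "generated")
      Seq(out)
    }
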
From 8870cb6a823c6d4e5fba8f99f61171291da4e536 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 9 Apr 2024 09:53:32 +0200 Subject: [PATCH 28/31] Fix Java 8 compat --- server-test/src/test/scala/testpkg/BuildServerTest.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server-test/src/test/scala/testpkg/BuildServerTest.scala b/server-test/src/test/scala/testpkg/BuildServerTest.scala index 68f86e30c..3a55469f5 100644 --- a/server-test/src/test/scala/testpkg/BuildServerTest.scala +++ b/server-test/src/test/scala/testpkg/BuildServerTest.scala @@ -305,12 +305,12 @@ class BuildServerTest extends AbstractServerTest { test("workspace/reload: send diagnostic and respond with error") { // write an other-build.sbt file that does not compile val otherBuildFile = svr.baseDirectory.toPath.resolve("other-build.sbt") - Files.writeString( + Files.write( otherBuildFile, """|val someSettings = Seq( | scalacOptions ++= "-deprecation" |) - |""".stripMargin + |""".stripMargin.getBytes ) // reload reloadWorkspace(id = 52) @@ -331,12 +331,12 @@ class BuildServerTest extends AbstractServerTest { } ) // fix the other-build.sbt file and reload again - Files.writeString( + Files.write( otherBuildFile, """|val someSettings = Seq( | scalacOptions += "-deprecation" |) - |""".stripMargin + |""".stripMargin.getBytes ) reloadWorkspace(id = 52) // assert received an empty diagnostic From 9e6612a3f87203ddff591787f73a36f349b188bf Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 9 Apr 2024 10:59:48 +0200 Subject: [PATCH 29/31] Fix hashing of incrementalCompile --- main/src/main/scala/sbt/Keys.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala index c9c044a67..61ee89863 100644 --- a/main/src/main/scala/sbt/Keys.scala +++ b/main/src/main/scala/sbt/Keys.scala @@ -254,6 +254,7 @@ object Keys { val compileAnalysisFilename = taskKey[String]("Defines the filename used for compileAnalysisFile.").withRank(DTask) val compileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting) val earlyCompileAnalysisTargetRoot = settingKey[File]("The output directory to produce Zinc Analysis files").withRank(DSetting) + @cacheLevel(include = Array.empty) val compileAnalysisFile = taskKey[File]("Zinc analysis storage.").withRank(DSetting) val earlyCompileAnalysisFile = taskKey[File]("Zinc analysis storage for early compilation").withRank(DSetting) From eda67a05fcfabdf5b1eefe2e159f1e2175f5d22f Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 9 Apr 2024 13:42:06 +0200 Subject: [PATCH 30/31] Use fileConverter in cacheStore Otherwise the store cannot sync files that are not in the out folder. 
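
A minimal sketch of what the converter buys syncBlobs; it is illustrative
only, not part of the patch, and the paths and root mapping below are
hypothetical. The previous implementation only stripped a literal "${OUT}/"
prefix from a ref id and resolved the rest against the output directory, so a
blob whose original location was rooted elsewhere, for example under ${BASE},
could not be put back in place. A MappedFileConverter built from the usual
root paths resolves such ids directly:

    import java.nio.file.Paths
    import sbt.internal.inc.MappedFileConverter
    import xsbti.VirtualFileRef

    @main def syncOutsideOutDemo(): Unit =
      // hypothetical root mapping, mirroring the OUT/BASE entries of rootPaths
      val converter = MappedFileConverter(
        Map("OUT" -> Paths.get("/work/proj/target/out"), "BASE" -> Paths.get("/work/proj")),
        allowMachinePath = true
      )
      val ref = VirtualFileRef.of("${BASE}/src/main/scala/A.scala")
      // should resolve to /work/proj/src/main/scala/A.scala, outside the out
      // folder, which a "${OUT}/" prefix check could not reach
      println(converter.toPath(ref))
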
---
 main/src/main/scala/sbt/Defaults.scala        | 64 ++++++++++---------
 main/src/main/scala/sbt/RemoteCache.scala     | 30 +++++----
 .../main/scala/sbt/plugins/IvyPlugin.scala    |  2 -
 .../scala/sbt/util/ActionCacheStore.scala     | 13 ++--
 .../test/scala/sbt/util/ActionCacheTest.scala | 34 ++++++----
 5 files changed, 81 insertions(+), 62 deletions(-)

diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala
index f28d95b57..f41c7fda5 100644
--- a/main/src/main/scala/sbt/Defaults.scala
+++ b/main/src/main/scala/sbt/Defaults.scala
@@ -88,8 +88,6 @@ import sbt.util.CacheImplicits.given
 import sbt.util.InterfaceUtil.{ t2, toJavaFunction => f1 }
 import sbt.util._
 import sjsonnew._
-import xsbti.compile.TastyFiles
-import xsbti.{ FileConverter, Position }

 import scala.annotation.nowarn
 import scala.collection.immutable.ListMap
@@ -108,7 +106,16 @@ import sbt.internal.inc.{
   ScalaInstance
 }
 import sbt.internal.io.Retry
-import xsbti.{ CompileFailed, CrossValue, HashedVirtualFileRef, VirtualFile, VirtualFileRef }
+import xsbti.{
+  AppConfiguration,
+  CompileFailed,
+  CrossValue,
+  FileConverter,
+  HashedVirtualFileRef,
+  Position,
+  VirtualFile,
+  VirtualFileRef
+}
 import xsbti.compile.{
   AnalysisContents,
   AnalysisStore,
@@ -129,6 +136,7 @@ import xsbti.compile.{
   PerClasspathEntryLookup,
   PreviousResult,
   Setup,
+  TastyFiles,
   TransactionalManagerType
 }

@@ -232,8 +240,28 @@ object Defaults extends BuildCommon {
     closeClassLoaders :== SysProp.closeClassLoaders,
     allowZombieClassLoaders :== true,
     packageTimestamp :== Pkg.defaultTimestamp,
+    rootPaths := {
+      val app = appConfiguration.value
+      val coursierCache = csrCacheDirectory.value.toPath
+      val out = rootOutputDirectory.value
+      getRootPaths(out, app) + ("CSR_CACHE" -> coursierCache)
+    },
+    fileConverter := MappedFileConverter(rootPaths.value, allowMachinePath.value)
   ) ++ BuildServerProtocol.globalSettings

+  private[sbt] def getRootPaths(out: NioPath, app: AppConfiguration): ListMap[String, NioPath] =
+    val base = app.baseDirectory.getCanonicalFile.toPath
+    val boot = app.provider.scalaProvider.launcher.bootDirectory.toPath
+    val ih = app.provider.scalaProvider.launcher.ivyHome.toPath
+    val javaHome = Paths.get(sys.props("java.home"))
+    ListMap(
+      "OUT" -> out,
+      "BASE" -> base,
+      "SBT_BOOT" -> boot,
+      "IVY_HOME" -> ih,
+      "JAVA_HOME" -> javaHome
+    )
+
   private[sbt] lazy val globalIvyCore: Seq[Setting[_]] =
     Seq(
       internalConfigurationMap :== Configurations.internalMap _,
@@ -282,6 +310,10 @@ object Defaults extends BuildCommon {
     csrLogger := LMCoursier.coursierLoggerTask.value,
     csrMavenProfiles :== Set.empty,
     csrReconciliations :== LMCoursier.relaxedForAllModules,
+    csrCacheDirectory := {
+      if (useCoursier.value) LMCoursier.defaultCacheLocation
+      else Classpaths.dummyCoursierDirectory(appConfiguration.value)
+    }
   )

   /** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */
@@ -410,24 +442,6 @@ object Defaults extends BuildCommon {
   private[sbt] lazy val buildLevelJvmSettings: Seq[Setting[_]] = Seq(
     exportPipelining := usePipelining.value,
-    rootPaths := {
-      val app = appConfiguration.value
-      val base = app.baseDirectory.getCanonicalFile
-      val boot = app.provider.scalaProvider.launcher.bootDirectory
-      val ih = app.provider.scalaProvider.launcher.ivyHome
-      val coursierCache = csrCacheDirectory.value
-      val javaHome = Paths.get(sys.props("java.home"))
-      val out = rootOutputDirectory.value
-      ListMap(
-        "OUT" -> out,
-        "BASE" -> base.toPath,
-        "SBT_BOOT" -> boot.toPath,
-        "CSR_CACHE" -> coursierCache.toPath,
-        "IVY_HOME" -> ih.toPath,
-        "JAVA_HOME" -> javaHome,
-      )
-    },
-    fileConverter := MappedFileConverter(rootPaths.value, allowMachinePath.value),
     sourcePositionMappers := Nil, // Never set a default sourcePositionMapper, see #6352! Whatever you are trying to solve, do it in the foldMappers method.
     // The virtual file value cache needs to be global or sbt will run out of direct byte buffer memory.
     classpathDefinesClassCache := VirtualFileValueCache.definesClassCache(fileConverter.value),
@@ -526,14 +540,6 @@ object Defaults extends BuildCommon {
         .getOrElse(pos)
     }

-  // csrCacheDirectory is scoped to ThisBuild to allow customization.
-  private[sbt] lazy val buildLevelIvySettings: Seq[Setting[_]] = Seq(
-    csrCacheDirectory := {
-      if (useCoursier.value) LMCoursier.defaultCacheLocation
-      else Classpaths.dummyCoursierDirectory(appConfiguration.value)
-    },
-  )
-
   def defaultTestTasks(key: Scoped): Seq[Setting[_]] =
     inTask(key)(
       Seq(
diff --git a/main/src/main/scala/sbt/RemoteCache.scala b/main/src/main/scala/sbt/RemoteCache.scala
index f2581004a..31c526c37 100644
--- a/main/src/main/scala/sbt/RemoteCache.scala
+++ b/main/src/main/scala/sbt/RemoteCache.scala
@@ -22,7 +22,7 @@ import sbt.ProjectExtra.*
 import sbt.ScopeFilter.Make._
 import sbt.SlashSyntax0._
 import sbt.coursierint.LMCoursier
-import sbt.internal.inc.{ HashUtil, JarUtils }
+import sbt.internal.inc.{ MappedFileConverter, HashUtil, JarUtils }
 import sbt.internal.librarymanagement._
 import sbt.internal.remotecache._
 import sbt.io.IO
@@ -36,7 +36,7 @@ import sbt.std.TaskExtra._
 import sbt.util.InterfaceUtil.toOption
 import sbt.util.{ ActionCacheStore, AggregateActionCacheStore, DiskActionCacheStore, Logger }
 import sjsonnew.JsonFormat
-import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
+import xsbti.{ FileConverter, HashedVirtualFileRef, VirtualFileRef }
 import xsbti.compile.CompileAnalysis

 import scala.collection.mutable
@@ -46,8 +46,6 @@ object RemoteCache {
   final val cachedTestClassifier = "cached-test"
   final val commitLength = 10

-  def cacheStore: ActionCacheStore = Def.cacheStore
-
   // TODO: cap with caffeine
   private[sbt] val analysisStore: mutable.Map[HashedVirtualFileRef, CompileAnalysis] =
     mutable.Map.empty
@@ -56,14 +54,22 @@ object RemoteCache {
   // currently this is called twice so metabuild can call compile with a minimal setting
   private[sbt] def initializeRemoteCache(s: State): Unit =
     val outDir =
-      s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath())
+      s.get(BasicKeys.rootOutputDirectory).getOrElse((s.baseDir / "target" / "out").toPath)
     Def._outputDirectory = Some(outDir)
-    val caches = s.get(BasicKeys.cacheStores)
-    caches match
-      case Some(xs) if xs.nonEmpty => Def._cacheStore = AggregateActionCacheStore(xs)
-      case _ =>
-        val tempDiskCache = (s.baseDir / "target" / "bootcache").toPath()
-        Def._cacheStore = DiskActionCacheStore(tempDiskCache)
+    def defaultCache =
+      val fileConverter = s
+        .get(Keys.fileConverter.key)
+        .getOrElse {
+          MappedFileConverter(
+            Defaults.getRootPaths(outDir, s.configuration),
+            allowMachinePath = true
+          )
+        }
+      DiskActionCacheStore((s.baseDir / "target" / "bootcache").toPath, fileConverter)
+    Def._cacheStore = s
+      .get(BasicKeys.cacheStores)
+      .collect { case xs if xs.nonEmpty => AggregateActionCacheStore(xs) }
+      .getOrElse(defaultCache)

   private[sbt] def artifactToStr(art: Artifact): String = {
     import LibraryManagementCodec._
@@ -104,7 +110,7 @@ object RemoteCache {
     },
     cacheStores := {
       List(
-        DiskActionCacheStore(localCacheDirectory.value.toPath())
+        DiskActionCacheStore(localCacheDirectory.value.toPath(), fileConverter.value)
       )
     },
   )
diff --git a/main/src/main/scala/sbt/plugins/IvyPlugin.scala b/main/src/main/scala/sbt/plugins/IvyPlugin.scala
index 97a7a1371..a459e1fa3 100644
--- a/main/src/main/scala/sbt/plugins/IvyPlugin.scala
+++ b/main/src/main/scala/sbt/plugins/IvyPlugin.scala
@@ -28,8 +28,6 @@ object IvyPlugin extends AutoPlugin {

   override lazy val globalSettings: Seq[Setting[_]] = Defaults.globalIvyCore

-  override lazy val buildSettings: Seq[Setting[_]] =
-    Defaults.buildLevelIvySettings
   override lazy val projectSettings: Seq[Setting[_]] =
     Classpaths.ivyPublishSettings ++ Classpaths.ivyBaseSettings
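Note on the rootPaths move above: keeping the root-path map ("OUT", "BASE", "SBT_BOOT", "IVY_HOME", "JAVA_HOME", plus "CSR_CACHE") in the global settings is what allows fileConverter to rewrite absolute machine paths into portable ids such as "${OUT}/..." before they reach the cache. The sketch below only illustrates that idea; it is not the sbt.internal.inc.MappedFileConverter that the patch actually wires in, and SimpleRootedConverter and its methods are hypothetical names.

// Illustrative only: a stripped-down converter showing how a root-path map
// turns absolute paths into portable ids and back.
import java.nio.file.{ Path, Paths }
import scala.collection.immutable.ListMap

final class SimpleRootedConverter(rootPaths: ListMap[String, Path]):
  // "/home/user/proj/target/out/..." becomes "${OUT}/..." so ids stay machine-independent
  def toId(p: Path): String =
    rootPaths
      .collectFirst {
        case (key, root) if p.startsWith(root) =>
          s"$${$key}/" + root.relativize(p).toString.replace('\\', '/')
      }
      .getOrElse(p.toString)

  // "${OUT}/..." resolves back to an absolute path on this (or another) machine
  def toPath(id: String): Path =
    rootPaths
      .collectFirst {
        case (key, root) if id.startsWith(s"$${$key}/") =>
          root.resolve(id.stripPrefix(s"$${$key}/"))
      }
      .getOrElse(Paths.get(id))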
diff --git a/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala
index adaf2804b..1952cbc62 100644
--- a/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala
+++ b/util-cache/src/main/scala/sbt/util/ActionCacheStore.scala
@@ -10,7 +10,7 @@ import scala.reflect.ClassTag
 import scala.util.control.NonFatal
 import sbt.io.IO
 import sbt.io.syntax.*
-import xsbti.{ HashedVirtualFileRef, PathBasedFile, VirtualFile }
+import xsbti.{ FileConverter, HashedVirtualFileRef, PathBasedFile, VirtualFile }

 /**
  * An abstraction of a remote or local cache store.
@@ -129,7 +129,7 @@ class InMemoryActionCacheStore extends ActionCacheStore:
     underlying.toString()
 end InMemoryActionCacheStore

-class DiskActionCacheStore(base: Path) extends ActionCacheStore:
+class DiskActionCacheStore(base: Path, fileConverter: FileConverter) extends ActionCacheStore:
   lazy val casBase: Path = {
     val dir = base.resolve("cas")
     IO.createDirectory(dir.toFile)
@@ -181,13 +181,10 @@ class DiskActionCacheStore(base: Path) extends ActionCacheStore:
     else None

   override def syncBlobs(refs: Seq[HashedVirtualFileRef], outputDirectory: Path): Seq[Path] =
-    refs.flatMap: r =>
-      val casFile = casBase.toFile / Digest(r.contentHashStr).toString
+    refs.flatMap: ref =>
+      val casFile = casBase.toFile / Digest(ref.contentHashStr).toString
       if casFile.exists then
-        val shortPath =
-          if r.id.startsWith("${OUT}/") then r.id.drop(7)
-          else r.id
-        val outPath = outputDirectory.resolve(shortPath)
+        val outPath = fileConverter.toPath(ref)
         Files.createDirectories(outPath.getParent())
         if outPath.toFile().exists() then IO.delete(outPath.toFile())
         Some(Files.createSymbolicLink(outPath, casFile.toPath))
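The constructor change above means every DiskActionCacheStore now needs a FileConverter, so that syncBlobs can resolve a cached reference to its real output path instead of stripping a "${OUT}/" prefix by hand. A minimal way to stand one up, mirroring the fileConverter fixture added to the test below (identityConverter is a hypothetical name, and the identity mapping via Paths.get only makes sense for absolute-path ids):

import java.nio.file.{ Files, Path, Paths }
import sbt.internal.util.StringVirtualFile1
import sbt.util.DiskActionCacheStore
import xsbti.{ FileConverter, VirtualFile, VirtualFileRef }

// Maps a reference id straight to a path and reads the file back as a virtual file.
val identityConverter = new FileConverter:
  override def toPath(ref: VirtualFileRef): Path = Paths.get(ref.id)
  override def toVirtualFile(path: Path): VirtualFile =
    val content = if Files.isRegularFile(path) then new String(Files.readAllBytes(path)) else ""
    StringVirtualFile1(path.toString, content)

// A disk cache rooted at target/bootcache that materializes blobs through the converter.
val store = DiskActionCacheStore(Paths.get("target", "bootcache"), identityConverter)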
diff --git a/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala b/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala
index 98af47ab8..18b785bfe 100644
--- a/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala
+++ b/util-cache/src/test/scala/sbt/util/ActionCacheTest.scala
@@ -4,7 +4,13 @@ import sbt.internal.util.StringVirtualFile1
 import sbt.io.IO
 import sbt.io.syntax.*
 import verify.BasicTestSuite
+import xsbti.FileConverter
 import xsbti.VirtualFile
+import xsbti.VirtualFileRef
+
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.Paths

 object ActionCacheTest extends BasicTestSuite:
   val tags = CacheLevelTag.all.toList
@@ -13,10 +19,10 @@ object ActionCacheTest extends BasicTestSuite:
     withDiskCache(testHoldBlob)

   def testHoldBlob(cache: ActionCacheStore): Unit =
-    val in = StringVirtualFile1("a.txt", "foo")
-    val hashRefs = cache.putBlobs(in :: Nil)
-    assert(hashRefs.size == 1)
     IO.withTemporaryDirectory: tempDir =>
+      val in = StringVirtualFile1(s"$tempDir/a.txt", "foo")
+      val hashRefs = cache.putBlobs(in :: Nil)
+      assert(hashRefs.size == 1)
       val actual = cache.syncBlobs(hashRefs, tempDir.toPath()).head
       assert(actual.getFileName().toString() == "a.txt")

@@ -48,14 +54,14 @@ object ActionCacheTest extends BasicTestSuite:
     withDiskCache(testActionCacheWithBlob)

   def testActionCacheWithBlob(cache: ActionCacheStore): Unit =
-    import sjsonnew.BasicJsonProtocol.*
-    var called = 0
-    val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) =>
-      called += 1
-      val out = StringVirtualFile1("a.txt", (a + b).toString)
-      (a + b, Seq(out))
-    }
     IO.withTemporaryDirectory: (tempDir) =>
+      import sjsonnew.BasicJsonProtocol.*
+      var called = 0
+      val action: ((Int, Int)) => (Int, Seq[VirtualFile]) = { case (a, b) =>
+        called += 1
+        val out = StringVirtualFile1(s"$tempDir/a.txt", (a + b).toString)
+        (a + b, Seq(out))
+      }
       val config = BuildWideCacheConfiguration(cache, tempDir.toPath())
       val v1 =
         ActionCache.cache[(Int, Int), Int]((1, 1), Digest.zero, Digest.zero, tags)(action)(config)
@@ -81,9 +87,15 @@ object ActionCacheTest extends BasicTestSuite:
     IO.withTemporaryDirectory(
       { tempDir0 =>
         val tempDir = tempDir0.toPath
-        val cache = DiskActionCacheStore(tempDir)
+        val cache = DiskActionCacheStore(tempDir, fileConverter)
         f(cache)
       },
       keepDirectory = false
     )
+
+  def fileConverter = new FileConverter:
+    override def toPath(ref: VirtualFileRef): Path = Paths.get(ref.id)
+    override def toVirtualFile(path: Path): VirtualFile =
+      val content = if Files.isRegularFile(path) then new String(Files.readAllBytes(path)) else ""
+      StringVirtualFile1(path.toString, content)
 end ActionCacheTest

From 8865565004a4c1b855526a96b1a9c3b340bf9100 Mon Sep 17 00:00:00 2001
From: Adrien Piquerez
Date: Wed, 10 Apr 2024 11:50:59 +0200
Subject: [PATCH 31/31] Fix classloader-cache/resources

---
 main/src/main/scala/sbt/Defaults.scala        | 37 +++++++++++++------
 main/src/main/scala/sbt/Keys.scala            |  2 +-
 sbt-app/src/sbt-test/actions/call/build.sbt   |  7 ++--
 sbt-app/src/sbt-test/actions/call/test        |  2 -
 .../sbt-test/actions/compile-clean/build.sbt  |  3 +-
 .../src/sbt-test/actions/compile-clean/test   | 26 ++++++-------
 .../resources/{pending => test}               |  2 +-
 .../sbt-test/java/track-anonymous/build.sbt   |  1 +
 .../src/sbt-test/java/track-anonymous/test    |  2 -
 .../source-dependencies/compactify/build.sbt  | 10 +++--
 .../test/scala/testpkg/BuildServerTest.scala  |  2 +-
 11 files changed, 53 insertions(+), 41 deletions(-)
 rename sbt-app/src/sbt-test/classloader-cache/resources/{pending => test} (97%)

diff --git a/main/src/main/scala/sbt/Defaults.scala b/main/src/main/scala/sbt/Defaults.scala
index f41c7fda5..93905ea88 100644
--- a/main/src/main/scala/sbt/Defaults.scala
+++ b/main/src/main/scala/sbt/Defaults.scala
@@ -663,7 +663,7 @@ object Defaults extends BuildCommon {
     classDirectory := target.value / (prefix(configuration.value.name) + "classes"),
     backendOutput := {
       val converter = fileConverter.value
-      val dir = classDirectory.value
+      val dir = target.value / (prefix(configuration.value.name) + "backend")
       converter.toVirtualFile(dir.toPath)
     },
     earlyOutput / artifactPath := configArtifactPathSetting(artifact, "early").value,
@@ -913,11 +913,16 @@ object Defaults extends BuildCommon {
     tastyFiles := Def.taskIf {
       if (ScalaArtifacts.isScala3(scalaVersion.value)) {
         val _ = compile.value
-        val tastyFiles = classDirectory.value.**("*.tasty").get()
+        val c = fileConverter.value
+        val dir = c.toPath(backendOutput.value).toFile
+        val tastyFiles = dir.**("*.tasty").get()
         tastyFiles.map(_.getAbsoluteFile)
       } else Nil
     }.value,
-    clean := (compileOutputs / clean).value,
+    clean := {
+      (compileOutputs / clean).value
+      (products / clean).value
+    },
     earlyOutputPing := Def.promise[Boolean],
     compileProgress := {
       val s = streams.value
@@ -2458,8 +2463,14 @@ object Defaults extends BuildCommon {
       val reporter = (compile / bspReporter).value
       val store = analysisStore(compileAnalysisFile)
       val ci = (compile / compileInputs).value
+      val c = fileConverter.value
+      val dir = c.toPath(backendOutput.value).toFile
       result match
         case Result.Value(res) =>
+          val rawJarPath = c.toPath(res._2)
+          IO.delete(dir)
+          IO.unzip(rawJarPath.toFile, dir)
+          IO.delete(dir / "META-INF" / "MANIFEST.MF")
           val analysis = store.unsafeGet().getAnalysis()
           reporter.sendSuccessReport(analysis)
           bspTask.notifySuccess(analysis)
@@ -2497,9 +2508,7 @@ object Defaults extends BuildCommon {
       val contents = AnalysisContents.create(analysisResult.analysis(), analysisResult.setup())
       store.set(contents)
       Def.declareOutput(analysisOut)
-      val dir = classDirectory.value
-      if (dir / "META-INF" / "MANIFEST.MF").exists then IO.delete(dir / "META-INF" / "MANIFEST.MF")
-      // inline mappings
+      val dir = ci.options.classesDirectory.toFile()
       val mappings = Path
         .allSubpaths(dir)
         .filter(_._1.isFile())
@@ -4233,14 +4242,18 @@ object Classpaths {
   def makeProducts: Initialize[Task[Seq[File]]] =
     Def.task {
       val c = fileConverter.value
-      Def.unit(copyResources.value)
-      Def.unit(compile.value)
-      val dir = c.toPath(backendOutput.value)
+      val resources = copyResources.value.map(_._2).toSet
+      val dir = classDirectory.value
       val rawJar = compileIncremental.value._2
       val rawJarPath = c.toPath(rawJar)
-      IO.unzip(rawJarPath.toFile, dir.toFile)
-      IO.delete(dir.toFile / "META-INF" / "MANIFEST.MF")
-      dir.toFile :: Nil
+      // delete outdated files
+      Path
+        .allSubpaths(dir)
+        .collect { case (f, _) if f.isFile() && !resources.contains(f) => f }
+        .foreach(IO.delete)
+      IO.unzip(rawJarPath.toFile, dir)
+      IO.delete(dir / "META-INF" / "MANIFEST.MF")
+      dir :: Nil
     }

   private[sbt] def makePickleProducts: Initialize[Task[Seq[VirtualFile]]] = Def.task {
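With backendOutput now pointing at a separate backend directory and classDirectory only being populated when products unpacks the backend jar, any task that reads classDirectory directly has to depend on Compile / products first; the scripted builds changed below follow exactly that pattern. A minimal build.sbt sketch of the same pattern (listClasses is a hypothetical key):

val listClasses = taskKey[Seq[File]]("Lists the class files visible in the Compile class directory")

listClasses := {
  val _ = (Compile / products).value        // populates classDirectory from the backend jar
  val dir = (Compile / classDirectory).value
  (dir ** "*.class").get()
}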
diff --git a/main/src/main/scala/sbt/Keys.scala b/main/src/main/scala/sbt/Keys.scala
index 61ee89863..df617d8b4 100644
--- a/main/src/main/scala/sbt/Keys.scala
+++ b/main/src/main/scala/sbt/Keys.scala
@@ -174,7 +174,7 @@ object Keys {
   @cacheLevel(include = Array.empty)
   val classDirectory = settingKey[File]("Directory for compiled classes and copied resources.").withRank(AMinusSetting)
   val earlyOutput = settingKey[VirtualFile]("JAR file for pickles used for build pipelining")
-  val backendOutput = settingKey[VirtualFile]("Directory or JAR file for compiled classes and copied resources")
+  val backendOutput = settingKey[VirtualFile]("Output directory of the compiler backend")
   val cleanFiles = taskKey[Seq[File]]("The files to recursively delete during a clean.").withRank(BSetting)
   val cleanKeepFiles = settingKey[Seq[File]]("Files or directories to keep during a clean. Must be direct children of target.").withRank(CSetting)
   val cleanKeepGlobs = settingKey[Seq[Glob]]("Globs to keep during a clean. Must be direct children of target.").withRank(CSetting)
diff --git a/sbt-app/src/sbt-test/actions/call/build.sbt b/sbt-app/src/sbt-test/actions/call/build.sbt
index 720ea3346..c3ef319f3 100644
--- a/sbt-app/src/sbt-test/actions/call/build.sbt
+++ b/sbt-app/src/sbt-test/actions/call/build.sbt
@@ -3,7 +3,8 @@ sbtPlugin := true
 val copyOutputDir = taskKey[Unit]("Copies the compiled classes to a root-level directory")

 copyOutputDir := {
-  val cd = (Compile / classDirectory).value
-  val to = baseDirectory.value / "out spaced"
-  IO.copyDirectory(cd, to)
+  val _ = (Compile / products).value
+  val cd = (Compile / classDirectory).value
+  val to = baseDirectory.value / "out spaced"
+  IO.copyDirectory(cd, to)
 }
diff --git a/sbt-app/src/sbt-test/actions/call/test b/sbt-app/src/sbt-test/actions/call/test
index 30bf3e1e8..465f4f71d 100644
--- a/sbt-app/src/sbt-test/actions/call/test
+++ b/sbt-app/src/sbt-test/actions/call/test
@@ -1,5 +1,3 @@
-# compiles a new State => State instance
-> compile
 # puts the classes in a stable location (out spaced/ to test escaping)
 > copyOutputDir

diff --git a/sbt-app/src/sbt-test/actions/compile-clean/build.sbt b/sbt-app/src/sbt-test/actions/compile-clean/build.sbt
index e6774e270..eb4421492 100644
--- a/sbt-app/src/sbt-test/actions/compile-clean/build.sbt
+++ b/sbt-app/src/sbt-test/actions/compile-clean/build.sbt
@@ -1,6 +1,7 @@
 import sbt.nio.file.Glob

+Global / cacheStores := Seq.empty
 name := "compile-clean"
 scalaVersion := "2.12.17"
 Compile / cleanKeepGlobs +=
-  Glob((Compile / compile / classDirectory).value, "X.class")
+  Glob(target.value) / RecursiveGlob / "X.class"
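The keep-glob above uses the sbt.nio Glob DSL: Glob(target.value) / RecursiveGlob / "X.class" matches an X.class at any depth under target, which is needed now that class files live in the nested backend and class directories rather than directly under the old class directory. A hedged build.sbt sketch of the same DSL (the *.golden entry is purely illustrative):

import sbt.nio.file.{ Glob, RecursiveGlob }

Compile / cleanKeepGlobs ++= Seq(
  Glob(target.value) / RecursiveGlob / "X.class",   // keep X.class at any depth under target/
  Glob(target.value) / RecursiveGlob / "*.golden"   // illustrative: keep golden fixtures too
)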
diff --git a/sbt-app/src/sbt-test/actions/compile-clean/test b/sbt-app/src/sbt-test/actions/compile-clean/test
index 14bce965a..83a5a81db 100644
--- a/sbt-app/src/sbt-test/actions/compile-clean/test
+++ b/sbt-app/src/sbt-test/actions/compile-clean/test
@@ -1,19 +1,17 @@
-$ touch target/cant-touch-this
+$ touch target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this

-> Test/products
-$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class
-$ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class
+> Test/compile
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/test-backend/B.class

 > Test/clean
-$ exists target/cant-touch-this
-$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/A.class
-$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class
-$ absent target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
+$ absent target/out/jvm/scala-2.12.17/compile-clean/test-backend/B.class

-# compiling everything again, but now cleaning only compile classes
-> Test/products
 > Compile/clean
-$ exists target/cant-touch-this
-$ absent target/out/jvm/scala-2.12.17/compile-clean/classes/A.class
-$ exists target/out/jvm/scala-2.12.17/compile-clean/test-classes/B.class
-$ exists target/out/jvm/scala-2.12.17/compile-clean/classes/X.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/cant-touch-this
+$ absent target/out/jvm/scala-2.12.17/compile-clean/backend/A.class
+$ exists target/out/jvm/scala-2.12.17/compile-clean/backend/X.class
diff --git a/sbt-app/src/sbt-test/classloader-cache/resources/pending b/sbt-app/src/sbt-test/classloader-cache/resources/test
similarity index 97%
rename from sbt-app/src/sbt-test/classloader-cache/resources/pending
rename to sbt-app/src/sbt-test/classloader-cache/resources/test
index 57e1f61f5..63aa35f05 100644
--- a/sbt-app/src/sbt-test/classloader-cache/resources/pending
+++ b/sbt-app/src/sbt-test/classloader-cache/resources/test
@@ -12,4 +12,4 @@ $ copy-file changes/updated-test.txt src/test/resources/bar.txt

 $ copy-file changes/UpdatedResourceTest.scala src/test/scala/scripted/ResourceTest.scala

-> test
\ No newline at end of file
+> test
diff --git a/sbt-app/src/sbt-test/java/track-anonymous/build.sbt b/sbt-app/src/sbt-test/java/track-anonymous/build.sbt
index 1dacbc629..ede90b6c6 100644
--- a/sbt-app/src/sbt-test/java/track-anonymous/build.sbt
+++ b/sbt-app/src/sbt-test/java/track-anonymous/build.sbt
@@ -3,6 +3,7 @@ val parser = token(Space ~> ( ("exists" ^^^ true) | ("absent" ^^^ false) ) )

 InputKey[Unit]("checkOutput") := {
   val shouldExist = parser.parsed
+  val _ = (Compile / products).value
   val dir = (Compile / classDirectory).value
   if((dir / "Anon.class").exists != shouldExist)
     sys.error("Top level class incorrect" )
diff --git a/sbt-app/src/sbt-test/java/track-anonymous/test b/sbt-app/src/sbt-test/java/track-anonymous/test
index 2c48f4402..16f215752 100644
--- a/sbt-app/src/sbt-test/java/track-anonymous/test
+++ b/sbt-app/src/sbt-test/java/track-anonymous/test
@@ -1,6 +1,4 @@
 $ copy-file changes/Anon.java src/main/java/Anon.java
-> compile
 > checkOutput exists
 $ delete src/main/java/Anon.java
-> compile
 > checkOutput absent
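Checks that inspect the raw compiler output now resolve it through fileConverter and backendOutput rather than classDirectory, as the compactify outputEmpty task below and the tastyFiles change earlier in this patch do. A minimal sketch of such a task for a build.sbt (listTasty is a hypothetical key):

val listTasty = taskKey[Seq[File]]("Lists .tasty files produced by the compiler backend")

listTasty := {
  val _ = (Compile / compile).value                // make sure the backend has run
  val converter = fileConverter.value
  val backendDir = converter.toPath((Compile / backendOutput).value).toFile
  (backendDir ** "*.tasty").get()
}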
diff --git a/sbt-app/src/sbt-test/source-dependencies/compactify/build.sbt b/sbt-app/src/sbt-test/source-dependencies/compactify/build.sbt
index d6e89fe63..10eb6d602 100644
--- a/sbt-app/src/sbt-test/source-dependencies/compactify/build.sbt
+++ b/sbt-app/src/sbt-test/source-dependencies/compactify/build.sbt
@@ -1,7 +1,9 @@
-TaskKey[Unit]("outputEmpty") := ((Configurations.Compile / classDirectory) map { outputDirectory =>
-  def classes = (outputDirectory ** "*.class").get()
-  if (!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t")) else ()
-}).value
+TaskKey[Unit]("outputEmpty") := {
+  val c = fileConverter.value
+  val dir = c.toPath((Compile / backendOutput).value).toFile()
+  def classes = dir.**("*.class").get()
+  if (!classes.isEmpty) sys.error("Classes existed:\n\t" + classes.mkString("\n\t"))
+}

 // apparently Travis CI stopped allowing long file names
 // it fails with the default setting of 255 characters so
diff --git a/server-test/src/test/scala/testpkg/BuildServerTest.scala b/server-test/src/test/scala/testpkg/BuildServerTest.scala
index 3a55469f5..32a6f1f22 100644
--- a/server-test/src/test/scala/testpkg/BuildServerTest.scala
+++ b/server-test/src/test/scala/testpkg/BuildServerTest.scala
@@ -254,7 +254,7 @@ class BuildServerTest extends AbstractServerTest {

   test("buildTarget/cleanCache") {
     def classFile = svr.baseDirectory.toPath.resolve(
-      "target/out/jvm/scala-2.13.8/runandtest/classes/main/Main.class"
+      "target/out/jvm/scala-2.13.8/runandtest/backend/main/Main.class"
     )
     val buildTarget = buildTargetUri("runAndTest", "Compile")
     compile(buildTarget, id = 43)