mirror of https://github.com/sbt/sbt.git
Merge branch 'develop' into 2.x-fuse-info
This commit is contained in:
commit
4c66e5a907
|
|
@ -9,19 +9,12 @@
|
|||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.internal.util.{
|
||||
AttributeEntry,
|
||||
AttributeKey,
|
||||
LineRange,
|
||||
MessageOnlyException,
|
||||
RangePosition,
|
||||
Settings
|
||||
}
|
||||
import sbt.internal.util.{ AttributeKey, LineRange, MessageOnlyException, RangePosition }
|
||||
|
||||
import java.io.File
|
||||
import java.nio.file.Path
|
||||
import sbt.internal.util.complete.DefaultParsers.validID
|
||||
import Def.{ ScopedKey, Setting }
|
||||
import Def.{ ScopedKey, Setting, Settings }
|
||||
import Scope.GlobalScope
|
||||
import sbt.SlashSyntax0.given
|
||||
import sbt.internal.parser.SbtParser
|
||||
|
|
@ -361,9 +354,6 @@ object Index {
|
|||
result.asScala.toSet
|
||||
}
|
||||
|
||||
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[?]] =
|
||||
settings.data.values.flatMap(_.keys).toSet[AttributeKey[?]]
|
||||
|
||||
def stringToKeyMap(settings: Set[AttributeKey[?]]): Map[String, AttributeKey[?]] =
|
||||
stringToKeyMap0(settings)(_.label)
|
||||
|
||||
|
|
@ -385,19 +375,17 @@ object Index {
|
|||
|
||||
private type TriggerMap = collection.mutable.HashMap[TaskId[?], Seq[TaskId[?]]]
|
||||
|
||||
def triggers(ss: Settings[Scope]): Triggers = {
|
||||
def triggers(ss: Settings): Triggers = {
|
||||
val runBefore = new TriggerMap
|
||||
val triggeredBy = new TriggerMap
|
||||
for
|
||||
a <- ss.data.values
|
||||
case AttributeEntry(_, base: Task[?]) <- a.entries
|
||||
do
|
||||
ss.values.collect { case base: Task[?] =>
|
||||
def update(map: TriggerMap, key: AttributeKey[Seq[Task[?]]]): Unit =
|
||||
base.getOrElse(key, Seq.empty).foreach { task =>
|
||||
map(task) = base +: map.getOrElse(task, Nil)
|
||||
}
|
||||
update(runBefore, Def.runBefore)
|
||||
update(triggeredBy, Def.triggeredBy)
|
||||
}
|
||||
val onComplete = (GlobalScope / Def.onComplete).get(ss).getOrElse(() => ())
|
||||
new Triggers(runBefore, triggeredBy, map => { onComplete(); map })
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,11 +20,11 @@ object DependencyTreePlugin extends AutoPlugin {
|
|||
val configurations = Vector(Compile, Test, IntegrationTest, Runtime, Provided, Optional)
|
||||
|
||||
// MiniDependencyTreePlugin provides baseBasicReportingSettings for Compile and Test
|
||||
override def projectSettings: Seq[Def.Setting[?]] =
|
||||
((configurations diff Vector(Compile, Test)) flatMap { config =>
|
||||
override lazy val projectSettings: Seq[Def.Setting[?]] =
|
||||
configurations.diff(Vector(Compile, Test)).flatMap { config =>
|
||||
inConfig(config)(DependencyTreeSettings.baseBasicReportingSettings)
|
||||
}) ++
|
||||
(configurations flatMap { config =>
|
||||
} ++
|
||||
configurations.flatMap { config =>
|
||||
inConfig(config)(DependencyTreeSettings.baseFullReportingSettings)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -54,7 +54,8 @@ trait BuildSyntax:
|
|||
end BuildSyntax
|
||||
|
||||
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
|
||||
object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
|
||||
object Def extends BuildSyntax with Init with InitializeImplicits:
|
||||
type ScopeType = Scope
|
||||
type Classpath = Seq[Attributed[HashedVirtualFileRef]]
|
||||
|
||||
def settings(ss: SettingsDefinition*): Seq[Setting[?]] = ss.flatMap(_.settings)
|
||||
|
|
|
|||
|
|
@ -154,10 +154,9 @@ object Previous {
|
|||
|
||||
/** Public as a macro implementation detail. Do not call directly. */
|
||||
def runtime[T](skey: TaskKey[T])(implicit format: JsonFormat[T]): Initialize[Task[Option[T]]] = {
|
||||
val inputs = (Global / cache)
|
||||
.zip(Def.validated(skey, selfRefOk = true))
|
||||
.zip(Global / references)
|
||||
inputs { case ((prevTask, resolved), refs) =>
|
||||
type Inputs = (Task[Previous], ScopedKey[Task[T]], References)
|
||||
val inputs = (Global / cache, Def.validated(skey, selfRefOk = true), Global / references)
|
||||
Def.app[Inputs, Task[Option[T]]](inputs) { case (prevTask, resolved, refs) =>
|
||||
val key = Key(resolved, resolved)
|
||||
refs.recordReference(key, format) // always evaluated on project load
|
||||
prevTask.map(_.get(key)) // evaluated if this task is evaluated
|
||||
|
|
@ -168,14 +167,17 @@ object Previous {
|
|||
def runtimeInEnclosingTask[T](skey: TaskKey[T])(implicit
|
||||
format: JsonFormat[T]
|
||||
): Initialize[Task[Option[T]]] = {
|
||||
val inputs = (Global / cache)
|
||||
.zip(Def.validated(skey, selfRefOk = true))
|
||||
.zip(Global / references)
|
||||
.zip(Def.resolvedScoped)
|
||||
inputs { case (((prevTask, resolved), refs), inTask) =>
|
||||
type Inputs = (Task[Previous], ScopedKey[Task[T]], References, ScopedKey[?])
|
||||
val inputs = (
|
||||
Global / cache,
|
||||
Def.validated(skey, selfRefOk = true),
|
||||
Global / references,
|
||||
Def.resolvedScoped
|
||||
)
|
||||
Def.app[Inputs, Task[Option[T]]](inputs) { case (prevTask, resolved, refs, inTask) =>
|
||||
val key = Key(resolved, inTask.asInstanceOf[ScopedKey[Task[Any]]])
|
||||
refs.recordReference(key, format) // always evaluated on project load
|
||||
prevTask.map(_.get(key)) // evaluated if this task is evaluated
|
||||
prevTask.map(_.get(key))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -334,7 +334,7 @@ object Project:
|
|||
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
|
||||
|
||||
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
|
||||
[a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
|
||||
[a] => (k: ScopedKey[a]) => k.copy(scope = f(k.scope))
|
||||
|
||||
def transform(g: Scope => Scope, ss: Seq[Def.Setting[?]]): Seq[Def.Setting[?]] =
|
||||
// We use caching to avoid creating new Scope instances too many times
|
||||
|
|
@ -361,7 +361,10 @@ object Project:
|
|||
Project.transform(Scope.replaceThis(scope), ss)
|
||||
|
||||
private[sbt] def inScope[A](scope: Scope, i: Initialize[A]): Initialize[A] =
|
||||
i.mapReferenced(Project.mapScope(Scope.replaceThis(scope)))
|
||||
i.mapReferenced(replaceThis(scope))
|
||||
|
||||
private[sbt] def replaceThis(scope: Scope): Def.MapScoped =
|
||||
mapScope(Scope.replaceThis(scope))
|
||||
|
||||
/**
|
||||
* Normalize a String so that it is suitable for use as a dependency management module identifier.
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ package sbt
|
|||
import scala.annotation.targetName
|
||||
|
||||
import sbt.internal.util.Types.*
|
||||
import sbt.internal.util.{ AttributeKey, KeyTag, Settings, SourcePosition }
|
||||
import sbt.internal.util.{ AttributeKey, KeyTag, SourcePosition }
|
||||
import sbt.internal.util.TupleMapExtension.*
|
||||
import sbt.util.OptJsonWriter
|
||||
import sbt.ConcurrentRestrictions.Tag
|
||||
|
|
@ -303,8 +303,7 @@ object Scoped:
|
|||
setting(scopedKey, app, source)
|
||||
|
||||
/** From the given `Settings`, extract the value bound to this key. */
|
||||
final def get(settings: Settings[Scope]): Option[A1] =
|
||||
settings.get(scopedKey.scope, scopedKey.key)
|
||||
final def get(settings: Def.Settings): Option[A1] = settings.get(scopedKey)
|
||||
|
||||
/**
|
||||
* Creates an [[Def.Initialize]] with value `scala.None` if there was no previous definition of this key,
|
||||
|
|
@ -460,7 +459,7 @@ object Scoped:
|
|||
|
||||
def toSettingKey: SettingKey[Task[A1]] = scopedSetting(scope, key)
|
||||
|
||||
def get(settings: Settings[Scope]): Option[Task[A1]] = settings.get(scope, key)
|
||||
def get(settings: Def.Settings): Option[Task[A1]] = settings.get(scopedKey)
|
||||
|
||||
/**
|
||||
* Creates an [[Def.Initialize]] with value `scala.None` if there was no previous definition of this key,
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ end ParserInstance
|
|||
|
||||
/** Composes the Task and Initialize Instances to provide an Instance for [A1] Initialize[Task[A1]]. */
|
||||
object FullInstance:
|
||||
type SS = sbt.internal.util.Settings[Scope]
|
||||
type SS = Def.Settings
|
||||
val settingsData = TaskKey[SS](
|
||||
"settings-data",
|
||||
"Provides access to the project data for the build.",
|
||||
|
|
|
|||
|
|
@ -5,134 +5,136 @@
|
|||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
/*
|
||||
package sbt.test
|
||||
|
||||
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._
|
||||
package sbt
|
||||
package test
|
||||
|
||||
import java.io.File
|
||||
import sbt.io.IO
|
||||
import sbt.{ Scope, ScopeAxis, Scoped, Select, This, Zero }
|
||||
import sbt.{
|
||||
BuildRef,
|
||||
LocalProject,
|
||||
LocalRootProject,
|
||||
ProjectRef,
|
||||
Reference,
|
||||
RootProject,
|
||||
ThisBuild,
|
||||
ThisProject
|
||||
}
|
||||
import sbt.ConfigKey
|
||||
import sbt.librarymanagement.syntax._
|
||||
import sbt.{ InputKey, SettingKey, TaskKey }
|
||||
import sbt.internal.util.{ AttributeKey, AttributeMap }
|
||||
import hedgehog.*
|
||||
import scala.annotation.nowarn
|
||||
import scala.reflect.ClassTag
|
||||
import _root_.sbt.io.IO
|
||||
import _root_.sbt.Scoped.ScopingSetting
|
||||
import _root_.sbt.librarymanagement.syntax.*
|
||||
import _root_.sbt.internal.util.{ AttributeKey, AttributeMap }
|
||||
|
||||
object BuildSettingsInstances {
|
||||
val genFile: Gen[File] = Gen.oneOf(new File("."), new File("/tmp")) // for now..
|
||||
object BuildSettingsInstances:
|
||||
type Key[A1] = ScopingSetting[?] & Scoped
|
||||
|
||||
implicit val arbBuildRef: Arbitrary[BuildRef] = Arbitrary(genFile map (f => BuildRef(IO toURI f)))
|
||||
|
||||
implicit val arbProjectRef: Arbitrary[ProjectRef] =
|
||||
Arbitrary(for (f <- genFile; id <- Gen.identifier) yield ProjectRef(f, id))
|
||||
|
||||
implicit val arbLocalProject: Arbitrary[LocalProject] =
|
||||
Arbitrary(arbitrary[String] map LocalProject)
|
||||
|
||||
implicit val arbRootProject: Arbitrary[RootProject] = Arbitrary(genFile map (RootProject(_)))
|
||||
|
||||
implicit val arbReference: Arbitrary[Reference] = Arbitrary {
|
||||
Gen.frequency(
|
||||
96 -> arbitrary[BuildRef],
|
||||
10271 -> ThisBuild,
|
||||
325 -> LocalRootProject,
|
||||
2283 -> arbitrary[ProjectRef],
|
||||
299 -> ThisProject,
|
||||
436 -> arbitrary[LocalProject],
|
||||
1133 -> arbitrary[RootProject],
|
||||
given Gen[Reference] =
|
||||
val genFile: Gen[File] =
|
||||
Gen.choice1(Gen.constant(new File(".")), Gen.constant(new File("/tmp")))
|
||||
given genBuildRef: Gen[BuildRef] = genFile.map: f =>
|
||||
BuildRef(IO.toURI(f))
|
||||
given genProjectRef: Gen[ProjectRef] =
|
||||
for
|
||||
f <- genFile
|
||||
id <- identifier
|
||||
yield ProjectRef(f, id)
|
||||
given genLocalProject: Gen[LocalProject] =
|
||||
identifier.map(LocalProject.apply)
|
||||
given genRootProject: Gen[RootProject] =
|
||||
genFile.map(RootProject.apply)
|
||||
Gen.frequency1(
|
||||
96 -> genBuildRef.map(x => x: Reference),
|
||||
10271 -> Gen.constant(ThisBuild),
|
||||
325 -> Gen.constant(LocalRootProject),
|
||||
2283 -> genProjectRef.map(x => x: Reference),
|
||||
299 -> Gen.constant(ThisProject),
|
||||
436 -> genLocalProject.map(x => x: Reference),
|
||||
1133 -> genRootProject.map(x => x: Reference),
|
||||
)
|
||||
}
|
||||
|
||||
@nowarn
|
||||
implicit def arbConfigKey: Arbitrary[ConfigKey] = Arbitrary {
|
||||
Gen.frequency(
|
||||
2 -> const[ConfigKey](Compile),
|
||||
2 -> const[ConfigKey](Test),
|
||||
1 -> const[ConfigKey](Runtime),
|
||||
1 -> const[ConfigKey](IntegrationTest),
|
||||
1 -> const[ConfigKey](Provided),
|
||||
)
|
||||
}
|
||||
|
||||
implicit def arbAttrKey[A: Manifest]: Arbitrary[AttributeKey[_]] =
|
||||
Arbitrary(Gen.identifier map (AttributeKey[A](_)))
|
||||
|
||||
implicit val arbAttributeMap: Arbitrary[AttributeMap] = Arbitrary {
|
||||
Gen.frequency(
|
||||
20 -> AttributeMap.empty,
|
||||
1 -> {
|
||||
for (name <- Gen.identifier; isModule <- arbitrary[Boolean])
|
||||
yield AttributeMap.empty
|
||||
.put(AttributeKey[String]("name"), name)
|
||||
.put(AttributeKey[Boolean]("isModule"), isModule)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
implicit def arbScopeAxis[A: Arbitrary]: Arbitrary[ScopeAxis[A]] =
|
||||
Arbitrary(Gen.oneOf[ScopeAxis[A]](This, Zero, arbitrary[A] map (Select(_))))
|
||||
|
||||
implicit def arbScope: Arbitrary[Scope] = Arbitrary(
|
||||
for {
|
||||
r <- arbitrary[ScopeAxis[Reference]]
|
||||
c <- arbitrary[ScopeAxis[ConfigKey]]
|
||||
t <- arbitrary[ScopeAxis[AttributeKey[_]]]
|
||||
e <- arbitrary[ScopeAxis[AttributeMap]]
|
||||
} yield Scope(r, c, t, e)
|
||||
given Gen[ConfigKey] = Gen.frequency1(
|
||||
2 -> Gen.constant[ConfigKey](Compile),
|
||||
2 -> Gen.constant[ConfigKey](Test),
|
||||
1 -> Gen.constant[ConfigKey](Runtime),
|
||||
1 -> Gen.constant[ConfigKey](IntegrationTest),
|
||||
1 -> Gen.constant[ConfigKey](Provided),
|
||||
)
|
||||
|
||||
type Key = K forSome { type K <: Scoped.ScopingSetting[K] with Scoped }
|
||||
given genSettingKey[A1: ClassTag]: Gen[SettingKey[A1]] =
|
||||
withScope(WithoutScope.genSettingKey)
|
||||
given genTaskKey[A1: ClassTag]: Gen[TaskKey[A1]] =
|
||||
withScope(WithoutScope.genTaskKey)
|
||||
given genInputKey[A1: ClassTag]: Gen[InputKey[A1]] =
|
||||
withScope(WithoutScope.genInputKey)
|
||||
given genScopeAxis[A1: Gen]: Gen[ScopeAxis[A1]] =
|
||||
Gen.choice1[ScopeAxis[A1]](
|
||||
Gen.constant(This),
|
||||
Gen.constant(Zero),
|
||||
summon[Gen[A1]].map(Select(_))
|
||||
)
|
||||
|
||||
final case class Label(value: String)
|
||||
val genLabel: Gen[Label] = Gen.identifier map Label
|
||||
implicit def arbLabel: Arbitrary[Label] = Arbitrary(genLabel)
|
||||
given genKey[A1: ClassTag]: Gen[Key[A1]] =
|
||||
def convert[A2](g: Gen[A2]) = g.asInstanceOf[Gen[Key[A1]]]
|
||||
Gen.frequency1(
|
||||
15431 -> convert(genInputKey),
|
||||
19645 -> convert(genSettingKey),
|
||||
22867 -> convert(genTaskKey),
|
||||
)
|
||||
|
||||
def genInputKey[A: Manifest]: Gen[InputKey[A]] = genLabel map (x => InputKey[A](x.value))
|
||||
def genSettingKey[A: Manifest]: Gen[SettingKey[A]] = genLabel map (x => SettingKey[A](x.value))
|
||||
def genTaskKey[A: Manifest]: Gen[TaskKey[A]] = genLabel map (x => TaskKey[A](x.value))
|
||||
given genAttrKey: Gen[AttributeKey[?]] =
|
||||
identifier.map(AttributeKey[Unit](_))
|
||||
|
||||
@nowarn
|
||||
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Arbitrary[K] = Arbitrary {
|
||||
Gen.frequency(
|
||||
given genAttributeMap: Gen[AttributeMap] = Gen.frequency1(
|
||||
20 -> Gen.constant(AttributeMap.empty),
|
||||
1 ->
|
||||
(for
|
||||
name <- identifier
|
||||
isModule <- Gen.boolean
|
||||
yield AttributeMap.empty
|
||||
.put(AttributeKey[String]("name"), name)
|
||||
.put(AttributeKey[Boolean]("isModule"), isModule))
|
||||
)
|
||||
|
||||
given Gen[Scope] =
|
||||
for
|
||||
r <- summon[Gen[ScopeAxis[Reference]]]
|
||||
c <- summon[Gen[ScopeAxis[ConfigKey]]]
|
||||
t <- summon[Gen[ScopeAxis[AttributeKey[?]]]]
|
||||
e <- summon[Gen[ScopeAxis[AttributeMap]]]
|
||||
yield Scope(r, c, t, e)
|
||||
|
||||
def withScope[K <: Scoped.ScopingSetting[K]](keyGen: Gen[K]): Gen[K] =
|
||||
Gen.frequency1(
|
||||
5 -> keyGen,
|
||||
1 -> (for (key <- keyGen; scope <- arbitrary[Scope]) yield key in scope)
|
||||
1 -> (for
|
||||
key <- keyGen
|
||||
scope <- summon[Gen[Scope]]
|
||||
yield key.rescope(scope)),
|
||||
)
|
||||
}
|
||||
|
||||
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = withScope(genInputKey[A])
|
||||
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = withScope(genSettingKey[A])
|
||||
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = withScope(genTaskKey[A])
|
||||
case class Label(value: String)
|
||||
object Label:
|
||||
given genLabel: Gen[Label] = identifier.map(Label.apply)
|
||||
end Label
|
||||
|
||||
implicit def arbKey[A: Manifest](implicit
|
||||
arbInputKey: Arbitrary[InputKey[A]],
|
||||
arbSettingKey: Arbitrary[SettingKey[A]],
|
||||
arbTaskKey: Arbitrary[TaskKey[A]],
|
||||
): Arbitrary[Key] = Arbitrary {
|
||||
def convert[T](g: Gen[T]) = g.asInstanceOf[Gen[Key]]
|
||||
Gen.frequency(
|
||||
15431 -> convert(arbitrary[InputKey[A]]),
|
||||
19645 -> convert(arbitrary[SettingKey[A]]),
|
||||
22867 -> convert(arbitrary[TaskKey[A]]),
|
||||
object WithoutScope:
|
||||
def genSettingKey[A1: ClassTag]: Gen[SettingKey[A1]] =
|
||||
Label.genLabel.map: label =>
|
||||
SettingKey[A1](label.value)
|
||||
def genTaskKey[A1: ClassTag]: Gen[TaskKey[A1]] =
|
||||
Label.genLabel.map: label =>
|
||||
TaskKey[A1](label.value)
|
||||
def genInputKey[A1: ClassTag]: Gen[InputKey[A1]] =
|
||||
Label.genLabel.map: label =>
|
||||
InputKey[A1](label.value)
|
||||
end WithoutScope
|
||||
|
||||
def identifier: Gen[String] = for
|
||||
first <- Gen.char('a', 'z')
|
||||
length <- Gen.int(Range.linear(0, 20))
|
||||
rest <- Gen.list(
|
||||
Gen.frequency1(
|
||||
8 -> Gen.char('a', 'z'),
|
||||
8 -> Gen.char('A', 'Z'),
|
||||
5 -> Gen.char('0', '9'),
|
||||
1 -> Gen.constant('_')
|
||||
),
|
||||
Range.singleton(length)
|
||||
)
|
||||
}
|
||||
yield (first :: rest).mkString
|
||||
|
||||
object WithoutScope {
|
||||
implicit def arbInputKey[A: Manifest]: Arbitrary[InputKey[A]] = Arbitrary(genInputKey[A])
|
||||
implicit def arbSettingKey[A: Manifest]: Arbitrary[SettingKey[A]] = Arbitrary(genSettingKey[A])
|
||||
implicit def arbTaskKey[A: Manifest]: Arbitrary[TaskKey[A]] = Arbitrary(genTaskKey[A])
|
||||
}
|
||||
|
||||
implicit def arbScoped[A: Manifest]: Arbitrary[Scoped] = Arbitrary(arbitrary[Key])
|
||||
}
|
||||
*/
|
||||
end BuildSettingsInstances
|
||||
|
|
|
|||
|
|
@ -5,109 +5,391 @@
|
|||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
/*
|
||||
package sbt.test
|
||||
|
||||
import org.scalacheck.{ Test => _, _ }, Prop._
|
||||
package sbt
|
||||
package test
|
||||
|
||||
import sbt.SlashSyntax
|
||||
import sbt.{ Scope, ScopeAxis, Scoped }, Scope.{ Global, ThisScope }
|
||||
import sbt.Reference
|
||||
import sbt.ConfigKey
|
||||
import sbt.internal.util.AttributeKey
|
||||
import hedgehog.*
|
||||
import hedgehog.runner.*
|
||||
import Scope.{ Global, ThisScope }
|
||||
import SlashSyntax0.given
|
||||
import BuildSettingsInstances.given
|
||||
import _root_.sbt.internal.util.AttributeKey
|
||||
|
||||
import BuildSettingsInstances._
|
||||
import scala.annotation.nowarn
|
||||
object SlashSyntaxSpec extends Properties:
|
||||
override def tests: List[Test] = List(
|
||||
property("Global / key", propGlobalKey),
|
||||
property("Reference / key", propReferenceKey),
|
||||
property("Reference / Config / key", propReferenceConfigKey),
|
||||
property("Reference / task.key / key", propReferenceAttrKeyKey),
|
||||
property("Reference / task / key", propReferenceTaskKey),
|
||||
property("Reference / inputtask / key", propReferenceInputTaskKey),
|
||||
property("Reference / Config / task.key / key", propReferenceConfigAttrKeyKey),
|
||||
property("Reference / Config / task / key", propReferenceConfigTaskKey),
|
||||
property("Reference / Config / inputtask / key", propReferenceConfigInputTaskKey),
|
||||
property("Config / key", propConfigKey),
|
||||
property("Config / task.key / key", propConfigAttrKeyKey),
|
||||
property("Config / task / key", propConfigTaskKey),
|
||||
property("Config / inputtask / key", propConfigInputTaskKey),
|
||||
property("task.key / key", propAttrKeyKey),
|
||||
property("task / key", propTaskKey),
|
||||
property("inputtask / key", propInputTaskKey),
|
||||
property("Scope / key", propScopeKey),
|
||||
property("Reference? / key", propReferenceAxisKey),
|
||||
property("Reference? / Config? / key", propReferenceAxisConfigAxisKey),
|
||||
// property("Reference? / task.key? / key", propReferenceAxisAttrKeyAxisKey),
|
||||
property("Reference? / Config? / task.key? / key", propReferenceAxisConfigAxisAttrKeyAxisKey),
|
||||
)
|
||||
|
||||
@nowarn
|
||||
object SlashSyntaxSpec extends Properties("SlashSyntax") with SlashSyntax {
|
||||
property("Global / key == key in Global") = {
|
||||
forAll((k: Key) => expectValue(k in Global)(Global / k))
|
||||
}
|
||||
def gen[A1: Gen]: Gen[A1] = summon[Gen[A1]]
|
||||
|
||||
property("Reference / key == key in Reference") = {
|
||||
forAll((r: Reference, k: Key) => expectValue(k in r)(r / k))
|
||||
}
|
||||
|
||||
property("Reference / Config / key == key in Reference in Config") = {
|
||||
forAll((r: Reference, c: ConfigKey, k: Key) => expectValue(k in r in c)(r / c / k))
|
||||
}
|
||||
|
||||
property("Reference / task.key / key == key in Reference in task") = {
|
||||
forAll((r: Reference, t: Scoped, k: Key) => expectValue(k in (r, t))(r / t.key / k))
|
||||
}
|
||||
|
||||
property("Reference / task / key ~= key in Reference in task") = {
|
||||
import WithoutScope._
|
||||
forAll((r: Reference, t: Key, k: Key) => expectValue(k in (r, t))(r / t / k))
|
||||
}
|
||||
|
||||
property("Reference / Config / task.key / key == key in Reference in Config in task") = {
|
||||
forAll { (r: Reference, c: ConfigKey, t: Scoped, k: Key) =>
|
||||
expectValue(k in (r, c, t))(r / c / t.key / k)
|
||||
}
|
||||
}
|
||||
|
||||
property("Reference / Config / task / key ~= key in Reference in Config in task") = {
|
||||
import WithoutScope._
|
||||
forAll { (r: Reference, c: ConfigKey, t: Key, k: Key) =>
|
||||
expectValue(k in (r, c, t))(r / c / t / k)
|
||||
}
|
||||
}
|
||||
|
||||
property("Config / key == key in Config") = {
|
||||
forAll((c: ConfigKey, k: Key) => expectValue(k in c)(c / k))
|
||||
}
|
||||
|
||||
property("Config / task.key / key == key in Config in task") = {
|
||||
forAll((c: ConfigKey, t: Scoped, k: Key) => expectValue(k in c in t)(c / t.key / k))
|
||||
}
|
||||
|
||||
property("Config / task / key ~= key in Config in task") = {
|
||||
import WithoutScope._
|
||||
forAll((c: ConfigKey, t: Key, k: Key) => expectValue(k in c in t)(c / t / k))
|
||||
}
|
||||
|
||||
property("task.key / key == key in task") = {
|
||||
forAll((t: Scoped, k: Key) => expectValue(k in t)(t.key / k))
|
||||
}
|
||||
|
||||
property("task / key ~= key in task") = {
|
||||
import WithoutScope._
|
||||
forAll((t: Key, k: Key) => expectValue(k in t)(t / k))
|
||||
}
|
||||
|
||||
property("Scope / key == key in Scope") = {
|
||||
forAll((s: Scope, k: Key) => expectValue(k in s)(s / k))
|
||||
}
|
||||
|
||||
property("Reference? / key == key in ThisScope.copy(..)") = {
|
||||
forAll { (r: ScopeAxis[Reference], k: Key) =>
|
||||
expectValue(k in ThisScope.copy(project = r))(r / k)
|
||||
}
|
||||
}
|
||||
|
||||
property("Reference? / ConfigKey? / key == key in ThisScope.copy(..)") = {
|
||||
forAll((r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], k: Key) =>
|
||||
expectValue(k in ThisScope.copy(project = r, config = c))(r / c / k)
|
||||
def propGlobalKey: Property =
|
||||
for
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => Global / k
|
||||
case k: TaskKey[?] => Global / k
|
||||
case k: SettingKey[?] => Global / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
// Only if the incoming scope is This/This/This,
|
||||
// Global scoping is effective.
|
||||
(if k.scope == ThisScope then actual.scope == Global
|
||||
else true)
|
||||
)
|
||||
}
|
||||
|
||||
// property("Reference? / AttributeKey? / key == key in ThisScope.copy(..)") = {
|
||||
// forAll((r: ScopeAxis[Reference], t: ScopeAxis[AttributeKey[_]], k: AnyKey) =>
|
||||
// expectValue(k in ThisScope.copy(project = r, task = t))(r / t / k))
|
||||
// }
|
||||
def propReferenceKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / k
|
||||
case k: TaskKey[?] => ref / k
|
||||
case k: SettingKey[?] => ref / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true)
|
||||
)
|
||||
|
||||
property("Reference? / ConfigKey? / AttributeKey? / key == key in ThisScope.copy(..)") = {
|
||||
forAll {
|
||||
(r: ScopeAxis[Reference], c: ScopeAxis[ConfigKey], t: ScopeAxis[AttributeKey[_]], k: Key) =>
|
||||
expectValue(k in ThisScope.copy(project = r, config = c, task = t))(r / c / t / k)
|
||||
}
|
||||
}
|
||||
def propReferenceConfigKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
config <- gen[ConfigKey].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / k
|
||||
case k: TaskKey[?] => ref / config / k
|
||||
case k: SettingKey[?] => ref / config / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true)
|
||||
)
|
||||
|
||||
def expectValue(expected: Scoped)(x: Scoped) = {
|
||||
val equals = x.scope == expected.scope && x.key == expected.key
|
||||
if (equals) proved else falsified :| s"Expected $expected but got $x"
|
||||
}
|
||||
}
|
||||
*/
|
||||
def propReferenceAttrKeyKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
scoped <- genKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / scoped.key / k
|
||||
case k: TaskKey[?] => ref / scoped.key / k
|
||||
case k: SettingKey[?] => ref / scoped.key / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(scoped.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceTaskKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
t <- genTaskKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / t / k
|
||||
case k: TaskKey[?] => ref / t / k
|
||||
case k: SettingKey[?] => ref / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceInputTaskKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
t <- genInputKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / t / k
|
||||
case k: TaskKey[?] => ref / t / k
|
||||
case k: SettingKey[?] => ref / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceConfigAttrKeyKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
config <- gen[ConfigKey].forAll
|
||||
scoped <- genKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / scoped.key / k
|
||||
case k: TaskKey[?] => ref / config / scoped.key / k
|
||||
case k: SettingKey[?] => ref / config / scoped.key / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(scoped.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceConfigTaskKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
config <- gen[ConfigKey].forAll
|
||||
t <- genTaskKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / t / k
|
||||
case k: TaskKey[?] => ref / config / t / k
|
||||
case k: SettingKey[?] => ref / config / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceConfigInputTaskKey: Property =
|
||||
for
|
||||
ref <- gen[Reference].forAll
|
||||
config <- gen[ConfigKey].forAll
|
||||
t <- genInputKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / t / k
|
||||
case k: TaskKey[?] => ref / config / t / k
|
||||
case k: SettingKey[?] => ref / config / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == Select(ref)
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propConfigKey: Property =
|
||||
for
|
||||
config <- gen[ConfigKey].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => config / k
|
||||
case k: TaskKey[?] => config / k
|
||||
case k: SettingKey[?] => config / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propConfigAttrKeyKey: Property =
|
||||
for
|
||||
config <- gen[ConfigKey].forAll
|
||||
scoped <- genKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => config / scoped.key / k
|
||||
case k: TaskKey[?] => config / scoped.key / k
|
||||
case k: SettingKey[?] => config / scoped.key / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(scoped.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propConfigTaskKey: Property =
|
||||
for
|
||||
config <- gen[ConfigKey].forAll
|
||||
t <- genTaskKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => config / t / k
|
||||
case k: TaskKey[?] => config / t / k
|
||||
case k: SettingKey[?] => config / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propConfigInputTaskKey: Property =
|
||||
for
|
||||
config <- gen[ConfigKey].forAll
|
||||
t <- genInputKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => config / t / k
|
||||
case k: TaskKey[?] => config / t / k
|
||||
case k: SettingKey[?] => config / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.config == This then actual.scope.config == Select(config)
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propAttrKeyKey: Property =
|
||||
for
|
||||
scoped <- genKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => scoped.key / k
|
||||
case k: TaskKey[?] => scoped.key / k
|
||||
case k: SettingKey[?] => scoped.key / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(scoped.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propTaskKey: Property =
|
||||
for
|
||||
t <- genTaskKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => t / k
|
||||
case k: TaskKey[?] => t / k
|
||||
case k: SettingKey[?] => t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propInputTaskKey: Property =
|
||||
for
|
||||
t <- genInputKey[Unit].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => t / k
|
||||
case k: TaskKey[?] => t / k
|
||||
case k: SettingKey[?] => t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.task == This then actual.scope.task == Select(t.key)
|
||||
else true)
|
||||
)
|
||||
|
||||
def propScopeKey: Property =
|
||||
for
|
||||
scope <- gen[Scope].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => scope / k
|
||||
case k: TaskKey[?] => scope / k
|
||||
case k: SettingKey[?] => scope / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
// Only if the incoming scope is This/This/This,
|
||||
// Global scoping is effective.
|
||||
(if k.scope == ThisScope then actual.scope == scope
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceAxisKey: Property =
|
||||
for
|
||||
ref <- gen[ScopeAxis[Reference]].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / k
|
||||
case k: TaskKey[?] => ref / k
|
||||
case k: SettingKey[?] => ref / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == ref
|
||||
else true)
|
||||
)
|
||||
|
||||
def propReferenceAxisConfigAxisKey: Property =
|
||||
for
|
||||
ref <- gen[ScopeAxis[Reference]].forAll
|
||||
config <- gen[ScopeAxis[ConfigKey]].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / k
|
||||
case k: TaskKey[?] => ref / config / k
|
||||
case k: SettingKey[?] => ref / config / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == ref
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == config
|
||||
else true)
|
||||
)
|
||||
|
||||
/*
|
||||
def propReferenceAxisAttrKeyAxisKey: Property =
|
||||
for
|
||||
ref <- gen[ScopeAxis[Reference]].forAll
|
||||
t <- gen[ScopeAxis[AttributeKey[?]]].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / t / k
|
||||
case k: TaskKey[?] => ref / t / k
|
||||
case k: SettingKey[?] => ref / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == ref
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == t
|
||||
else true)
|
||||
)
|
||||
*/
|
||||
|
||||
def propReferenceAxisConfigAxisAttrKeyAxisKey: Property =
|
||||
for
|
||||
ref <- gen[ScopeAxis[Reference]].forAll
|
||||
config <- gen[ScopeAxis[ConfigKey]].forAll
|
||||
t <- gen[ScopeAxis[AttributeKey[?]]].forAll
|
||||
k <- genKey[Unit].forAll
|
||||
actual = k match
|
||||
case k: InputKey[?] => ref / config / t / k
|
||||
case k: TaskKey[?] => ref / config / t / k
|
||||
case k: SettingKey[?] => ref / config / t / k
|
||||
yield Result.assert(
|
||||
actual.key == k.key &&
|
||||
(if k.scope.project == This then actual.scope.project == ref
|
||||
else true) &&
|
||||
(if k.scope.config == This then actual.scope.config == config
|
||||
else true) &&
|
||||
(if k.scope.task == This then actual.scope.task == t
|
||||
else true)
|
||||
)
|
||||
end SlashSyntaxSpec
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ import sbt.Project.{
|
|||
// sbtRichTaskPromise
|
||||
}
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.Scope.{ GlobalScope, ThisScope, fillTaskAxis }
|
||||
import sbt.Scope.{ GlobalScope, ThisBuildScope, ThisScope, fillTaskAxis }
|
||||
import sbt.State.StateOpsImpl
|
||||
import sbt.coursierint._
|
||||
import sbt.internal.CommandStrings.ExportStream
|
||||
|
|
@ -156,9 +156,9 @@ object Defaults extends BuildCommon {
|
|||
private[sbt] def globalDefaults(ss: Seq[Setting[?]]): Seq[Setting[?]] =
|
||||
Def.defaultSettings(inScope(GlobalScope)(ss))
|
||||
|
||||
def buildCore: Seq[Setting[?]] = thisBuildCore ++ globalCore
|
||||
def thisBuildCore: Seq[Setting[?]] =
|
||||
inScope(GlobalScope.copy(project = Select(ThisBuild)))(
|
||||
lazy val buildCore: Seq[Setting[?]] = thisBuildCore ++ globalCore
|
||||
private def thisBuildCore: Seq[Setting[?]] =
|
||||
inScope(ThisBuildScope)(
|
||||
Seq(
|
||||
managedDirectory := baseDirectory.value / "lib_managed"
|
||||
)
|
||||
|
|
@ -652,8 +652,6 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
},
|
||||
)
|
||||
// This exists for binary compatibility and probably never should have been public.
|
||||
def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil
|
||||
lazy val outputConfigPaths: Seq[Setting[?]] = Seq(
|
||||
classDirectory := target.value / (prefix(configuration.value.name) + "classes"),
|
||||
backendOutput := {
|
||||
|
|
@ -717,13 +715,21 @@ object Defaults extends BuildCommon {
|
|||
},
|
||||
crossSbtVersions := Vector((pluginCrossBuild / sbtVersion).value),
|
||||
crossTarget := target.value,
|
||||
scalaCompilerBridgeBinaryJar := Def.settingDyn {
|
||||
scalaCompilerBridgeBinaryJar := {
|
||||
val sv = scalaVersion.value
|
||||
val managed = managedScalaInstance.value
|
||||
val hasSbtBridge = ScalaArtifacts.isScala3(sv) || ZincLmUtil.hasScala2SbtBridge(sv)
|
||||
if (hasSbtBridge && managed) fetchBridgeBinaryJarTask(sv)
|
||||
else Def.task[Option[File]](None)
|
||||
}.value,
|
||||
if hasSbtBridge && managed then
|
||||
val jar = ZincLmUtil.fetchDefaultBridgeModule(
|
||||
sv,
|
||||
dependencyResolution.value,
|
||||
updateConfiguration.value,
|
||||
(update / unresolvedWarningConfiguration).value,
|
||||
streams.value.log
|
||||
)
|
||||
Some(jar)
|
||||
else None
|
||||
},
|
||||
scalaCompilerBridgeSource := ZincLmUtil.getDefaultBridgeSourceModule(scalaVersion.value),
|
||||
auxiliaryClassFiles ++= {
|
||||
if (ScalaArtifacts.isScala3(scalaVersion.value)) List(TastyFiles.instance)
|
||||
|
|
@ -736,7 +742,6 @@ object Defaults extends BuildCommon {
|
|||
classpathOptions := ClasspathOptionsUtil.noboot(scalaVersion.value),
|
||||
console / classpathOptions := ClasspathOptionsUtil.replNoboot(scalaVersion.value),
|
||||
)
|
||||
// must be a val: duplication detected by object identity
|
||||
private lazy val compileBaseGlobal: Seq[Setting[?]] = globalDefaults(
|
||||
Seq(
|
||||
auxiliaryClassFiles :== Nil,
|
||||
|
|
@ -812,18 +817,6 @@ object Defaults extends BuildCommon {
|
|||
if (plugin) scalaBase / ("sbt-" + sbtv) else scalaBase
|
||||
}
|
||||
|
||||
private def fetchBridgeBinaryJarTask(scalaVersion: String): Initialize[Task[Option[File]]] =
|
||||
Def.task {
|
||||
val bridgeJar = ZincLmUtil.fetchDefaultBridgeModule(
|
||||
scalaVersion,
|
||||
dependencyResolution.value,
|
||||
updateConfiguration.value,
|
||||
(update / unresolvedWarningConfiguration).value,
|
||||
streams.value.log
|
||||
)
|
||||
Some(bridgeJar)
|
||||
}
|
||||
|
||||
def compilersSetting = {
|
||||
compilers := {
|
||||
val st = state.value
|
||||
|
|
@ -1010,8 +1003,10 @@ object Defaults extends BuildCommon {
|
|||
},
|
||||
persistJarClasspath :== true,
|
||||
classpathEntryDefinesClassVF := {
|
||||
(if (persistJarClasspath.value) classpathDefinesClassCache.value
|
||||
else VirtualFileValueCache.definesClassCache(fileConverter.value)).get
|
||||
val cache =
|
||||
if persistJarClasspath.value then classpathDefinesClassCache.value
|
||||
else VirtualFileValueCache.definesClassCache(fileConverter.value)
|
||||
cache.get
|
||||
},
|
||||
compileIncSetup := compileIncSetupTask.value,
|
||||
console := consoleTask.value,
|
||||
|
|
@ -1083,16 +1078,9 @@ object Defaults extends BuildCommon {
|
|||
"1.3.0"
|
||||
)
|
||||
def watchTransitiveSourcesTask: Initialize[Task[Seq[Source]]] =
|
||||
watchTransitiveSourcesTaskImpl(watchSources)
|
||||
|
||||
private def watchTransitiveSourcesTaskImpl(
|
||||
key: TaskKey[Seq[Source]]
|
||||
): Initialize[Task[Seq[Source]]] = {
|
||||
import ScopeFilter.Make.*
|
||||
val selectDeps = ScopeFilter(inAggregates(ThisProject) || inDependencies(ThisProject))
|
||||
val allWatched = (key ?? Nil).all(selectDeps)
|
||||
Def.task { allWatched.value.flatten }
|
||||
}
|
||||
watchSources.??(Nil).all(selectDeps).map(_.flatten)
|
||||
|
||||
def transitiveUpdateTask: Initialize[Task[Seq[UpdateReport]]] = {
|
||||
import ScopeFilter.Make.*
|
||||
|
|
@ -1140,15 +1128,13 @@ object Defaults extends BuildCommon {
|
|||
// use the same class loader as the Scala classes used by sbt
|
||||
Def.task {
|
||||
val allJars = scalaProvider.jars
|
||||
val libraryJars = allJars
|
||||
.filter { jar =>
|
||||
(jar.getName == "scala-library.jar") || (jar.getName.startsWith(
|
||||
"scala3-library_3"
|
||||
))
|
||||
}
|
||||
(allJars.filter { jar =>
|
||||
val libraryJars = allJars.filter { jar =>
|
||||
jar.getName == "scala-library.jar" || jar.getName.startsWith("scala3-library_3")
|
||||
}
|
||||
val compilerJar = allJars.filter { jar =>
|
||||
jar.getName == "scala-compiler.jar" || jar.getName.startsWith("scala3-compiler_3")
|
||||
}) match
|
||||
}
|
||||
compilerJar match
|
||||
case Array(compilerJar) if libraryJars.nonEmpty =>
|
||||
makeScalaInstance(
|
||||
sv,
|
||||
|
|
@ -1318,75 +1304,76 @@ object Defaults extends BuildCommon {
|
|||
extraTestDigests :== Nil,
|
||||
)
|
||||
)
|
||||
lazy val testTasks: Seq[Setting[?]] =
|
||||
testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions(
|
||||
testQuick
|
||||
) ++ testDefaults ++ Seq(
|
||||
testLoader := ClassLoaders.testTask.value,
|
||||
loadedTestFrameworks := {
|
||||
val loader = testLoader.value
|
||||
val log = streams.value.log
|
||||
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x))).toMap
|
||||
},
|
||||
definedTests := detectTests.value,
|
||||
definedTestNames := definedTests
|
||||
.map(_.map(_.name).distinct)
|
||||
.storeAs(definedTestNames)
|
||||
.triggeredBy(compile)
|
||||
.value,
|
||||
definedTestDigests := IncrementalTest.definedTestDigestTask
|
||||
.triggeredBy(compile)
|
||||
.value,
|
||||
testQuick / testFilter := IncrementalTest.filterTask.value,
|
||||
extraTestDigests ++= IncrementalTest.extraTestDigestsTask.value,
|
||||
executeTests := {
|
||||
import sbt.TupleSyntax.*
|
||||
(
|
||||
test / streams,
|
||||
loadedTestFrameworks,
|
||||
testLoader,
|
||||
(test / testGrouping),
|
||||
(test / testExecution),
|
||||
(test / fullClasspath),
|
||||
testForkedParallel,
|
||||
(test / javaOptions),
|
||||
(classLoaderLayeringStrategy),
|
||||
thisProject,
|
||||
fileConverter,
|
||||
).flatMapN { case (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj, c) =>
|
||||
allTestGroupsTask(
|
||||
s,
|
||||
lt,
|
||||
tl,
|
||||
gp,
|
||||
ex,
|
||||
cp,
|
||||
fp,
|
||||
jo,
|
||||
clls,
|
||||
projectId = s"${thisProj.id} / ",
|
||||
c,
|
||||
)
|
||||
}
|
||||
}.value,
|
||||
// ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value,
|
||||
Test / testFull / testResultLogger :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185
|
||||
testFull := {
|
||||
val trl = (Test / testFull / testResultLogger).value
|
||||
val taskName = Project.showContextKey(state.value).show(resolvedScoped.value)
|
||||
try trl.run(streams.value.log, executeTests.value, taskName)
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
testOnly := {
|
||||
try inputTests(testOnly).evaluated
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
testQuick := {
|
||||
try inputTests(testQuick).evaluated
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
test := testQuick.evaluated,
|
||||
)
|
||||
lazy val testTasks: Seq[Setting[?]] = Def.settings(
|
||||
testTaskOptions(test),
|
||||
testTaskOptions(testOnly),
|
||||
testTaskOptions(testQuick),
|
||||
testDefaults,
|
||||
testLoader := ClassLoaders.testTask.value,
|
||||
loadedTestFrameworks := {
|
||||
val loader = testLoader.value
|
||||
val log = streams.value.log
|
||||
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x))).toMap
|
||||
},
|
||||
definedTests := detectTests.value,
|
||||
definedTestNames := definedTests
|
||||
.map(_.map(_.name).distinct)
|
||||
.storeAs(definedTestNames)
|
||||
.triggeredBy(compile)
|
||||
.value,
|
||||
definedTestDigests := IncrementalTest.definedTestDigestTask
|
||||
.triggeredBy(compile)
|
||||
.value,
|
||||
testQuick / testFilter := IncrementalTest.filterTask.value,
|
||||
extraTestDigests ++= IncrementalTest.extraTestDigestsTask.value,
|
||||
executeTests := {
|
||||
import sbt.TupleSyntax.*
|
||||
(
|
||||
test / streams,
|
||||
loadedTestFrameworks,
|
||||
testLoader,
|
||||
(test / testGrouping),
|
||||
(test / testExecution),
|
||||
(test / fullClasspath),
|
||||
testForkedParallel,
|
||||
(test / javaOptions),
|
||||
(classLoaderLayeringStrategy),
|
||||
thisProject,
|
||||
fileConverter,
|
||||
).flatMapN { case (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj, c) =>
|
||||
allTestGroupsTask(
|
||||
s,
|
||||
lt,
|
||||
tl,
|
||||
gp,
|
||||
ex,
|
||||
cp,
|
||||
fp,
|
||||
jo,
|
||||
clls,
|
||||
projectId = s"${thisProj.id} / ",
|
||||
c,
|
||||
)
|
||||
}
|
||||
}.value,
|
||||
// ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value,
|
||||
Test / testFull / testResultLogger :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185
|
||||
testFull := {
|
||||
val trl = (Test / testFull / testResultLogger).value
|
||||
val taskName = Project.showContextKey(state.value).show(resolvedScoped.value)
|
||||
try trl.run(streams.value.log, executeTests.value, taskName)
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
testOnly := {
|
||||
try inputTests(testOnly).evaluated
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
testQuick := {
|
||||
try inputTests(testQuick).evaluated
|
||||
finally close(testLoader.value)
|
||||
},
|
||||
test := testQuick.evaluated,
|
||||
)
|
||||
|
||||
private def close(sbtLoader: ClassLoader): Unit = sbtLoader match {
|
||||
case u: AutoCloseable => u.close()
|
||||
|
|
@ -1398,13 +1385,11 @@ object Defaults extends BuildCommon {
|
|||
* A scope whose task axis is set to Zero.
|
||||
*/
|
||||
lazy val TaskZero: Scope = ThisScope.copy(task = Zero)
|
||||
lazy val TaskGlobal: Scope = TaskZero
|
||||
|
||||
/**
|
||||
* A scope whose configuration axis is set to Zero.
|
||||
*/
|
||||
lazy val ConfigZero: Scope = ThisScope.copy(config = Zero)
|
||||
lazy val ConfigGlobal: Scope = ConfigZero
|
||||
def testTaskOptions(key: Scoped): Seq[Setting[?]] =
|
||||
inTask(key)(
|
||||
Seq(
|
||||
|
|
@ -1468,7 +1453,7 @@ object Defaults extends BuildCommon {
|
|||
|
||||
def singleTestGroup(key: Scoped): Initialize[Task[Seq[Tests.Group]]] =
|
||||
inTask(key, singleTestGroupDefault)
|
||||
def singleTestGroupDefault: Initialize[Task[Seq[Tests.Group]]] = Def.task {
|
||||
lazy val singleTestGroupDefault: Initialize[Task[Seq[Tests.Group]]] = Def.task {
|
||||
val tests = definedTests.value
|
||||
val fk = fork.value
|
||||
val opts = forkOptions.value
|
||||
|
|
@ -1940,7 +1925,7 @@ object Defaults extends BuildCommon {
|
|||
converter.toVirtualFile(p.toPath())
|
||||
}
|
||||
|
||||
def artifactSetting: Initialize[Artifact] =
|
||||
lazy val artifactSetting: Initialize[Artifact] =
|
||||
Def.setting {
|
||||
val a = artifact.value
|
||||
val classifier = artifactClassifier.value
|
||||
|
|
@ -1980,7 +1965,7 @@ object Defaults extends BuildCommon {
|
|||
)
|
||||
)
|
||||
|
||||
def packageTask: Initialize[Task[HashedVirtualFileRef]] =
|
||||
lazy val packageTask: Initialize[Task[HashedVirtualFileRef]] =
|
||||
Def.cachedTask {
|
||||
val config = packageConfiguration.value
|
||||
val s = streams.value
|
||||
|
|
@ -1996,7 +1981,7 @@ object Defaults extends BuildCommon {
|
|||
out
|
||||
}
|
||||
|
||||
def packageConfigurationTask: Initialize[Task[Pkg.Configuration]] =
|
||||
lazy val packageConfigurationTask: Initialize[Task[Pkg.Configuration]] =
|
||||
Def.task {
|
||||
Pkg.Configuration(
|
||||
mappings.value,
|
||||
|
|
@ -2809,8 +2794,8 @@ object Defaults extends BuildCommon {
|
|||
|
||||
def noAggregation: Seq[Scoped] =
|
||||
Seq(run, runMain, bgRun, bgRunMain, console, consoleQuick, consoleProject)
|
||||
lazy val disableAggregation = Defaults.globalDefaults(noAggregation map disableAggregate)
|
||||
def disableAggregate(k: Scoped) = (k / aggregate) :== false
|
||||
lazy val disableAggregation =
|
||||
Defaults.globalDefaults(noAggregation.map(k => (k / aggregate) :== false))
|
||||
|
||||
// 1. runnerSettings is added unscoped via JvmPlugin.
|
||||
// 2. In addition it's added scoped to run task.
|
||||
|
|
@ -2835,9 +2820,7 @@ object Defaults extends BuildCommon {
|
|||
"Create a separate subproject instead of using IntegrationTest and in addition avoid using itSettings",
|
||||
"1.9.0"
|
||||
)
|
||||
lazy val itSettings: Seq[Setting[?]] = inConfig(IntegrationTest) {
|
||||
testSettings
|
||||
}
|
||||
lazy val itSettings: Seq[Setting[?]] = inConfig(IntegrationTest)(testSettings)
|
||||
lazy val defaultConfigs: Seq[Setting[?]] = inConfig(Compile)(compileSettings) ++
|
||||
inConfig(Test)(testSettings) ++
|
||||
inConfig(Runtime)(Classpaths.configSettings)
|
||||
|
|
@ -2870,7 +2853,7 @@ object Defaults extends BuildCommon {
|
|||
)
|
||||
)
|
||||
|
||||
def dependencyResolutionTask: Def.Initialize[Task[DependencyResolution]] =
|
||||
lazy val dependencyResolutionTask: Def.Initialize[Task[DependencyResolution]] =
|
||||
Def.task {
|
||||
CoursierDependencyResolution(csrConfiguration.value)
|
||||
}
|
||||
|
|
@ -3133,16 +3116,14 @@ object Classpaths {
|
|||
// Both POMs and JARs are Maven-compatible in sbt 2.x, so ignore the workarounds
|
||||
packagedDefaultArtifacts.value
|
||||
} else {
|
||||
val crossVersion = sbtCrossVersion.value
|
||||
val sbtV = (pluginCrossBuild / sbtBinaryVersion).value
|
||||
val scalaV = scalaBinaryVersion.value
|
||||
val crossVersion = (name: String) => name + s"_${scalaV}_$sbtV"
|
||||
val legacyPomArtifact = (makePom / artifact).value
|
||||
val converter = fileConverter.value
|
||||
def addSuffix(a: Artifact): Artifact = a.withName(crossVersion(a.name))
|
||||
Map(
|
||||
addSuffix(legacyPomArtifact) -> converter.toVirtualFile(
|
||||
makeMavenPomOfSbtPlugin.value.toPath()
|
||||
)
|
||||
) ++
|
||||
pomConsistentArtifactsForLegacySbt.value ++
|
||||
Map(addSuffix(legacyPomArtifact) -> makeMavenPomOfSbtPlugin(converter, crossVersion)) ++
|
||||
pomConsistentArtifactsForLegacySbt(converter, crossVersion) ++
|
||||
legacyPackagedArtifacts.value
|
||||
}
|
||||
}
|
||||
|
|
@ -3154,52 +3135,46 @@ object Classpaths {
|
|||
else Map.empty[Artifact, HashedVirtualFileRef]
|
||||
}
|
||||
|
||||
private def pomConsistentArtifactsForLegacySbt
|
||||
: Def.Initialize[Task[Map[Artifact, HashedVirtualFileRef]]] =
|
||||
Def.task {
|
||||
val crossVersion = sbtCrossVersion.value
|
||||
val legacyPackages = packaged(defaultPackages).value
|
||||
val converter = fileConverter.value
|
||||
def copyArtifact(
|
||||
artifact: Artifact,
|
||||
fileRef: HashedVirtualFileRef
|
||||
): (Artifact, HashedVirtualFileRef) = {
|
||||
val nameWithSuffix = crossVersion(artifact.name)
|
||||
val file = converter.toPath(fileRef).toFile
|
||||
val targetFile =
|
||||
new File(file.getParentFile, file.name.replace(artifact.name, nameWithSuffix))
|
||||
IO.copyFile(file, targetFile)
|
||||
artifact.withName(nameWithSuffix) -> converter.toVirtualFile(targetFile.toPath)
|
||||
}
|
||||
legacyPackages.map { case (artifact, file) =>
|
||||
copyArtifact(artifact, file);
|
||||
}
|
||||
private inline def pomConsistentArtifactsForLegacySbt(
|
||||
converter: FileConverter,
|
||||
crossVersion: String => String
|
||||
): Map[Artifact, HashedVirtualFileRef] =
|
||||
val legacyPackages = packaged(defaultPackages).value
|
||||
def copyArtifact(
|
||||
artifact: Artifact,
|
||||
fileRef: HashedVirtualFileRef
|
||||
): (Artifact, HashedVirtualFileRef) = {
|
||||
val nameWithSuffix = crossVersion(artifact.name)
|
||||
val file = converter.toPath(fileRef).toFile
|
||||
val targetFile =
|
||||
new File(file.getParentFile, file.name.replace(artifact.name, nameWithSuffix))
|
||||
IO.copyFile(file, targetFile)
|
||||
artifact.withName(nameWithSuffix) -> converter.toVirtualFile(targetFile.toPath)
|
||||
}
|
||||
legacyPackages.map { case (artifact, file) =>
|
||||
copyArtifact(artifact, file);
|
||||
}
|
||||
|
||||
private def sbtCrossVersion: Def.Initialize[String => String] = Def.setting {
|
||||
val sbtV = (pluginCrossBuild / sbtBinaryVersion).value
|
||||
val scalaV = scalaBinaryVersion.value
|
||||
name => name + s"_${scalaV}_$sbtV"
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a POM file that Maven can resolve.
|
||||
* It appends the sbt cross version into all artifactIds of sbt plugins
|
||||
* (the main one and the dependencies).
|
||||
*/
|
||||
private def makeMavenPomOfSbtPlugin: Def.Initialize[Task[File]] = Def.task {
|
||||
private inline def makeMavenPomOfSbtPlugin(
|
||||
converter: FileConverter,
|
||||
crossVersion: String => String
|
||||
): HashedVirtualFileRef =
|
||||
val config = makePomConfiguration.value
|
||||
val nameWithCross = sbtCrossVersion.value(artifact.value.name)
|
||||
val nameWithCross = crossVersion(artifact.value.name)
|
||||
val version = Keys.version.value
|
||||
val pomFile = config.file.get.getParentFile / s"$nameWithCross-$version.pom"
|
||||
val publisher = Keys.publisher.value
|
||||
val ivySbt = Keys.ivySbt.value
|
||||
val module = new ivySbt.Module(moduleSettings.value, appendSbtCrossVersion = true)
|
||||
publisher.makePomFile(module, config.withFile(pomFile), streams.value.log)
|
||||
pomFile
|
||||
}
|
||||
converter.toVirtualFile(pomFile.toPath)
|
||||
|
||||
val ivyPublishSettings: Seq[Setting[?]] = publishGlobalDefaults ++ Seq(
|
||||
def ivyPublishSettings: Seq[Setting[?]] = publishGlobalDefaults ++ Seq(
|
||||
artifacts :== Nil,
|
||||
packagedArtifacts :== Map.empty,
|
||||
makePom := {
|
||||
|
|
@ -3218,7 +3193,7 @@ object Classpaths {
|
|||
publishM2 := publishOrSkip(publishM2Configuration, publishM2 / skip).value
|
||||
)
|
||||
|
||||
private def baseGlobalDefaults =
|
||||
def baseGlobalDefaults =
|
||||
Defaults.globalDefaults(
|
||||
Seq(
|
||||
conflictWarning :== ConflictWarning.default("global"),
|
||||
|
|
@ -3290,7 +3265,7 @@ object Classpaths {
|
|||
)
|
||||
)
|
||||
|
||||
val ivyBaseSettings: Seq[Setting[?]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq(
|
||||
def ivyBaseSettings: Seq[Setting[?]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq(
|
||||
conflictWarning := conflictWarning.value.copy(label = Reference.display(thisProjectRef.value)),
|
||||
unmanagedBase := baseDirectory.value / "lib",
|
||||
normalizedName := Project.normalizeModuleID(name.value),
|
||||
|
|
@ -3593,8 +3568,8 @@ object Classpaths {
|
|||
update / unresolvedWarningConfiguration := UnresolvedWarningConfiguration(
|
||||
dependencyPositions.value
|
||||
),
|
||||
updateFull := (updateTask.tag(Tags.Update, Tags.Network)).value,
|
||||
update := (updateWithoutDetails("update").tag(Tags.Update, Tags.Network)).value,
|
||||
updateFull := updateTask.value,
|
||||
update := updateWithoutDetails("update").value,
|
||||
update := {
|
||||
val report = update.value
|
||||
val log = streams.value.log
|
||||
|
|
@ -3605,7 +3580,7 @@ object Classpaths {
|
|||
evicted / evictionWarningOptions := EvictionWarningOptions.full,
|
||||
evicted := {
|
||||
import ShowLines._
|
||||
val report = (updateTask.tag(Tags.Update, Tags.Network)).value
|
||||
val report = updateTask.value
|
||||
val log = streams.value.log
|
||||
val ew =
|
||||
EvictionWarning(ivyModule.value, (evicted / evictionWarningOptions).value, report)
|
||||
|
|
@ -3630,7 +3605,7 @@ object Classpaths {
|
|||
},
|
||||
dependencyResolution := dependencyResolutionTask.value,
|
||||
csrConfiguration := LMCoursier.updateClassifierConfigurationTask.value,
|
||||
TaskGlobal / updateClassifiers := LibraryManagement.updateClassifiersTask.value,
|
||||
TaskZero / updateClassifiers := LibraryManagement.updateClassifiersTask.value,
|
||||
)
|
||||
) ++ Seq(
|
||||
csrProject := CoursierInputsTasks.coursierProjectTask.value,
|
||||
|
|
@ -3645,7 +3620,7 @@ object Classpaths {
|
|||
IvyXml.generateIvyXmlSettings() ++
|
||||
LMCoursier.publicationsSetting(Seq(Compile, Test).map(c => c -> CConfiguration(c.name)))
|
||||
|
||||
val jvmBaseSettings: Seq[Setting[?]] = Seq(
|
||||
def jvmBaseSettings: Seq[Setting[?]] = Seq(
|
||||
libraryDependencies ++= autoLibraryDependency(
|
||||
autoScalaLibrary.value && scalaHome.value.isEmpty && managedScalaInstance.value,
|
||||
sbtPlugin.value,
|
||||
|
|
@ -3761,7 +3736,7 @@ object Classpaths {
|
|||
)
|
||||
else projectID.value
|
||||
}
|
||||
private[sbt] def ivySbt0: Initialize[Task[IvySbt]] =
|
||||
private[sbt] lazy val ivySbt0: Initialize[Task[IvySbt]] =
|
||||
Def.task {
|
||||
Credentials.register(credentials.value, streams.value.log)
|
||||
new IvySbt(ivyConfiguration.value)
|
||||
|
|
@ -3834,7 +3809,7 @@ object Classpaths {
|
|||
},
|
||||
dependencyResolution := dependencyResolutionTask.value,
|
||||
csrConfiguration := LMCoursier.updateSbtClassifierConfigurationTask.value,
|
||||
(TaskGlobal / updateSbtClassifiers) := (Def
|
||||
(TaskZero / updateSbtClassifiers) := (Def
|
||||
.task {
|
||||
val lm = dependencyResolution.value
|
||||
val s = streams.value
|
||||
|
|
@ -4010,9 +3985,10 @@ object Classpaths {
}
}

def updateTask: Initialize[Task[UpdateReport]] = updateTask0("updateFull", true, true)
lazy val updateTask: Initialize[Task[UpdateReport]] =
updateTask0("updateFull", true, true).tag(Tags.Update, Tags.Network)
def updateWithoutDetails(label: String): Initialize[Task[UpdateReport]] =
updateTask0(label, false, false)
updateTask0(label, false, false).tag(Tags.Update, Tags.Network)
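Moving `.tag(Tags.Update, Tags.Network)` onto the definitions of `updateTask` and `updateWithoutDetails` means the tags are declared once and inherited everywhere the tasks are wired. A minimal `build.sbt`-style sketch of the same pattern, using a hypothetical `myFetch` key:

```scala
// build.sbt sketch (illustrative only; `myFetch` is not a real sbt key)
val myFetch = taskKey[Unit]("Fetches something over the network")

// Tag the task once, where it is defined ...
def myFetchTask: Def.Initialize[Task[Unit]] =
  Def.task {
    streams.value.log.info("fetching...")
  }.tag(Tags.Update, Tags.Network)

// ... so every wiring inherits the tags without repeating them.
myFetch := myFetchTask.value

// The tags take effect once a restriction refers to them:
// at most four network-bound tasks run concurrently.
Global / concurrentRestrictions += Tags.limit(Tags.Network, 4)
```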
|
||||
/**
|
||||
* cacheLabel - label to identify an update cache
|
||||
|
|
@ -4201,14 +4177,15 @@ object Classpaths {
|
|||
try {
|
||||
val extracted = Project.extract(st)
|
||||
val sk = (projRef / Zero / Zero / libraryDependencies).scopedKey
|
||||
val empty = extracted.structure.data.set(sk.scope, sk.key, Nil)
|
||||
val empty = extracted.structure.data.set(sk, Nil)
|
||||
val settings = extracted.structure.settings filter { (s: Setting[?]) =>
|
||||
(s.key.key == libraryDependencies.key) &&
|
||||
(s.key.scope.project == Select(projRef))
|
||||
}
|
||||
Map(settings.asInstanceOf[Seq[Setting[Seq[ModuleID]]]].flatMap { s =>
|
||||
s.init.evaluate(empty) map { _ -> s.pos }
|
||||
}*)
|
||||
settings
|
||||
.asInstanceOf[Seq[Setting[Seq[ModuleID]]]]
|
||||
.flatMap(s => s.init.evaluate(empty).map(_ -> s.pos))
|
||||
.toMap
|
||||
} catch {
|
||||
case NonFatal(_) => Map()
|
||||
}
|
||||
|
|
@ -4363,7 +4340,7 @@ object Classpaths {
|
|||
|
||||
private[sbt] def depMap(
|
||||
projects: Seq[ProjectRef],
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
log: Logger
|
||||
): Task[Map[ModuleRevisionId, ModuleDescriptor]] =
|
||||
val ivyModules = projects.flatMap { proj =>
|
||||
|
|
@ -4419,7 +4396,7 @@ object Classpaths {
|
|||
|
||||
def internalDependencyJarsTask: Initialize[Task[Classpath]] =
|
||||
ClasspathImpl.internalDependencyJarsTask
|
||||
def mkIvyConfiguration: Initialize[Task[InlineIvyConfiguration]] =
|
||||
lazy val mkIvyConfiguration: Initialize[Task[InlineIvyConfiguration]] =
|
||||
Def.task {
|
||||
val (rs, other) = (fullResolvers.value.toVector, otherResolvers.value.toVector)
|
||||
val s = streams.value
|
||||
|
|
@ -4440,14 +4417,14 @@ object Classpaths {
|
|||
def interSort(
|
||||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies
|
||||
): Seq[(ProjectRef, String)] = ClasspathImpl.interSort(projectRef, conf, data, deps)
|
||||
|
||||
def interSortConfigurations(
|
||||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies
|
||||
): Seq[(ProjectRef, ConfigRef)] =
|
||||
interSort(projectRef, conf, data, deps).map { case (projectRef, configName) =>
|
||||
|
|
@ -4491,29 +4468,24 @@ object Classpaths {
|
|||
sys.error("Configuration '" + conf + "' not defined in '" + in + "'")
|
||||
def allConfigs(conf: Configuration): Seq[Configuration] = ClasspathImpl.allConfigs(conf)
|
||||
|
||||
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
|
||||
def getConfigurations(p: ResolvedReference, data: Def.Settings): Seq[Configuration] =
|
||||
ClasspathImpl.getConfigurations(p, data)
|
||||
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
|
||||
ClasspathImpl.confOpt(configurations, conf)
|
||||
|
||||
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] =
|
||||
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Def.Settings): Task[Classpath] =
|
||||
ClasspathImpl.unmanagedLibs(dep, conf, data)
|
||||
|
||||
def getClasspath(
|
||||
key: TaskKey[Classpath],
|
||||
dep: ResolvedReference,
|
||||
conf: String,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): Task[Classpath] =
|
||||
ClasspathImpl.getClasspath(key, dep, conf, data)
|
||||
|
||||
def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
|
||||
flatten(
|
||||
(p / defaultConfiguration)
|
||||
.get(data)
|
||||
).getOrElse(Configurations.Default)
|
||||
|
||||
def flatten[T](o: Option[Option[T]]): Option[T] = o flatMap idFun
|
||||
def defaultConfigurationTask(p: ResolvedReference, data: Def.Settings): Configuration =
|
||||
(p / defaultConfiguration).get(data).flatten.getOrElse(Configurations.Default)
|
||||
|
||||
val sbtIvySnapshots: URLRepository = Resolver.sbtIvyRepo("snapshots")
|
||||
val typesafeReleases: URLRepository =
|
||||
|
|
@ -4953,7 +4925,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
|
|||
def initScoped[T](sk: ScopedKey[?], i: Initialize[T]): Initialize[T] =
|
||||
initScope(fillTaskAxis(sk.scope, sk.key), i)
|
||||
def initScope[T](s: Scope, i: Initialize[T]): Initialize[T] =
|
||||
i.mapReferenced(Project.mapScope(Scope.replaceThis(s)))
|
||||
Project.inScope(s, i)
|
||||
|
||||
/**
|
||||
* Disables post-compilation hook for determining tests for tab-completion (such as for 'test-only').
|
||||
|
|
|
|||
|
|
@ -449,8 +449,8 @@ object EvaluateTask {
|
|||
ref: ProjectRef
|
||||
): Option[(Task[T], NodeView)] = {
|
||||
val thisScope = Load.projectScope(ref)
|
||||
val resolvedScope = Scope.replaceThis(thisScope)(taskKey.scope)
|
||||
for (t <- structure.data.get(resolvedScope, taskKey.key))
|
||||
val subScoped = Project.replaceThis(thisScope)(taskKey.scopedKey)
|
||||
for (t <- structure.data.get(subScoped))
|
||||
yield (t, nodeView(state, streams, taskKey :: Nil))
|
||||
}
|
||||
def nodeView(
|
||||
|
|
|
|||
|
|
@ -12,7 +12,6 @@ import sbt.internal.{ Load, BuildStructure, Act, Aggregation, SessionSettings }
|
|||
import Scope.GlobalScope
|
||||
import Def.{ ScopedKey, Setting }
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util.AttributeKey
|
||||
import sbt.util.Show
|
||||
import std.Transform.DummyTaskMap
|
||||
import sbt.EvaluateTask.extractedTaskConfig
|
||||
|
|
@ -34,21 +33,21 @@ final case class Extracted(
* If the project axis is not explicitly specified, it is resolved to be the current project according to the extracted `session`.
* Other axes are resolved to be `Zero` if they are not specified.
*/
def get[T](key: SettingKey[T]): T = getOrError(inCurrent(key.scope), key.key)
def get[T](key: TaskKey[T]): Task[T] = getOrError(inCurrent(key.scope), key.key)
def get[T](key: SettingKey[T]): T = getOrError(inCurrent(key.scopedKey))
def get[T](key: TaskKey[T]): Task[T] = getOrError(inCurrent(key.scopedKey))

/**
* Gets the value assigned to `key` in the computed settings map wrapped in Some. If it does not exist, None is returned.
* If the project axis is not explicitly specified, it is resolved to be the current project according to the extracted `session`.
* Other axes are resolved to be `Zero` if they are not specified.
*/
def getOpt[T](key: SettingKey[T]): Option[T] = structure.data.get(inCurrent(key.scope), key.key)
def getOpt[T](key: SettingKey[T]): Option[T] = structure.data.get(inCurrent(key.scopedKey))
def getOpt[T](key: TaskKey[T]): Option[Task[T]] =
structure.data.get(inCurrent(key.scope), key.key)
structure.data.get(inCurrent(key))

private def inCurrent(scope: Scope): Scope =
if scope.project == This then scope.rescope(currentRef)
else scope
private def inCurrent[T](key: ScopedKey[T]): ScopedKey[T] =
if key.scope.project == This then key.copy(scope = key.scope.rescope(currentRef))
else key

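These `Extracted` accessors now resolve and look up a full `ScopedKey` rather than passing a `(Scope, AttributeKey)` pair around. A simplified, self-contained model of that lookup style follows; the `MiniSettings`/`MiniKey` names are invented for illustration and are not sbt types:

```scala
// A toy settings map keyed by a single scoped-key value, mirroring the shape
// of the ScopedKey-based lookups above.
final case class MiniScope(project: Option[String])
final case class MiniKey[A](scope: MiniScope, label: String)

final class MiniSettings(entries: Map[MiniKey[?], Any]) {
  def get[A](key: MiniKey[A]): Option[A] = entries.get(key).map(_.asInstanceOf[A])
  def getOrError[A](key: MiniKey[A]): A =
    get(key).getOrElse(sys.error(s"${key.label} in ${key.scope} is undefined."))
}

// Resolve the project axis before the lookup, like `inCurrent` above.
def inCurrent[A](key: MiniKey[A], currentProject: String): MiniKey[A] =
  if (key.scope.project.isEmpty) key.copy(scope = MiniScope(Some(currentProject)))
  else key

@main def miniSettingsDemo(): Unit = {
  val data = new MiniSettings(Map[MiniKey[?], Any](MiniKey[Int](MiniScope(Some("core")), "answer") -> 42))
  val resolved = inCurrent(MiniKey[Int](MiniScope(None), "answer"), "core")
  println(data.getOrError(resolved)) // 42
}
```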
/**
|
||||
* Runs the task specified by `key` and returns the transformed State and the resulting value of the task.
|
||||
|
|
@ -63,7 +62,7 @@ final case class Extracted(
|
|||
val config = extractedTaskConfig(this, structure, state)
|
||||
val value: Option[(State, Result[T])] =
|
||||
EvaluateTask(structure, key.scopedKey, state, currentRef, config)
|
||||
val (newS, result) = getOrError(rkey.scope, rkey.key, value)
|
||||
val (newS, result) = getOrError(rkey.scopedKey, value)
|
||||
(newS, EvaluateTask.processResult2(result))
|
||||
}
|
||||
|
||||
|
|
@ -116,15 +115,15 @@ final case class Extracted(
|
|||
private def resolve[K <: Scoped.ScopingSetting[K] & Scoped](key: K): K =
|
||||
Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope) / key
|
||||
|
||||
private def getOrError[T](scope: Scope, key: AttributeKey[?], value: Option[T])(implicit
|
||||
private def getOrError[T](key: ScopedKey[?], value: Option[T])(implicit
|
||||
display: Show[ScopedKey[?]]
|
||||
): T =
|
||||
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
|
||||
value.getOrElse(sys.error(display.show(key) + " is undefined."))
|
||||
|
||||
private def getOrError[T](scope: Scope, key: AttributeKey[T])(implicit
|
||||
private def getOrError[T](key: ScopedKey[T])(implicit
|
||||
display: Show[ScopedKey[?]]
|
||||
): T =
|
||||
getOrError(scope, key, structure.data.get(scope, key))(display)
|
||||
getOrError(key, structure.data.get(key))(display)
|
||||
|
||||
@deprecated(
|
||||
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ import sbt.internal.{
|
|||
SettingGraph,
|
||||
SessionSettings
|
||||
}
|
||||
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation, Settings }
|
||||
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation }
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.server.ServerHandler
|
||||
import sbt.librarymanagement.Configuration
|
||||
|
|
@ -288,10 +288,10 @@ trait ProjectExtra extends Scoped.Syntax:
|
|||
def orIdentity[A](opt: Option[A => A]): A => A =
|
||||
opt.getOrElse(identity)
|
||||
|
||||
def getHook[A](key: SettingKey[A => A], data: Settings[Scope]): A => A =
|
||||
def getHook[A](key: SettingKey[A => A], data: Def.Settings): A => A =
|
||||
orIdentity((Global / key).get(data))
|
||||
|
||||
def getHooks(data: Settings[Scope]): (State => State, State => State) =
|
||||
def getHooks(data: Def.Settings): (State => State, State => State) =
|
||||
(getHook(Keys.onLoad, data), getHook(Keys.onUnload, data))
|
||||
|
||||
def current(state: State): ProjectRef = session(state).current
|
||||
|
|
@ -373,46 +373,34 @@ trait ProjectExtra extends Scoped.Syntax:
|
|||
|
||||
private[sbt] def scopedKeyData(
|
||||
structure: BuildStructure,
|
||||
scope: Scope,
|
||||
key: AttributeKey[?]
|
||||
key: ScopedKey[?]
|
||||
): Option[ScopedKeyData[?]] =
|
||||
structure.data.get(scope, key) map { v =>
|
||||
ScopedKeyData(ScopedKey(scope, key), v)
|
||||
}
|
||||
structure.data.getKeyValue(key).map((defining, value) => ScopedKeyData(key, defining, value))
|
||||
|
||||
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[?])(
|
||||
using display: Show[ScopedKey[?]]
|
||||
def details(structure: BuildStructure, actual: Boolean, key: ScopedKey[?])(using
|
||||
display: Show[ScopedKey[?]]
|
||||
): String = {
|
||||
val scoped = ScopedKey(scope, key)
|
||||
val data = scopedKeyData(structure, key).map(_.description).getOrElse("No entry for key.")
|
||||
val description = key.key.description match
|
||||
case Some(desc) => s"Description:\n\t$desc\n"
|
||||
case None => ""
|
||||
|
||||
val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
|
||||
"No entry for key."
|
||||
}
|
||||
val description = key.description match {
|
||||
case Some(desc) => "Description:\n\t" + desc + "\n"; case None => ""
|
||||
}
|
||||
|
||||
val definingScope = structure.data.definingScope(scope, key)
|
||||
val providedBy = definingScope match {
|
||||
case Some(sc) => "Provided by:\n\t" + Scope.display(sc, key.label) + "\n"
|
||||
case None => ""
|
||||
}
|
||||
val definingScoped = definingScope match {
|
||||
case Some(sc) => ScopedKey(sc, key)
|
||||
case None => scoped
|
||||
}
|
||||
val (definingKey, providedBy) = structure.data.definingKey(key) match
|
||||
case Some(k) => k -> s"Provided by:\n\t${Scope.display(k.scope, key.key.label)}\n"
|
||||
case None => key -> ""
|
||||
val comp =
|
||||
Def.compiled(structure.settings, actual)(using
|
||||
structure.delegates,
|
||||
structure.scopeLocal,
|
||||
display
|
||||
)
|
||||
val definedAt = comp get definingScoped map { c =>
|
||||
Def.definedAtString(c.settings).capitalize
|
||||
} getOrElse ""
|
||||
val definedAt = comp
|
||||
.get(definingKey)
|
||||
.map(c => Def.definedAtString(c.settings).capitalize)
|
||||
.getOrElse("")
|
||||
|
||||
val cMap = Def.flattenLocals(comp)
|
||||
val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
|
||||
val related = cMap.keys.filter(k => k.key == key.key && k.scope != key.scope)
|
||||
def derivedDependencies(c: ScopedKey[?]): List[ScopedKey[?]] =
|
||||
comp
|
||||
.get(c)
|
||||
|
|
@ -420,14 +408,14 @@ trait ProjectExtra extends Scoped.Syntax:
|
|||
.toList
|
||||
.flatten
|
||||
|
||||
val depends = cMap.get(scoped) match {
|
||||
case Some(c) => c.dependencies.toSet; case None => Set.empty
|
||||
}
|
||||
val derivedDepends: Set[ScopedKey[?]] = derivedDependencies(definingScoped).toSet
|
||||
val depends = cMap.get(key) match
|
||||
case Some(c) => c.dependencies.toSet
|
||||
case None => Set.empty
|
||||
val derivedDepends: Set[ScopedKey[?]] = derivedDependencies(definingKey).toSet
|
||||
|
||||
val reverse = Project.reverseDependencies(cMap, scoped)
|
||||
val reverse = Project.reverseDependencies(cMap, key)
|
||||
val derivedReverse =
|
||||
reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet
|
||||
reverse.filter(r => derivedDependencies(r).contains(definingKey)).toSet
|
||||
|
||||
def printDepScopes(
|
||||
baseLabel: String,
|
||||
|
|
@ -460,7 +448,7 @@ trait ProjectExtra extends Scoped.Syntax:
|
|||
definedAt +
|
||||
printDepScopes("Dependencies", "derived from", depends, derivedDepends) +
|
||||
printDepScopes("Reverse dependencies", "derives", reverse, derivedReverse) +
|
||||
printScopes("Delegates", delegates(structure, scope, key)) +
|
||||
printScopes("Delegates", delegates(structure, key.scope, key.key)) +
|
||||
printScopes("Related", related, 10)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -274,22 +274,8 @@ object ProjectMatrix {
|
|||
|
||||
private def resolveMappings: ListMap[ProjectRow, Project] = {
|
||||
val projectIds = resolveProjectIds
|
||||
def dirSuffix(axes: Seq[VirtualAxis]): String =
|
||||
axes.map(_.directorySuffix).filter(_.nonEmpty).mkString("-")
|
||||
val projects =
|
||||
rows.map { r =>
|
||||
import VirtualAxis.*
|
||||
val axes = r.axisValues.sortBy(_.suffixOrder)
|
||||
val scalaDirSuffix = dirSuffix(axes)
|
||||
val nonScalaDirSuffix = dirSuffix(axes.filterNot(_.isInstanceOf[ScalaVersionAxis]))
|
||||
val nonScalaNorPlatformDirSuffix =
|
||||
dirSuffix(axes.filterNot(_.isInstanceOf[ScalaVersionAxis | PlatformAxis]))
|
||||
val platform = axes
|
||||
.collect { case pa: VirtualAxis.PlatformAxis =>
|
||||
pa
|
||||
}
|
||||
.headOption
|
||||
.getOrElse(sys.error(s"platform axis is missing in $axes"))
|
||||
val childId = projectIds(r)
|
||||
val deps = dependencies.map { resolveMatrixDependency(_, r) } ++ nonMatrixDependencies
|
||||
val aggs = aggregate.map { case ref: LocalProjectMatrix =>
|
||||
|
|
@ -303,32 +289,7 @@ object ProjectMatrix {
|
|||
.aggregate(aggs*)
|
||||
.setPlugins(plugins)
|
||||
.configs(configurations*)
|
||||
.settings(
|
||||
name := self.id
|
||||
)
|
||||
.settings(
|
||||
r.scalaVersionOpt match {
|
||||
case Some(sv) =>
|
||||
List(scalaVersion := sv)
|
||||
case _ =>
|
||||
List(autoScalaLibrary := false, crossPaths := false)
|
||||
}
|
||||
)
|
||||
.settings(
|
||||
outputPath := {
|
||||
val o = outputPath.value
|
||||
if nonScalaNorPlatformDirSuffix.nonEmpty then s"$o/$nonScalaNorPlatformDirSuffix"
|
||||
else o
|
||||
},
|
||||
sourceDirectory := base.getAbsoluteFile / "src",
|
||||
unmanagedBase := base.getAbsoluteFile / "lib",
|
||||
ProjectExtra.inConfig(Compile)(makeSources(nonScalaDirSuffix, scalaDirSuffix)),
|
||||
ProjectExtra.inConfig(Test)(makeSources(nonScalaDirSuffix, scalaDirSuffix)),
|
||||
projectDependencies := projectDependenciesTask.value,
|
||||
virtualAxes := axes,
|
||||
projectMatrixBaseDirectory := base,
|
||||
)
|
||||
.settings(self.settings)
|
||||
.settings(baseSettings ++ rowSettings(r) ++ self.settings)
|
||||
.configure(transforms*)
|
||||
|
||||
r -> r.process(p)
|
||||
|
|
@ -336,8 +297,10 @@ object ProjectMatrix {
|
|||
ListMap(projects*)
|
||||
}
|
||||
|
||||
override lazy val componentProjects: Seq[Project] = resolvedMappings.values.toList
|
||||
|
||||
// backport of https://github.com/sbt/sbt/pull/5767
|
||||
def projectDependenciesTask: Def.Initialize[Task[Seq[ModuleID]]] =
|
||||
lazy val projectDependenciesTask: Def.Initialize[Task[Seq[ModuleID]]] =
|
||||
Def.task {
|
||||
val orig = projectDependencies.value
|
||||
val sbv = scalaBinaryVersion.value
|
||||
|
|
@ -363,7 +326,39 @@ object ProjectMatrix {
|
|||
}
|
||||
}
|
||||
|
||||
override lazy val componentProjects: Seq[Project] = resolvedMappings.values.toList
|
||||
private lazy val noScalaLibrary: Seq[Def.Setting[?]] =
|
||||
Seq(autoScalaLibrary := false, crossPaths := false)
|
||||
|
||||
private lazy val baseSettings: Seq[Def.Setting[?]] = Def.settings(
|
||||
name := self.id,
|
||||
sourceDirectory := base.getAbsoluteFile / "src",
|
||||
unmanagedBase := base.getAbsoluteFile / "lib",
|
||||
projectDependencies := projectDependenciesTask.value,
|
||||
projectMatrixBaseDirectory := base,
|
||||
)
|
||||
|
||||
private def rowSettings(r: ProjectRow): Seq[Def.Setting[?]] =
|
||||
import VirtualAxis.*
|
||||
val axes = r.axisValues.sortBy(_.suffixOrder)
|
||||
def dirSuffix(axes: Seq[VirtualAxis]): String =
|
||||
axes.map(_.directorySuffix).filter(_.nonEmpty).mkString("-")
|
||||
val scalaDirSuffix = dirSuffix(axes)
|
||||
val nonScalaDirSuffix = dirSuffix(axes.filterNot(_.isInstanceOf[ScalaVersionAxis]))
|
||||
val nonScalaNorPlatformDirSuffix = dirSuffix(
|
||||
axes.filterNot(_.isInstanceOf[ScalaVersionAxis | PlatformAxis])
|
||||
)
|
||||
Def.settings(
|
||||
r.scalaVersionOpt match {
|
||||
case Some(sv) => Seq(scalaVersion := sv)
|
||||
case _ => noScalaLibrary
|
||||
},
|
||||
if nonScalaNorPlatformDirSuffix.nonEmpty then
|
||||
Seq(outputPath ~= (o => s"$o/$nonScalaNorPlatformDirSuffix"))
|
||||
else Seq.empty,
|
||||
ProjectExtra.inConfig(Compile)(makeSources(nonScalaDirSuffix, scalaDirSuffix)),
|
||||
ProjectExtra.inConfig(Test)(makeSources(nonScalaDirSuffix, scalaDirSuffix)),
|
||||
virtualAxes := axes,
|
||||
)
|
||||
|
||||
private def resolveMatrixAggregate(
|
||||
other: ProjectMatrix,
|
||||
|
|
|
|||
|
|
@ -11,16 +11,14 @@ package sbt
import Def.ScopedKey
import sbt.internal.util.KeyTag

final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) {
val key = scoped.key
val scope = scoped.scope
def typeName: String = key.tag.toString
final case class ScopedKeyData[A](key: ScopedKey[A], definingKey: ScopedKey[A], value: Any) {
def typeName: String = key.key.tag.toString
def settingValue: Option[Any] =
key.tag match
key.key.tag match
case KeyTag.Setting(_) => Some(value)
case _ => None
def description: String =
key.tag match
key.key.tag match
case KeyTag.Task(typeArg) => s"Task: $typeArg"
case KeyTag.SeqTask(typeArg) => s"Task: Seq[$typeArg]"
case KeyTag.InputTask(typeArg) => s"Input task: $typeArg"
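The reworked `ScopedKeyData` keeps both the queried key and the key that actually defines the value, and derives its description by matching on the key's tag. A stripped-down model of that tag-matching idea, with an invented `ToyTag` ADT rather than sbt's `KeyTag`:

```scala
// Invented stand-in for KeyTag, to show the shape of the description logic.
sealed trait ToyTag
object ToyTag {
  final case class Setting(tpe: String) extends ToyTag
  final case class Task(tpe: String) extends ToyTag
  final case class InputTask(tpe: String) extends ToyTag
}

def describe(tag: ToyTag): String = tag match {
  case ToyTag.Setting(t)   => s"Setting: $t"
  case ToyTag.Task(t)      => s"Task: $t"
  case ToyTag.InputTask(t) => s"Input task: $t"
}

@main def tagDemo(): Unit =
  println(describe(ToyTag.Task("Seq[File]"))) // Task: Seq[File]
```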
|
|
|
|||
|
|
@ -57,11 +57,13 @@ object SessionVar {
|
|||
key: ScopedKey[Task[T]],
|
||||
context: Scope,
|
||||
state: State
|
||||
): ScopedKey[Task[T]] = {
|
||||
val subScope = Scope.replaceThis(context)(key.scope)
|
||||
val scope = Project.structure(state).data.definingScope(subScope, key.key) getOrElse subScope
|
||||
ScopedKey(scope, key.key)
|
||||
}
|
||||
): ScopedKey[Task[T]] =
|
||||
val subScoped = Project.replaceThis(context)(key)
|
||||
Project
|
||||
.structure(state)
|
||||
.data
|
||||
.definingKey(subScoped)
|
||||
.getOrElse(subScoped)
|
||||
|
||||
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
|
||||
Project.structure(state).streams(state).use(key) { s =>
|
||||
|
|
|
|||
|
|
@ -24,7 +24,6 @@ import sbt.internal.util.{
|
|||
AttributeMap,
|
||||
IMap,
|
||||
MessageOnlyException,
|
||||
Settings,
|
||||
Util,
|
||||
}
|
||||
import sbt.util.Show
|
||||
|
|
@ -61,7 +60,7 @@ object Act {
|
|||
current: ProjectRef,
|
||||
defaultConfigs: Option[ResolvedReference] => Seq[String],
|
||||
keyMap: Map[String, AttributeKey[?]],
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): Parser[ScopedKey[Any]] =
|
||||
scopedKeySelected(index, current, defaultConfigs, keyMap, data, askProject = true)
|
||||
.map(_.key.asInstanceOf[ScopedKey[Any]])
|
||||
|
|
@ -115,7 +114,7 @@ object Act {
|
|||
current: ProjectRef,
|
||||
defaultConfigs: Option[ResolvedReference] => Seq[String],
|
||||
keyMap: Map[String, AttributeKey[?]],
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
askProject: Boolean,
|
||||
): Parser[ParsedKey] =
|
||||
scopedKeyFull(index, current, defaultConfigs, keyMap, askProject = askProject).flatMap {
|
||||
|
|
@ -197,7 +196,7 @@ object Act {
|
|||
key
|
||||
)
|
||||
|
||||
def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(implicit
|
||||
def select(allKeys: Seq[Parser[ParsedKey]], data: Def.Settings)(implicit
|
||||
show: Show[ScopedKey[?]]
|
||||
): Parser[ParsedKey] =
|
||||
seq(allKeys) flatMap { ss =>
|
||||
|
|
@ -235,10 +234,7 @@ object Act {
|
|||
def showAmbiguous(keys: Seq[ScopedKey[?]])(implicit show: Show[ScopedKey[?]]): String =
|
||||
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
|
||||
|
||||
def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = {
|
||||
val key = parsed.key
|
||||
data.definingScope(key.scope, key.key) == Some(key.scope)
|
||||
}
|
||||
def isValid(data: Def.Settings)(parsed: ParsedKey): Boolean = data.contains(parsed.key)
|
||||
|
||||
def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] =
|
||||
(p !!! ("Expected " + label)).examples(exs)
|
||||
|
|
@ -571,28 +567,14 @@ object Act {
|
|||
def keyValues[T](extracted: Extracted)(keys: Seq[ScopedKey[T]]): Values[T] =
|
||||
keyValues(extracted.structure)(keys)
|
||||
def keyValues[T](structure: BuildStructure)(keys: Seq[ScopedKey[T]]): Values[T] =
|
||||
keys.flatMap { key =>
|
||||
getValue(structure.data, key.scope, key.key) map { value =>
|
||||
KeyValue(key, value)
|
||||
}
|
||||
}
|
||||
private def anyKeyValues(
|
||||
structure: BuildStructure,
|
||||
keys: Seq[ScopedKey[?]]
|
||||
): Seq[KeyValue[?]] =
|
||||
keys.flatMap { key =>
|
||||
getValue(structure.data, key.scope, key.key) map { value =>
|
||||
KeyValue(key, value)
|
||||
}
|
||||
}
|
||||
keys.flatMap(key => getValue(structure.data, key).map(KeyValue(key, _)))
|
||||
|
||||
private def getValue[T](
|
||||
data: Settings[Scope],
|
||||
scope: Scope,
|
||||
key: AttributeKey[T]
|
||||
): Option[T] =
|
||||
if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key)
|
||||
else data.get(scope, key)
|
||||
private def anyKeyValues(structure: BuildStructure, keys: Seq[ScopedKey[?]]): Seq[KeyValue[?]] =
|
||||
keys.flatMap(key => getValue(structure.data, key).map(KeyValue(key, _)))
|
||||
|
||||
private def getValue[T](data: Def.Settings, key: ScopedKey[T]): Option[T] =
|
||||
if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(key)
|
||||
else data.get(key)
|
||||
|
||||
def requireSession[T](s: State, p: => Parser[T]): Parser[T] =
|
||||
if s.get(sessionSettings).isEmpty then failure("No project loaded") else p
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ package internal
|
|||
|
||||
import java.text.DateFormat
|
||||
|
||||
import sbt.Def.ScopedKey
|
||||
import sbt.Def.{ ScopedKey, Settings }
|
||||
import sbt.Keys.{ showSuccess, showTiming, timingFormat }
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.SlashSyntax0.given
|
||||
|
|
@ -157,7 +157,7 @@ object Aggregation {
|
|||
private def timingString(
|
||||
startTime: Long,
|
||||
endTime: Long,
|
||||
data: Settings[Scope],
|
||||
data: Settings,
|
||||
currentRef: ProjectRef,
|
||||
): String = {
|
||||
val format = (currentRef / timingFormat).get(data) getOrElse defaultFormat
|
||||
|
|
@ -266,29 +266,40 @@ object Aggregation {
else extra.aggregates.forward(ref)
}

/**
* Compute the reverse aggregate keys of all the `keys` at once.
* This is more performant than computing the reverse aggregate keys of each key individually
* because of the duplicates. One aggregate key is the aggregation of many keys.
*/
def reverseAggregate[Proj](
keys: Set[ScopedKey[?]],
extra: BuildUtil[Proj],
): Iterable[ScopedKey[?]] =
val mask = ScopeMask()
def recur(keys: Set[ScopedKey[?]], acc: Set[ScopedKey[?]]): Set[ScopedKey[?]] =
if keys.isEmpty then acc
else
val aggKeys = for
key <- keys
ref <- projectAggregates(key.scope.project.toOption, extra, reverse = true)
toResolve = key.scope.copy(project = Select(ref))
resolved = Resolve(extra, Zero, key.key, mask)(toResolve)
scoped = ScopedKey(resolved, key.key)
if !acc.contains(scoped)
yield scoped
val filteredAggKeys = aggKeys.filter(aggregationEnabled(_, extra.data))
// recursive because an aggregate project can be aggregated in another aggregate project
recur(filteredAggKeys, acc ++ filteredAggKeys)
recur(keys, keys)

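The `recur` helper above is a standard fixed-point/closure computation: expand the current frontier, keep only keys not yet accumulated, and stop when nothing new appears. The same shape, reduced to a self-contained generic helper (the names are illustrative, not sbt API):

```scala
// Generic transitive-closure helper with the same structure as `recur` above.
// `expand` plays the role of "reverse-aggregating keys of"; each key is expanded once.
def closure[A](start: Set[A])(expand: A => Iterable[A]): Set[A] = {
  @annotation.tailrec
  def loop(frontier: Set[A], acc: Set[A]): Set[A] =
    if (frontier.isEmpty) acc
    else {
      val next = frontier.flatMap(expand).diff(acc)
      loop(next, acc ++ next)
    }
  loop(start, start)
}

@main def closureDemo(): Unit = {
  // "core" is aggregated by "root", which is aggregated by "super-root".
  val aggregatedBy = Map("core" -> Set("root"), "root" -> Set("super-root"))
  println(closure(Set("core"))(k => aggregatedBy.getOrElse(k, Set.empty)))
  // contains core, root, super-root
}
```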
def aggregate[A1, Proj](
|
||||
key: ScopedKey[A1],
|
||||
rawMask: ScopeMask,
|
||||
extra: BuildUtil[Proj],
|
||||
reverse: Boolean = false
|
||||
extra: BuildUtil[Proj]
|
||||
): Seq[ScopedKey[A1]] =
|
||||
val mask = rawMask.copy(project = true)
|
||||
Dag.topologicalSort(key): (k) =>
|
||||
if reverse then reverseAggregatedKeys(k, extra, mask)
|
||||
else if aggregationEnabled(k, extra.data) then aggregatedKeys(k, extra, mask)
|
||||
else Nil
|
||||
|
||||
def reverseAggregatedKeys[T](
|
||||
key: ScopedKey[T],
|
||||
extra: BuildUtil[?],
|
||||
mask: ScopeMask
|
||||
): Seq[ScopedKey[T]] =
|
||||
projectAggregates(key.scope.project.toOption, extra, reverse = true) flatMap { ref =>
|
||||
val toResolve = key.scope.copy(project = Select(ref))
|
||||
val resolved = Resolve(extra, Zero, key.key, mask)(toResolve)
|
||||
val skey = ScopedKey(resolved, key.key)
|
||||
if (aggregationEnabled(skey, extra.data)) skey :: Nil else Nil
|
||||
}
|
||||
if aggregationEnabled(k, extra.data) then aggregatedKeys(k, extra, mask) else Nil
|
||||
|
||||
def aggregatedKeys[T](
|
||||
key: ScopedKey[T],
|
||||
|
|
@ -301,7 +312,7 @@ object Aggregation {
|
|||
ScopedKey(resolved, key.key)
|
||||
}
|
||||
|
||||
def aggregationEnabled(key: ScopedKey[?], data: Settings[Scope]): Boolean =
|
||||
def aggregationEnabled(key: ScopedKey[?], data: Settings): Boolean =
|
||||
(Scope.fillTaskAxis(key.scope, key.key) / Keys.aggregate).get(data).getOrElse(true)
|
||||
private[sbt] val suppressShow =
|
||||
AttributeKey[Boolean]("suppress-aggregation-show", Int.MaxValue)
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ import sbt.SlashSyntax0.given
|
|||
import BuildStreams.Streams
|
||||
import sbt.io.syntax._
|
||||
import sbt.internal.inc.MappedFileConverter
|
||||
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed, Settings }
|
||||
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed }
|
||||
import sbt.internal.util.Attributed.data
|
||||
import sbt.util.Logger
|
||||
import xsbti.FileConverter
|
||||
|
|
@ -29,7 +29,7 @@ final class BuildStructure(
|
|||
val units: Map[URI, LoadedBuildUnit],
|
||||
val root: URI,
|
||||
val settings: Seq[Setting[?]],
|
||||
val data: Settings[Scope],
|
||||
val data: Def.Settings,
|
||||
val index: StructureIndex,
|
||||
val streams: State => Streams,
|
||||
val delegates: Scope => Seq[Scope],
|
||||
|
|
@ -270,7 +270,7 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) {
|
|||
unit.projects.map(p => ProjectRef(build, p.id) -> p)
|
||||
}.toIndexedSeq
|
||||
|
||||
def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] =
|
||||
def extra(data: Def.Settings)(keyIndex: KeyIndex): BuildUtil[ResolvedProject] =
|
||||
BuildUtil(root, units, keyIndex, data)
|
||||
|
||||
private[sbt] def autos = GroupedAutoPlugins(units)
|
||||
|
|
@ -308,7 +308,7 @@ object BuildStreams {
|
|||
def mkStreams(
|
||||
units: Map[URI, LoadedBuildUnit],
|
||||
root: URI,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): State => Streams = s => {
|
||||
s.get(Keys.stateStreams).getOrElse {
|
||||
std.Streams(
|
||||
|
|
@ -323,7 +323,7 @@ object BuildStreams {
|
|||
}
|
||||
}
|
||||
|
||||
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(
|
||||
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Def.Settings)(
|
||||
scoped: ScopedKey[?]
|
||||
): File =
|
||||
resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped))
|
||||
|
|
@ -385,7 +385,7 @@ object BuildStreams {
|
|||
units: Map[URI, LoadedBuildUnit],
|
||||
root: URI,
|
||||
scoped: ScopedKey[?],
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): File =
|
||||
scoped.scope.project match {
|
||||
case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath
|
||||
|
|
@ -396,9 +396,9 @@ object BuildStreams {
|
|||
case This => sys.error("Unresolved project reference (This) in " + displayFull(scoped))
|
||||
}
|
||||
|
||||
def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File =
|
||||
def refTarget(ref: ResolvedReference, fallbackBase: File, data: Def.Settings): File =
|
||||
refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data)
|
||||
|
||||
def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File =
|
||||
def refTarget(scope: Scope, fallbackBase: File, data: Def.Settings): File =
|
||||
((scope / Keys.target).get(data) getOrElse outputDirectory(fallbackBase)) / StreamsDirectory
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,13 +9,13 @@
|
|||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.internal.util.{ Relation, Settings, Dag }
|
||||
import sbt.internal.util.{ Relation, Dag }
|
||||
|
||||
import java.net.URI
|
||||
|
||||
final class BuildUtil[Proj](
|
||||
val keyIndex: KeyIndex,
|
||||
val data: Settings[Scope],
|
||||
val data: Def.Settings,
|
||||
val root: URI,
|
||||
val rootProjectID: URI => String,
|
||||
val project: (URI, String) => Proj,
|
||||
|
|
@ -57,7 +57,7 @@ object BuildUtil {
|
|||
root: URI,
|
||||
units: Map[URI, LoadedBuildUnit],
|
||||
keyIndex: KeyIndex,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): BuildUtil[ResolvedProject] = {
|
||||
val getp = (build: URI, project: String) => Load.getProject(units, build, project)
|
||||
val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name))
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ import sbt.Keys._
|
|||
import sbt.nio.Keys._
|
||||
import sbt.nio.file.{ Glob, RecursiveGlob }
|
||||
import sbt.Def.Initialize
|
||||
import sbt.internal.util.{ Attributed, Dag, Settings }
|
||||
import sbt.internal.util.{ Attributed, Dag }
|
||||
import sbt.librarymanagement.{ Configuration, TrackLevel }
|
||||
import sbt.librarymanagement.Configurations.names
|
||||
import sbt.std.TaskExtra._
|
||||
|
|
@ -180,7 +180,7 @@ private[sbt] object ClasspathImpl {
|
|||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
self: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies,
|
||||
track: TrackLevel,
|
||||
log: Logger
|
||||
|
|
@ -198,7 +198,7 @@ private[sbt] object ClasspathImpl {
|
|||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
self: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies,
|
||||
track: TrackLevel,
|
||||
log: Logger
|
||||
|
|
@ -244,7 +244,7 @@ private[sbt] object ClasspathImpl {
|
|||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
self: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies,
|
||||
track: TrackLevel,
|
||||
log: Logger
|
||||
|
|
@ -282,7 +282,7 @@ private[sbt] object ClasspathImpl {
|
|||
def unmanagedDependencies0(
|
||||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies,
|
||||
log: Logger
|
||||
): Initialize[Task[Classpath]] =
|
||||
|
|
@ -306,7 +306,7 @@ private[sbt] object ClasspathImpl {
|
|||
def unmanagedLibs(
|
||||
dep: ResolvedReference,
|
||||
conf: String,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): Task[Classpath] =
|
||||
getClasspath(unmanagedJars, dep, conf, data)
|
||||
|
||||
|
|
@ -315,7 +315,7 @@ private[sbt] object ClasspathImpl {
|
|||
deps: BuildDependencies,
|
||||
conf: Configuration,
|
||||
self: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
track: TrackLevel,
|
||||
includeSelf: Boolean,
|
||||
log: Logger
|
||||
|
|
@ -346,7 +346,7 @@ private[sbt] object ClasspathImpl {
|
|||
def interSort(
|
||||
projectRef: ProjectRef,
|
||||
conf: Configuration,
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
deps: BuildDependencies
|
||||
): Seq[(ProjectRef, String)] =
|
||||
val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
|
||||
|
|
@ -431,7 +431,7 @@ private[sbt] object ClasspathImpl {
|
|||
def allConfigs(conf: Configuration): Seq[Configuration] =
|
||||
Dag.topologicalSort(conf)(_.extendsConfigs)
|
||||
|
||||
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
|
||||
def getConfigurations(p: ResolvedReference, data: Def.Settings): Seq[Configuration] =
|
||||
(p / ivyConfigurations).get(data).getOrElse(Nil)
|
||||
|
||||
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
|
||||
|
|
@ -441,14 +441,14 @@ private[sbt] object ClasspathImpl {
|
|||
key: TaskKey[Seq[A]],
|
||||
dep: ResolvedReference,
|
||||
conf: Configuration,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): Task[Seq[A]] = getClasspath(key, dep, conf.name, data)
|
||||
|
||||
def getClasspath[A](
|
||||
key: TaskKey[Seq[A]],
|
||||
dep: ResolvedReference,
|
||||
conf: String,
|
||||
data: Settings[Scope]
|
||||
data: Def.Settings
|
||||
): Task[Seq[A]] =
|
||||
(dep / ConfigKey(conf) / key).get(data) match {
|
||||
case Some(x) => x
|
||||
|
|
|
|||
|
|
@ -114,11 +114,9 @@ private[sbt] object Clean {
|
|||
// This is the special portion of the task where we clear out the relevant streams
|
||||
// and file outputs of a task.
|
||||
val streamsKey = scope.task.toOption.map(k => ScopedKey(scope.copy(task = Zero), k))
|
||||
val stampKey = ScopedKey(scope, inputFileStamps.key)
|
||||
val stampsKey =
|
||||
extracted.structure.data.getDirect(scope, inputFileStamps.key) match {
|
||||
case Some(_) => ScopedKey(scope, inputFileStamps.key) :: Nil
|
||||
case _ => Nil
|
||||
}
|
||||
if extracted.structure.data.contains(stampKey) then stampKey :: Nil else Nil
|
||||
val streamsGlobs =
|
||||
(streamsKey.toSeq ++ stampsKey)
|
||||
.map(k => manager(k).cacheDirectory.toPath.toGlob / **)
|
||||
|
|
|
|||
|
|
@ -92,7 +92,7 @@ object GlobalPlugin {
|
|||
(prods ++ intcp).distinct
|
||||
)(updateReport.value)
|
||||
}
|
||||
val resolvedTaskInit = taskInit.mapReferenced(Project.mapScope(Scope.replaceThis(p)))
|
||||
val resolvedTaskInit = taskInit.mapReferenced(Project.replaceThis(p))
|
||||
val task = resolvedTaskInit.evaluate(data)
|
||||
val roots = resolvedTaskInit.dependencies
|
||||
evaluate(state, structure, task, roots)
|
||||
|
|
|
|||
|
|
@ -87,8 +87,7 @@ object Inspect {
|
|||
val extracted = Project.extract(s)
|
||||
import extracted._
|
||||
option match {
|
||||
case Details(actual) =>
|
||||
Project.details(extracted.structure, actual, sk.scope, sk.key)
|
||||
case Details(actual) => Project.details(extracted.structure, actual, sk)
|
||||
case DependencyTreeMode =>
|
||||
val basedir = new File(Project.session(s).current.build)
|
||||
Project
|
||||
|
|
|
|||
|
|
@ -29,30 +29,14 @@ object KeyIndex {
}

def aggregate(
known: Iterable[ScopedKey[?]],
known: Set[ScopedKey[?]],
extra: BuildUtil[?],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]
): ExtendableKeyIndex = {
/*
* Used to be:
* (base(projects, configurations) /: known) { (index, key) =>
* index.addAggregated(key, extra)
* }
* This was a significant serial bottleneck during project loading that we can work around by
* computing the aggregations in parallel and then bulk adding them to the index.
*/
import scala.collection.parallel.CollectionConverters.*
val toAggregate = known.par.map {
case key if validID(key.key.label) =>
Aggregation.aggregate(key, ScopeMask(), extra, reverse = true)
case _ => Nil
}
toAggregate.foldLeft(base(projects, configurations)) {
case (index, Nil) => index
case (index, keys) => keys.foldLeft(index)(_.add(_))
}
}
): ExtendableKeyIndex =
Aggregation
.reverseAggregate(known.filter(k => validID(k.key.label)), extra)
.foldLeft(base(projects, configurations))(_.add(_))

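Both the old and the new version finish by folding a batch of pre-computed keys into an immutable index; the new one simply obtains that batch from `Aggregation.reverseAggregate` instead of a parallel map. A minimal stand-alone sketch of the fold-into-index step, with an invented `ToyIndex` type rather than sbt's `ExtendableKeyIndex`:

```scala
// Invented immutable index; `add` returns a new index, so a batch of keys can
// be folded in exactly like `foldLeft(base(...))(_.add(_))` above.
final case class ToyIndex(labelsByProject: Map[String, Set[String]] = Map.empty) {
  def add(project: String, label: String): ToyIndex =
    copy(labelsByProject = labelsByProject.updatedWith(project) {
      case Some(labels) => Some(labels + label)
      case None         => Some(Set(label))
    })
}

def bulkAdd(base: ToyIndex, keys: Iterable[(String, String)]): ToyIndex =
  keys.foldLeft(base) { case (index, (project, label)) => index.add(project, label) }

@main def indexDemo(): Unit =
  println(bulkAdd(ToyIndex(), Seq("core" -> "compile", "core" -> "test", "util" -> "compile")))
```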
private def base(
|
||||
projects: Map[URI, Set[String]],
|
||||
|
|
@ -278,7 +262,7 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn
|
|||
|
||||
def addAggregated(scoped: ScopedKey[?], extra: BuildUtil[?]): ExtendableKeyIndex =
|
||||
if (validID(scoped.key.label)) {
|
||||
val aggregateProjects = Aggregation.aggregate(scoped, ScopeMask(), extra, reverse = true)
|
||||
val aggregateProjects = Aggregation.reverseAggregate(Set(scoped), extra)
|
||||
aggregateProjects.foldLeft(this: ExtendableKeyIndex)(_.add(_))
|
||||
} else this
|
||||
|
||||
|
|
|
|||
|
|
@ -94,7 +94,7 @@ object LintUnused {
|
|||
}
|
||||
|
||||
def lintResultLines(
|
||||
result: Seq[(ScopedKey[?], String, Vector[SourcePosition])]
|
||||
result: Seq[(ScopedKey[?], String, Seq[SourcePosition])]
|
||||
): Vector[String] = {
|
||||
import scala.collection.mutable.ListBuffer
|
||||
val buffer = ListBuffer.empty[String]
|
||||
|
|
@ -127,7 +127,7 @@ object LintUnused {
|
|||
state: State,
|
||||
includeKeys: String => Boolean,
|
||||
excludeKeys: String => Boolean
|
||||
): Seq[(ScopedKey[?], String, Vector[SourcePosition])] = {
|
||||
): Seq[(ScopedKey[?], String, Seq[SourcePosition])] = {
|
||||
val extracted = Project.extract(state)
|
||||
val structure = extracted.structure
|
||||
val display = Def.showShortKey(None) // extracted.showKey
|
||||
|
|
@ -135,17 +135,11 @@ object LintUnused {
|
|||
val cMap = Def.flattenLocals(comp)
|
||||
val used: Set[ScopedKey[?]] = cMap.values.flatMap(_.dependencies).toSet
|
||||
val unused: Seq[ScopedKey[?]] = cMap.keys.filter(!used.contains(_)).toSeq
|
||||
val withDefinedAts: Seq[UnusedKey] = unused map { u =>
|
||||
val definingScope = structure.data.definingScope(u.scope, u.key)
|
||||
val definingScoped = definingScope match {
|
||||
case Some(sc) => ScopedKey(sc, u.key)
|
||||
case _ => u
|
||||
}
|
||||
val definedAt = comp.get(definingScoped) match {
|
||||
case Some(c) => definedAtString(c.settings.toVector)
|
||||
val withDefinedAts: Seq[UnusedKey] = unused.map { u =>
|
||||
val data = Project.scopedKeyData(structure, u)
|
||||
val definedAt = comp.get(data.map(_.definingKey).getOrElse(u)) match
|
||||
case Some(c) => definedAtString(c.settings)
|
||||
case _ => Vector.empty
|
||||
}
|
||||
val data = Project.scopedKeyData(structure, u.scope, u.key)
|
||||
UnusedKey(u, definedAt, data)
|
||||
}
|
||||
|
||||
|
|
@ -167,18 +161,16 @@ object LintUnused {
|
|||
&& isLocallyDefined(u) =>
|
||||
u
|
||||
}
|
||||
(unusedKeys map { u =>
|
||||
(u.scoped, display.show(u.scoped), u.positions)
|
||||
}).sortBy(_._2)
|
||||
unusedKeys.map(u => (u.scoped, display.show(u.scoped), u.positions)).sortBy(_._2)
|
||||
}
|
||||
|
||||
private case class UnusedKey(
|
||||
scoped: ScopedKey[?],
|
||||
positions: Vector[SourcePosition],
|
||||
positions: Seq[SourcePosition],
|
||||
data: Option[ScopedKeyData[?]]
|
||||
)
|
||||
|
||||
private def definedAtString(settings: Vector[Setting[?]]): Vector[SourcePosition] = {
|
||||
private def definedAtString(settings: Seq[Setting[?]]): Seq[SourcePosition] = {
|
||||
settings flatMap { setting =>
|
||||
setting.pos match {
|
||||
case NoPosition => Vector.empty
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ import sbt.internal.inc.classpath.ClasspathUtil
|
|||
import sbt.internal.inc.{ MappedFileConverter, ScalaInstance, ZincLmUtil, ZincUtil }
|
||||
import sbt.internal.util.Attributed.data
|
||||
import sbt.internal.util.Types.const
|
||||
import sbt.internal.util.{ Attributed, Settings }
|
||||
import sbt.internal.util.Attributed
|
||||
import sbt.internal.server.BuildServerEvalReporter
|
||||
import sbt.io.{ GlobFilter, IO }
|
||||
import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResolution, IvyPaths }
|
||||
|
|
@ -310,7 +310,7 @@ private[sbt] object Load {
|
|||
(rootEval, bs)
|
||||
}
|
||||
|
||||
private def checkTargets(data: Settings[Scope]): Option[String] =
|
||||
private def checkTargets(data: Def.Settings): Option[String] =
|
||||
val dups = overlappingTargets(allTargets(data))
|
||||
if (dups.isEmpty) None
|
||||
else {
|
||||
|
|
@ -323,7 +323,7 @@ private[sbt] object Load {
|
|||
private def overlappingTargets(targets: Seq[(ProjectRef, File)]): Map[File, Seq[ProjectRef]] =
|
||||
targets.groupBy(_._2).view.filter(_._2.size > 1).mapValues(_.map(_._1)).toMap
|
||||
|
||||
private def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
|
||||
private def allTargets(data: Def.Settings): Seq[(ProjectRef, File)] = {
|
||||
import ScopeFilter._
|
||||
val allProjects = ScopeFilter(Make.inAnyProject)
|
||||
val targetAndRef = Def.setting { (Keys.thisProjectRef.value, Keys.target.value) }
|
||||
|
|
@ -369,19 +369,19 @@ private[sbt] object Load {
|
|||
if (isDummy(tk)) tk else tk.set(Keys.taskDefinitionKey, key)
|
||||
|
||||
def structureIndex(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
settings: Seq[Setting[?]],
|
||||
extra: KeyIndex => BuildUtil[?],
|
||||
projects: Map[URI, LoadedBuildUnit]
|
||||
): StructureIndex = {
|
||||
val keys = Index.allKeys(settings)
|
||||
val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key)
|
||||
val scopedKeys = keys ++ data.allKeys((s, k) => ScopedKey(s, k)).toVector
|
||||
val attributeKeys = data.attributeKeys ++ keys.map(_.key)
|
||||
val scopedKeys = keys ++ data.keys
|
||||
val projectsMap = projects.view.mapValues(_.defined.keySet).toMap
|
||||
val configsMap: Map[String, Seq[Configuration]] =
|
||||
projects.values.flatMap(bu => bu.defined map { case (k, v) => (k, v.configurations) }).toMap
|
||||
val keyIndex = KeyIndex(scopedKeys.toVector, projectsMap, configsMap)
|
||||
val aggIndex = KeyIndex.aggregate(scopedKeys.toVector, extra(keyIndex), projectsMap, configsMap)
|
||||
val keyIndex = KeyIndex(scopedKeys, projectsMap, configsMap)
|
||||
val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap, configsMap)
|
||||
new StructureIndex(
|
||||
Index.stringToKeyMap(attributeKeys),
|
||||
Index.triggers(data),
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ import java.io.PrintWriter
|
|||
|
||||
sealed abstract class LogManager {
|
||||
def apply(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
writer: PrintWriter,
|
||||
|
|
@ -30,20 +30,20 @@ sealed abstract class LogManager {
|
|||
): ManagedLogger
|
||||
@deprecated("Use alternate apply that provides a LoggerContext", "1.4.0")
|
||||
def apply(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
writer: PrintWriter
|
||||
): ManagedLogger = apply(data, state, task, writer, LoggerContext.globalContext)
|
||||
|
||||
def backgroundLog(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
context: LoggerContext
|
||||
): ManagedLogger
|
||||
@deprecated("Use alternate background log that provides a LoggerContext", "1.4.0")
|
||||
final def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[?]): ManagedLogger =
|
||||
final def backgroundLog(data: Def.Settings, state: State, task: ScopedKey[?]): ManagedLogger =
|
||||
backgroundLog(data, state, task, LoggerContext.globalContext)
|
||||
}
|
||||
|
||||
|
|
@ -62,7 +62,7 @@ object LogManager {
|
|||
// This is called by mkStreams
|
||||
//
|
||||
def construct(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State
|
||||
): (ScopedKey[?], PrintWriter) => ManagedLogger =
|
||||
(task: ScopedKey[?], to: PrintWriter) => {
|
||||
|
|
@ -74,7 +74,7 @@ object LogManager {
|
|||
|
||||
@deprecated("Use alternate constructBackgroundLog that provides a LoggerContext", "1.8.0")
|
||||
def constructBackgroundLog(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State
|
||||
): ScopedKey[?] => ManagedLogger = {
|
||||
val context = state.get(Keys.loggerContext).getOrElse(LoggerContext.globalContext)
|
||||
|
|
@ -82,7 +82,7 @@ object LogManager {
|
|||
}
|
||||
|
||||
def constructBackgroundLog(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
context: LoggerContext
|
||||
): (ScopedKey[?]) => ManagedLogger =
|
||||
|
|
@ -119,7 +119,7 @@ object LogManager {
|
|||
extra: AppenderSupplier
|
||||
) extends LogManager {
|
||||
def apply(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
to: PrintWriter,
|
||||
|
|
@ -137,7 +137,7 @@ object LogManager {
|
|||
)
|
||||
|
||||
def backgroundLog(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
context: LoggerContext
|
||||
|
|
@ -150,16 +150,16 @@ object LogManager {
|
|||
// to change from global being the default to overriding, switch the order of state.get and data.get
|
||||
def getOr[T](
|
||||
key: AttributeKey[T],
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
scope: Scope,
|
||||
state: State,
|
||||
default: T
|
||||
): T =
|
||||
data.get(scope, key) orElse state.get(key) getOrElse default
|
||||
data.get(ScopedKey(scope, key)).orElse(state.get(key)).getOrElse(default)
|
||||
|
||||
@deprecated("Use defaultLogger that provides a LoggerContext", "1.4.0")
|
||||
def defaultLogger(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
console: Appender,
|
||||
|
|
@ -170,7 +170,7 @@ object LogManager {
|
|||
defaultLogger(data, state, task, console, backed, relay, extra, LoggerContext.globalContext)
|
||||
// This is the main function that is used to generate the logger for tasks.
|
||||
def defaultLogger(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
console: Appender,
|
||||
|
|
@ -242,7 +242,7 @@ object LogManager {
|
|||
}
|
||||
|
||||
def backgroundLog(
|
||||
data: Settings[Scope],
|
||||
data: Def.Settings,
|
||||
state: State,
|
||||
task: ScopedKey[?],
|
||||
console: Appender,
|
||||
|
|
@ -271,7 +271,7 @@ object LogManager {
|
|||
|
||||
// TODO: Fix this
|
||||
// if global logging levels are not explicitly set, set them from project settings
|
||||
// private[sbt] def setGlobalLogLevels(s: State, data: Settings[Scope]): State =
|
||||
// private[sbt] def setGlobalLogLevels(s: State, data: Def.Settings): State =
|
||||
// if (hasExplicitGlobalLogLevels(s))
|
||||
// s
|
||||
// else {
|
||||
|
|
|
|||
|
|
@ -24,7 +24,8 @@ private[sbt] case class ProjectQuery(
|
|||
val scalaMatches =
|
||||
params.get(Keys.scalaBinaryVersion.key) match
|
||||
case Some(expected) =>
|
||||
val actualSbv = structure.data.get(Scope.ThisScope.rescope(p), scalaBinaryVersion.key)
|
||||
val actualSbv =
|
||||
structure.data.get(Def.ScopedKey(Scope.ThisScope.rescope(p), scalaBinaryVersion.key))
|
||||
actualSbv match
|
||||
case Some(sbv) => sbv == expected
|
||||
case None => true
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@
|
|||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Util }
|
||||
import sbt.internal.util.{ AttributeKey, complete, Relation, Util }
|
||||
import sbt.util.Show
|
||||
import sbt.librarymanagement.Configuration
|
||||
|
||||
|
|
@ -138,7 +138,7 @@ private[sbt] object SettingCompletions {
|
|||
* The last part of the completion will generate a template for the value or function literal that will initialize the setting or task.
|
||||
*/
|
||||
def settingParser(
|
||||
settings: Settings[Scope],
|
||||
settings: Def.Settings,
|
||||
rawKeyMap: Map[String, AttributeKey[?]],
|
||||
context: ResolvedProject,
|
||||
): Parser[String] = {
|
||||
|
|
@ -156,7 +156,7 @@ private[sbt] object SettingCompletions {
|
|||
/** Parser for a Scope+AttributeKey (ScopedKey). */
|
||||
def scopedKeyParser(
|
||||
keyMap: Map[String, AttributeKey[?]],
|
||||
settings: Settings[Scope],
|
||||
settings: Def.Settings,
|
||||
context: ResolvedProject
|
||||
): Parser[ScopedKey[?]] = {
|
||||
val cutoff = KeyRanks.MainCutoff
|
||||
|
|
@ -195,15 +195,11 @@ private[sbt] object SettingCompletions {
|
|||
*/
|
||||
def scopeParser(
|
||||
key: AttributeKey[?],
|
||||
settings: Settings[Scope],
|
||||
settings: Def.Settings,
|
||||
context: ResolvedProject
|
||||
): Parser[Scope] = {
|
||||
val data = settings.data
|
||||
val allScopes = data.keys.toSeq
|
||||
val definedScopes = data.toSeq flatMap { case (scope, attrs) =>
|
||||
if attrs.contains(key) then scope :: Nil else Nil
|
||||
}
|
||||
scope(allScopes, definedScopes, context)
|
||||
val definedScopes = settings.keys.collect { case sk if sk.key == key => sk.scope }
|
||||
scope(settings.scopes.toSeq, definedScopes.toSeq, context)
|
||||
}
|
||||
|
||||
private def scope(
|
||||
|
|
|
|||
|
|
@ -25,11 +25,8 @@ object SettingGraph {
|
|||
compiled(structure.settings, false)(using structure.delegates, structure.scopeLocal, display)
|
||||
)
|
||||
def loop(scoped: ScopedKey[?], generation: Int): SettingGraph = {
|
||||
val key = scoped.key
|
||||
val scope = scoped.scope
|
||||
val definedIn = structure.data.definingScope(scope, key) map { sc =>
|
||||
display.show(ScopedKey(sc, key))
|
||||
}
|
||||
val data = Project.scopedKeyData(structure, scoped)
|
||||
val definedIn = data.map(d => display.show(d.definingKey))
|
||||
val depends = cMap.get(scoped) match {
|
||||
case Some(c) => c.dependencies.toSet; case None => Set.empty
|
||||
}
|
||||
|
|
@ -39,8 +36,8 @@ object SettingGraph {
|
|||
SettingGraph(
|
||||
display.show(scoped),
|
||||
definedIn,
|
||||
Project.scopedKeyData(structure, scope, key),
|
||||
key.description,
|
||||
data,
|
||||
scoped.key.description,
|
||||
basedir,
|
||||
depends map { (x: ScopedKey[?]) =>
|
||||
loop(x, generation + 1)
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@ import sbt.ProjectExtra.*
|
|||
import sbt.SlashSyntax0.given
|
||||
import sbt.internal.io.Source
|
||||
import sbt.internal.nio.Globs
|
||||
import sbt.internal.util.AttributeMap
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.nio.FileStamper
|
||||
import sbt.nio.Keys._
|
||||
|
|
@ -54,7 +53,7 @@ private[sbt] object WatchTransitiveDependencies {
|
|||
val state: State
|
||||
) {
|
||||
def structure: BuildStructure = extracted.structure
|
||||
def data: Map[Scope, AttributeMap] = extracted.structure.data.data
|
||||
def data: Settings = extracted.structure.data
|
||||
}
|
||||
|
||||
private def argumentsImpl(
|
||||
|
|
@ -113,18 +112,18 @@ private[sbt] object WatchTransitiveDependencies {
|
|||
val keys = collectKeys(args, allKeys, Set.empty, Set.empty)
|
||||
def getDynamicInputs(scopedKey: ScopedKey[Seq[Glob]], trigger: Boolean): Seq[DynamicInput] = {
|
||||
data
|
||||
.get(scopedKey.scope)
|
||||
.map { am =>
|
||||
am.get(scopedKey.key) match {
|
||||
case Some(globs: Seq[Glob]) =>
|
||||
if (!trigger) {
|
||||
val stamper = am.get(inputFileStamper.key).getOrElse(FileStamper.Hash)
|
||||
val forceTrigger = am.get(watchForceTriggerOnAnyChange.key).getOrElse(false)
|
||||
globs.map(g => DynamicInput(g, stamper, forceTrigger))
|
||||
} else {
|
||||
globs.map(g => DynamicInput(g, FileStamper.LastModified, forceTrigger = true))
|
||||
}
|
||||
case None => Nil: Seq[DynamicInput]
|
||||
.getDirect(scopedKey)
|
||||
.map { globs =>
|
||||
if (!trigger) {
|
||||
val stamper =
|
||||
data.getDirect(scopedKey.copy(key = inputFileStamper.key)).getOrElse(FileStamper.Hash)
|
||||
val forceTrigger =
|
||||
data
|
||||
.getDirect(scopedKey.copy(key = watchForceTriggerOnAnyChange.key))
|
||||
.getOrElse(false)
|
||||
globs.map(g => DynamicInput(g, stamper, forceTrigger))
|
||||
} else {
|
||||
globs.map(g => DynamicInput(g, FileStamper.LastModified, forceTrigger = true))
|
||||
}
|
||||
}
|
||||
.getOrElse(Nil)
|
||||
|
|
@ -148,21 +147,15 @@ private[sbt] object WatchTransitiveDependencies {
|
|||
.toIndexedSeq
|
||||
val projects = projectScopes.flatMap(_.project.toOption).distinct.toSet
|
||||
val scopes: Seq[Either[Scope, Seq[Glob]]] =
|
||||
data.flatMap { case (s, am) =>
|
||||
if (s == Scope.Global || s.project.toOption.exists(projects.contains))
|
||||
am.get(Keys.watchSources.key) match {
|
||||
case Some(k) =>
|
||||
k.work match {
|
||||
// Avoid extracted.runTask if possible.
|
||||
case Action.Pure(w, _) => Some(Right(w().map(_.toGlob)))
|
||||
case _ => Some(Left(s))
|
||||
}
|
||||
case _ => None
|
||||
data.scopes.toSeq
|
||||
.filter(s => s == Scope.Global || s.project.toOption.exists(projects.contains))
|
||||
.flatMap { s =>
|
||||
data.getDirect(ScopedKey(s, Keys.watchSources.key)).map { task =>
|
||||
task.work match
|
||||
case a: Action.Pure[Seq[Watched.WatchSource]] => Right(a.f().map(_.toGlob))
|
||||
case _ => Left(s)
|
||||
}
|
||||
else {
|
||||
None
|
||||
}
|
||||
}.toSeq
|
||||
def toDynamicInput(glob: Glob): DynamicInput =
|
||||
DynamicInput(glob, FileStamper.LastModified, forceTrigger = true)
|
||||
scopes.flatMap {
|
||||
|
|
|
|||
|
|
@@ -19,7 +19,7 @@ import sjsonnew._
import sjsonnew.support.scalajson.unsafe._

object SettingQuery {
import sbt.internal.util.{ AttributeKey, Settings }
import sbt.internal.util.AttributeKey
import sbt.internal.util.complete.{ DefaultParsers, Parser }, DefaultParsers._
import sbt.Def.{ showBuildRelativeKey2, ScopedKey }

@@ -70,7 +70,7 @@ object SettingQuery {
currentBuild: URI,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope]
data: Def.Settings
): Parser[ParsedKey] =
scopedKeyFull(index, currentBuild, defaultConfigs, keyMap) flatMap { choices =>
Act.select(choices, data)(showBuildRelativeKey2(currentBuild))

@@ -81,7 +81,7 @@ object SettingQuery {
currentBuild: URI,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope]
data: Def.Settings
): Parser[ScopedKey[?]] =
scopedKeySelected(index, currentBuild, defaultConfigs, keyMap, data).map(_.key)

@@ -96,7 +96,7 @@ object SettingQuery {
def getSettingValue[A](structure: BuildStructure, key: Def.ScopedKey[A]): Either[String, A] =
structure.data
.get(key.scope, key.key)
.get(key)
.toRight(s"Key ${Def.displayFull(key)} not found")
.flatMap {
case _: Task[_] => Left(s"Key ${Def.displayFull(key)} is a task, can only query settings")
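The getSettingValue pipeline above turns a missing key into a Left message and rejects task values, since only settings can be read without running anything. A standalone sketch of the same Option-to-Either shape, with simplified stand-ins rather than sbt's BuildStructure and Task types:

object SettingQueryDemo:
  final case class Task[A](name: String) // stand-in for a task value stored in the settings map

  def getSettingValue[A](data: Map[String, Any], key: String): Either[String, A] =
    data
      .get(key)
      .toRight(s"Key $key not found")
      .flatMap {
        case _: Task[?] => Left(s"Key $key is a task, can only query settings")
        case value      => Right(value.asInstanceOf[A])
      }

  def main(args: Array[String]): Unit =
    println(getSettingValue[String](Map("name" -> "demo"), "name"))                // Right(demo)
    println(getSettingValue[String](Map("compile" -> Task("compile")), "compile")) // Left(... is a task ...)
    println(getSettingValue[String](Map.empty, "missing"))                         // Left(Key missing not found)
end SettingQueryDemo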
@@ -21,7 +21,7 @@ object MiniDependencyTreePlugin extends AutoPlugin {
override def globalSettings: Seq[Def.Setting[?]] = Seq(
dependencyTreeIncludeScalaLibrary := false
)
override def projectSettings: Seq[Def.Setting[?]] =
override lazy val projectSettings: Seq[Def.Setting[?]] =
DependencyTreeSettings.coreSettings ++
inConfig(Compile)(DependencyTreeSettings.baseBasicReportingSettings) ++
inConfig(Test)(DependencyTreeSettings.baseBasicReportingSettings)
@@ -17,7 +17,6 @@ import sbt.internal.util.{
ConsoleOut,
GlobalLogging,
MainAppender,
Settings,
Terminal,
}
import sbt.internal.inc.PlainVirtualFileConverter

@@ -97,7 +96,7 @@ object FakeState {
val delegates: (Scope) => Seq[Scope] = _ => Nil
val scopeLocal: Def.ScopeLocal = _ => Nil

val (cMap, data: Settings[Scope]) =
val (cMap, data: Def.Settings) =
Def.makeWithCompiledMap(settings)(using delegates, scopeLocal, Def.showFullKey)
val extra: KeyIndex => BuildUtil[?] = (keyIndex) =>
BuildUtil(base.toURI, Map.empty, keyIndex, data)
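A hedged sketch of inspecting the resulting Def.Settings value from the fixture above, assuming only the members this diff introduces on the Settings trait (keys and scopes):

val allScopes: Set[Scope] = data.scopes             // every scope that defines at least one key
val allKeys: Iterable[Def.ScopedKey[?]] = data.keys // the flat ScopedKey view that replaces scope -> AttributeMap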
@@ -10,7 +10,7 @@ package sbt
package internal

import Def.{ ScopedKey, Setting }
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation, Settings }
import sbt.internal.util.{ AttributeKey, Relation }
import sbt.internal.util.Types.{ const, some }
import sbt.internal.util.complete.Parser
import sbt.librarymanagement.Configuration

@@ -59,17 +59,18 @@ abstract class TestBuild {
sealed case class Structure(
env: Env,
current: ProjectRef,
data: Settings[Scope],
data: Def.Settings,
keyIndex: KeyIndex,
keyMap: Map[String, AttributeKey[?]]
) {
override def toString =
env.toString + "\n" + "current: " + current + "\nSettings:\n\t" + showData + keyMap.keys
.mkString("All keys:\n\t", ", ", "")
def showKeys(map: AttributeMap): String = map.keys.mkString("\n\t ", ",", "\n")
def showKeys(keys: Iterable[AttributeKey[?]]): String = keys.mkString("\n\t ", ",", "\n")
def showData: String = {
val scopeStrings =
for ((scope, map) <- data.data) yield (Scope.display(scope, "<key>"), showKeys(map))
for (scope, keys) <- data.keys.groupMap(_.scope)(_.key)
yield (Scope.display(scope, "<key>"), showKeys(keys))
scopeStrings.toSeq.sorted.map(t => t._1 + t._2).mkString("\n\t")
}
val extra: BuildUtil[Proj] = {

@@ -86,11 +87,10 @@ abstract class TestBuild {
}

lazy val allAttributeKeys: Set[AttributeKey[?]] = {
val x = data.data.values.flatMap(_.keys).toSet
if (x.isEmpty) {
if (data.attributeKeys.isEmpty) {
sys.error("allAttributeKeys is empty")
}
x
data.attributeKeys
}
lazy val (taskAxes, zeroTaskAxis, onlyTaskAxis, multiTaskAxis) = {
import collection.mutable

@@ -98,11 +98,10 @@ abstract class TestBuild {

// task axis of Scope is set to Zero and the value of the second map is the original task axis
val taskAxesMappings =
for ((scope, keys) <- data.data; key <- keys.keys)
yield (ScopedKey(scope.copy(task = Zero), key), scope.task): (
ScopedKey[?],
ScopeAxis[AttributeKey[?]]
)
for
(scope, keys) <- data.keys.groupMap(_.scope)(_.key)
key <- keys
yield (ScopedKey(scope.copy(task = Zero), key), scope.task)

val taskAxes = Relation.empty ++ taskAxesMappings
val zero = new HashSet[ScopedKey[?]]

@@ -240,15 +239,14 @@ abstract class TestBuild {
}
}
val data = Def.makeWithCompiledMap(settings)(using env.delegates, const(Nil), display)._2
val keys = data.allKeys((s, key) => ScopedKey(s, key))
val keyMap = keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[?]]
val keyMap = data.keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[?]]
val projectsMap = env.builds.map(b => (b.uri, b.projects.map(_.id).toSet)).toMap
val confs = for {
b <- env.builds
p <- b.projects
} yield p.id -> p.configurations
val confMap = confs.toMap
Structure(env, current, data, KeyIndex(keys, projectsMap, confMap), keyMap)
Structure(env, current, data, KeyIndex(data.keys, projectsMap, confMap), keyMap)
}

lazy val mkEnv: Gen[Env] = {
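The showData and taskAxes rewrites above both rebuild the old per-scope grouping on demand with groupMap. A standalone illustration of that standard-library call, using plain strings in place of sbt's Scope and AttributeKey:

object GroupMapDemo:
  final case class ScopedKey(scope: String, key: String)

  def main(args: Array[String]): Unit =
    val keys = Seq(
      ScopedKey("Compile", "sources"),
      ScopedKey("Compile", "target"),
      ScopedKey("Test", "sources")
    )
    // keys.groupMap(f)(g) buckets the elements by f and keeps only g of each element,
    // mirroring data.keys.groupMap(_.scope)(_.key) in the diff above.
    val byScope: Map[String, Seq[String]] = keys.groupMap(_.scope)(_.key)
    println(byScope) // Map(Compile -> List(sources, target), Test -> List(sources))
end GroupMapDemo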
@@ -9,6 +9,7 @@
package sbt

trait Import {
type Settings = Def.Settings
type Setting[T] = Def.Setting[T]
type ScopedKey[T] = Def.ScopedKey[T]
type SettingsDefinition = Def.SettingsDefinition

@@ -146,7 +147,7 @@ trait Import {
// type Dag[A <: Dag[A]] = sbt.internal.util.Dag[A]
type DelegatingPMap[K[_], V[_]] = sbt.internal.util.DelegatingPMap[K, V]
val ErrorHandling = sbt.internal.util.ErrorHandling
type EvaluateSettings[S] = sbt.internal.util.EvaluateSettings[S]
// type EvaluateSettings[I <: Init] = sbt.internal.util.EvaluateSettings[I]
val EvaluationState = sbt.internal.util.EvaluationState
val ExitHook = sbt.internal.util.ExitHook
type ExitHook = sbt.internal.util.ExitHook

@@ -168,7 +169,7 @@ trait Import {
type IDSet[T] = sbt.internal.util.IDSet[T]
val IMap = sbt.internal.util.IMap
type IMap[K[_], V[_]] = sbt.internal.util.IMap[K, V]
type Init[S] = sbt.internal.util.Init[S]
type Init = sbt.internal.util.Init
type JLine = sbt.internal.util.JLine
// val KCons = sbt.internal.util.KCons
// type KCons[H, +T <: KList[M], +M[_]] = sbt.internal.util.KCons[H, T, M]

@@ -193,7 +194,6 @@ trait Import {
val Relation = sbt.internal.util.Relation
type Relation[A, B] = sbt.internal.util.Relation[A, B]
val ScalaKeywords = sbt.internal.util.ScalaKeywords
type Settings[S] = sbt.internal.util.Settings[S]
type SharedAttributeKey[T] = sbt.internal.util.SharedAttributeKey[T]
val Signals = sbt.internal.util.Signals
val SimpleReader = sbt.internal.util.SimpleReader
@@ -20,13 +20,12 @@ enum EvaluationState:
case Calling
case Evaluated

abstract class EvaluateSettings[ScopeType]:
protected val init: Init[ScopeType]
class EvaluateSettings[I <: Init](
val init: I,
executor: Executor,
compiledSettings: Seq[init.Compiled[?]],
):
import init._

protected def executor: Executor
protected def compiledSettings: Seq[Compiled[?]]

import EvaluationState.*

private val complete = new LinkedBlockingQueue[Option[Throwable]]

@@ -68,7 +67,7 @@ abstract class EvaluateSettings[ScopeType]:
private val running = new AtomicInteger
private val cancel = new AtomicBoolean(false)

def run(implicit delegates: ScopeType => Seq[ScopeType]): Settings[ScopeType] = {
def run(implicit delegates: ScopeType => Seq[ScopeType]): Settings = {
assert(running.get() == 0, "Already running")
startWork()
roots.foreach(_.registerIfNew())

@@ -83,7 +82,7 @@ abstract class EvaluateSettings[ScopeType]:
private def getResults(implicit delegates: ScopeType => Seq[ScopeType]) =
static.toTypedSeq.foldLeft(empty) { case (ss, static.TPair(key, node)) =>
if key.key.isLocal then ss
else ss.set(key.scope, key.key, node.get)
else ss.set(key, node.get)
}

private lazy val getValue: [A] => INode[A] => A = [A] => (fa: INode[A]) => fa.get
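The evaluator goes from an abstract class whose collaborators were supplied by an anonymous subclass to a plain class that takes them as constructor parameters. A minimal standalone sketch of that shape change, with simplified stand-ins rather than sbt's Init and INode types:

import java.util.concurrent.{ Executor, Executors }

// Before: abstract members filled in at the call site with `new Evaluator { ... }`.
// After: everything arrives through the constructor, as below.
class Evaluator(val name: String, executor: Executor, compiled: Seq[String]):
  def run(): Unit =
    executor.execute(() => compiled.foreach(s => println(s"$name evaluates $s")))

object EvaluatorDemo:
  def main(args: Array[String]): Unit =
    val pool = Executors.newFixedThreadPool(2)
    try new Evaluator("demo", pool, Seq("a", "b")).run()
    finally pool.shutdown()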
@@ -12,53 +12,10 @@ import sbt.util.Show
import Util.{ nil, nilSeq }
import scala.jdk.CollectionConverters.*

sealed trait Settings[ScopeType]:
def data: Map[ScopeType, AttributeMap]
def keys(scope: ScopeType): Set[AttributeKey[?]]
def scopes: Set[ScopeType]
def definingScope(scope: ScopeType, key: AttributeKey[?]): Option[ScopeType]
def allKeys[A](f: (ScopeType, AttributeKey[?]) => A): Seq[A]
def get[A](scope: ScopeType, key: AttributeKey[A]): Option[A]
def getDirect[A](scope: ScopeType, key: AttributeKey[A]): Option[A]
def set[A](scope: ScopeType, key: AttributeKey[A], value: A): Settings[ScopeType]
end Settings

private final class Settings0[ScopeType](
val data: Map[ScopeType, AttributeMap],
val delegates: ScopeType => Seq[ScopeType]
) extends Settings[ScopeType]:

def scopes: Set[ScopeType] = data.keySet
def keys(scope: ScopeType) = data(scope).keys.toSet

def allKeys[A](f: (ScopeType, AttributeKey[?]) => A): Seq[A] =
data.flatMap { case (scope, map) =>
map.keys.map(k => f(scope, k))
}.toSeq

def get[A](scope: ScopeType, key: AttributeKey[A]): Option[A] =
delegates(scope).flatMap { sc =>
getDirect(sc, key)
}.headOption

def definingScope(scope: ScopeType, key: AttributeKey[?]): Option[ScopeType] =
delegates(scope).find { sc =>
getDirect(sc, key).isDefined
}

def getDirect[A](scope: ScopeType, key: AttributeKey[A]): Option[A] =
data.get(scope).flatMap(_.get(key))

def set[A](scope: ScopeType, key: AttributeKey[A], value: A): Settings[ScopeType] =
val map = data.getOrElse(scope, AttributeMap.empty)
val newData = data.updated(scope, map.put(key, value))
Settings0(newData, delegates)

end Settings0

// delegates should contain the input Scope as the first entry
// delegates should contain the input ScopeType as the first entry
// this trait is intended to be mixed into an object
trait Init[ScopeType]:
trait Init:
type ScopeType

/**
* The Show instance used when a detailed String needs to be generated.
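Init drops its type parameter in favor of an abstract ScopeType member that each implementing object fixes (as SettingsExample does later in this diff with `type ScopeType = Scope`). A minimal standalone sketch of the same move, with invented names:

// Before: trait InitLike[S] { def describe(s: S): String }
// After: the scope type becomes a member chosen by the implementor.
trait InitLike:
  type ScopeType
  def describe(s: ScopeType): String

object IntScopedInit extends InitLike:
  type ScopeType = Int
  def describe(s: Int): String = s"scope #$s"

object InitLikeDemo:
  def main(args: Array[String]): Unit =
    println(IntScopedInit.describe(3)) // scope #3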
@@ -80,6 +37,58 @@ trait Init[ScopeType]:
type ScopeLocal = ScopedKey[?] => Seq[Setting[?]]
type MapConstant = [a] => ScopedKey[a] => Option[a]

sealed trait Settings:
def attributeKeys: Set[AttributeKey[?]]
def keys: Iterable[ScopedKey[?]]
def contains(key: ScopedKey[?]): Boolean
def values: Iterable[Any]
def data: Map[ScopedKey[?], Any]
def scopes: Set[ScopeType]
def getKeyValue[A](key: ScopedKey[A]): Option[(ScopedKey[A], A)]
def get[A](key: ScopedKey[A]): Option[A]
def definingKey[A](key: ScopedKey[A]): Option[ScopedKey[A]]
def getDirect[A](key: ScopedKey[A]): Option[A]
def set[A](key: ScopedKey[A], value: A): Settings
end Settings

private final class Settings0(
val scopes: Set[ScopeType],
val attributeKeys: Set[AttributeKey[?]],
// In 1.x it was a Map[Scope, AttributeMap]
// For the heap, it is better to store the ScopedKey[?] directly to avoid recreating
// and duplicating them later.
val data: Map[ScopedKey[?], Any],
d: ScopeType => Seq[ScopeType]
) extends Settings:
def keys: Iterable[ScopedKey[?]] = data.keys
def contains(key: ScopedKey[?]): Boolean = data.contains(key)
def values: Iterable[Any] = data.values

def get[A](key: ScopedKey[A]): Option[A] =
delegates(key).flatMap(data.get).nextOption.asInstanceOf[Option[A]]

def definingKey[A](key: ScopedKey[A]): Option[ScopedKey[A]] =
delegates(key).find(data.contains)

def getKeyValue[A](key: ScopedKey[A]): Option[(ScopedKey[A], A)] =
delegates(key).flatMap { k =>
data.get(k) match
case None => None
case Some(v) => Some(k -> v.asInstanceOf[A])
}.nextOption

def getDirect[A](key: ScopedKey[A]): Option[A] = data.get(key).asInstanceOf[Option[A]]

def set[A](key: ScopedKey[A], value: A): Settings =
val newScopes = scopes + key.scope
val newAttributeKeys = attributeKeys + key.key
val newData = data.updated(key, value)
Settings0(newScopes, newAttributeKeys, newData, d)

private def delegates[A](key: ScopedKey[A]): Iterator[ScopedKey[A]] =
d(key.scope).iterator.map(s => key.copy(scope = s))
end Settings0

private[sbt] abstract class ValidateKeyRef {
def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T]
}
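The core of the new Settings0 is a single Map keyed by ScopedKey plus a delegate function: get walks the delegate scopes in order and returns the first value found. A standalone sketch of that lookup, with simplified stand-ins rather than sbt's Scope and key types:

object DelegateLookupDemo:
  final case class ScopedKey[A](scope: String, key: String)

  // Delegation order: the concrete scope first, then Global, mirroring d(key.scope) in Settings0.
  private def delegateScopes(scope: String): Seq[String] = (Seq(scope) :+ "Global").distinct

  def main(args: Array[String]): Unit =
    val data: Map[ScopedKey[?], Any] = Map(ScopedKey[Int]("Global", "answer") -> 42)

    def get[A](key: ScopedKey[A]): Option[A] =
      delegateScopes(key.scope).iterator
        .map(s => key.copy(scope = s)) // same key, rewritten into each delegate scope
        .flatMap(data.get)             // first scope that actually defines it wins
        .nextOption()
        .asInstanceOf[Option[A]]

    println(get(ScopedKey[Int]("myProject", "answer")))  // Some(42), found via the Global delegate
    println(get(ScopedKey[Int]("myProject", "missing"))) // None
end DelegateLookupDemo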
@@ -163,16 +172,16 @@ trait Init[ScopeType]:
private final val nextID = new java.util.concurrent.atomic.AtomicLong
private final def nextDefaultID(): Long = nextID.incrementAndGet()

def empty(implicit delegates: ScopeType => Seq[ScopeType]): Settings[ScopeType] =
Settings0(Map.empty, delegates)
def empty(implicit delegates: ScopeType => Seq[ScopeType]): Settings =
Settings0(Set.empty, Set.empty, Map.empty, delegates)

def asTransform(s: Settings[ScopeType]): [A] => ScopedKey[A] => A =
def asTransform(s: Settings): [A] => ScopedKey[A] => A =
[A] => (sk: ScopedKey[A]) => getValue(s, sk)

def getValue[T](s: Settings[ScopeType], k: ScopedKey[T]) =
s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k))
def getValue[T](s: Settings, k: ScopedKey[T]) =
s.get(k).getOrElse(throw new InvalidReference(k))

def asFunction[A](s: Settings[ScopeType]): ScopedKey[A] => A = k => getValue(s, k)
def asFunction[A](s: Settings): ScopedKey[A] => A = k => getValue(s, k)

def mapScope(f: ScopeType => ScopeType): MapScoped =
[a] => (k: ScopedKey[a]) => k.copy(scope = f(k.scope))

@@ -197,7 +206,7 @@ trait Init[ScopeType]:
// inject derived settings into scopes where their dependencies are directly defined
// and prepend per-scope settings
val derived = deriveAndLocal(initDefaults, mkDelegates(delegates))
// group by Scope/Key, dropping dead initializations
// group by ScopeType/Key, dropping dead initializations
val sMap: ScopedMap = grouped(derived)
// delegate references to undefined values according to 'delegates'
val dMap: ScopedMap =

@@ -211,13 +220,13 @@ trait Init[ScopeType]:
delegates: ScopeType => Seq[ScopeType],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[?]]
): Settings[ScopeType] = makeWithCompiledMap(init)._2
): Settings = makeWithCompiledMap(init)._2

def makeWithCompiledMap(init: Seq[Setting[?]])(using
delegates: ScopeType => Seq[ScopeType],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[?]]
): (CompiledMap, Settings[ScopeType]) =
): (CompiledMap, Settings) =
val cMap = compiled(init)(using delegates, scopeLocal, display)
// order the initializations. cyclic references are detected here.
val ordered: Seq[Compiled[?]] = sort(cMap)

@@ -235,16 +244,14 @@ trait Init[ScopeType]:
def compile(sMap: ScopedMap): CompiledMap =
sMap match
case m: IMap.IMap0[ScopedKey, SettingSeq] @unchecked =>
Par(m.backing.toVector)
import scala.collection.parallel.CollectionConverters.*
m.backing.par
.map { case (k, ss) =>
val deps = ss.flatMap(_.dependencies).toSet
(
k,
Compiled(k.asInstanceOf[ScopedKey[Any]], deps, ss.asInstanceOf[SettingSeq[Any]])
)
val deps = ss.iterator.flatMap(_.dependencies).toSet
k -> Compiled(k.asInstanceOf[ScopedKey[Any]], deps, ss.asInstanceOf[SettingSeq[Any]])
}
.toVector
.toMap
.to(Map)

case _ =>
sMap.toTypedSeq.map { case sMap.TPair(k, ss) =>
val deps = ss.flatMap(_.dependencies)
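The compile step above now maps the backing map in parallel via the scala-parallel-collections module and folds the result back into an ordinary Map. A standalone sketch of that pipeline (it assumes org.scala-lang.modules' scala-parallel-collections is on the classpath; the summing work stands in for building Compiled values):

object ParCompileDemo:
  import scala.collection.parallel.CollectionConverters.*

  def main(args: Array[String]): Unit =
    val backing = Map("a" -> Seq(1, 2), "b" -> Seq(3))
    val compiled: Map[String, Int] =
      backing.par
        .map { case (k, ss) => k -> ss.sum } // done in parallel across entries
        .toVector
        .to(Map)
    println(compiled) // Map(a -> 3, b -> 3)
end ParCompileDemo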
@@ -324,16 +331,12 @@ trait Init[ScopeType]:

private def applyInits(ordered: Seq[Compiled[?]])(implicit
delegates: ScopeType => Seq[ScopeType]
): Settings[ScopeType] =
): Settings =
val x =
java.util.concurrent.Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors)
try {
val eval: EvaluateSettings[ScopeType] = new EvaluateSettings[ScopeType] {
override val init: Init.this.type = Init.this
def compiledSettings = ordered
def executor = x
}
eval.run
val eval: EvaluateSettings[Init.this.type] = new EvaluateSettings(Init.this, x, ordered)
eval.run(using delegates)
} finally {
x.shutdown()
}

@@ -416,15 +419,9 @@ trait Init[ScopeType]:
final class Flattened(val key: ScopedKey[?], val dependencies: Iterable[ScopedKey[?]])

def flattenLocals(compiled: CompiledMap): Map[ScopedKey[?], Flattened] = {
val locals = compiled flatMap { case (key, comp) =>
if (key.key.isLocal) Seq(comp)
else nilSeq[Compiled[?]]
}
val locals = compiled.collect { case (key, comp) if key.key.isLocal => comp }
val ordered = Dag.topologicalSort(locals)(
_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[?]](compiled(dep))
else nilSeq[Compiled[?]]
)
_.dependencies.collect { case dep if dep.key.isLocal => compiled(dep) }
)
def flatten(
cmap: Map[ScopedKey[?], Flattened],

@@ -433,7 +430,7 @@ trait Init[ScopeType]:
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else Seq[ScopedKey[?]](dep))
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else Seq(dep))
)

val empty = Map.empty[ScopedKey[?], Flattened]

@@ -442,10 +439,9 @@ trait Init[ScopeType]:
cmap.updated(c.key, flatten(cmap, c.key, c.dependencies))
}

compiled flatMap { case (key, comp) =>
if (key.key.isLocal) nilSeq[(ScopedKey[?], Flattened)]
else
Seq[(ScopedKey[?], Flattened)]((key, flatten(flattenedLocals, key, comp.dependencies)))
compiled.collect {
case (key, comp) if !key.key.isLocal =>
(key, flatten(flattenedLocals, key, comp.dependencies))
}
}

@@ -653,7 +649,7 @@ trait Init[ScopeType]:

private[sbt] def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1]

def evaluate(map: Settings[ScopeType]): A1
def evaluate(map: Settings): A1
def zip[A2](o: Initialize[A2]): Initialize[(A1, A2)] = zipTupled(o)(identity)

def zipWith[A2, U](o: Initialize[A2])(f: (A1, A2) => U): Initialize[U] =

@@ -799,7 +795,7 @@ trait Init[ScopeType]:
(fa: Initialize[A]) => (fa.mapReferenced(g))
private def mapConstantK(g: MapConstant): [A] => Initialize[A] => Initialize[A] = [A] =>
(fa: Initialize[A]) => (fa.mapConstant(g))
private def evaluateK(g: Settings[ScopeType]): [A] => Initialize[A] => A = [A] =>
private def evaluateK(g: Settings): [A] => Initialize[A] => A = [A] =>
(fa: Initialize[A]) => (fa.evaluate(g))
private def deps(ls: List[Initialize[?]]): Seq[ScopedKey[?]] =
ls.flatMap(_.dependencies)

@@ -820,7 +816,7 @@ trait Init[ScopeType]:
extends Keyed[S, A1]:
override final def apply[A2](g: A1 => A2): Initialize[A2] =
GetValue(scopedKey, g compose transform)
override final def evaluate(ss: Settings[ScopeType]): A1 = transform(getValue(ss, scopedKey))
override final def evaluate(ss: Settings): A1 = transform(getValue(ss, scopedKey))
override final def mapReferenced(g: MapScoped): Initialize[A1] =
GetValue(g(scopedKey), transform)

@@ -842,7 +838,7 @@ trait Init[ScopeType]:
trait KeyedInitialize[A1] extends Keyed[A1, A1]:
override final def apply[A2](g: A1 => A2): Initialize[A2] =
GetValue(scopedKey, g)
override final def evaluate(ss: Settings[ScopeType]): A1 = getValue(ss, scopedKey)
override final def evaluate(ss: Settings): A1 = getValue(ss, scopedKey)
override final def mapReferenced(g: MapScoped): Initialize[A1] = g(scopedKey)

private[sbt] override final def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1] =

@@ -861,7 +857,7 @@ trait Init[ScopeType]:
override def dependencies: Seq[ScopedKey[?]] = Nil
override def apply[A2](g2: ([x] => Initialize[x] => Initialize[x]) => A2): Initialize[A2] =
map(this)(g2)
override def evaluate(ss: Settings[ScopeType]): [x] => Initialize[x] => Initialize[x] = f
override def evaluate(ss: Settings): [x] => Initialize[x] => Initialize[x] = f
override def mapReferenced(g: MapScoped): Initialize[[x] => Initialize[x] => Initialize[x]] =
TransformCapture(mapReferencedK(g) ∙ f)
override def mapConstant(g: MapConstant): Initialize[[x] => Initialize[x] => Initialize[x]] =

@@ -880,7 +876,7 @@ trait Init[ScopeType]:
extends Initialize[ScopedKey[A1]]:
override def dependencies: Seq[ScopedKey[?]] = Nil
override def apply[A2](g2: ScopedKey[A1] => A2): Initialize[A2] = map(this)(g2)
override def evaluate(ss: Settings[ScopeType]): ScopedKey[A1] = key
override def evaluate(ss: Settings): ScopedKey[A1] = key
override def mapReferenced(g: MapScoped): Initialize[ScopedKey[A1]] =
ValidationCapture(g(key), selfRefOk)
override def mapConstant(g: MapConstant): Initialize[ScopedKey[A1]] = this

@@ -898,7 +894,7 @@ trait Init[ScopeType]:
extends Initialize[A1]:
override def dependencies: Seq[ScopedKey[?]] = in.dependencies
override def apply[A2](g: A1 => A2): Initialize[A2] = Bind[S, A2](s => f(s)(g), in)
override def evaluate(ss: Settings[ScopeType]): A1 = f(in.evaluate(ss)).evaluate(ss)
override def evaluate(ss: Settings): A1 = f(in.evaluate(ss)).evaluate(ss)
override def mapReferenced(g: MapScoped) =
Bind[S, A1](s => f(s).mapReferenced(g), in.mapReferenced(g))

@@ -927,7 +923,7 @@ trait Init[ScopeType]:
case Some(i) => Right(Optional(i.validateKeyReferenced(g).toOption, f))

override def mapConstant(g: MapConstant): Initialize[A1] = Optional(a map mapConstantK(g)[S], f)
override def evaluate(ss: Settings[ScopeType]): A1 =
override def evaluate(ss: Settings): A1 =
f(a.flatMap { i => trapBadRef(evaluateK(ss)(i)) })

// proper solution is for evaluate to be deprecated or for external use only and a new internal method returning Either be used

@@ -946,7 +942,7 @@ trait Init[ScopeType]:
override def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1] = Right(this)
override def apply[A2](g: A1 => A2): Initialize[A2] = Value[A2](() => g(value()))
override def mapConstant(g: MapConstant): Initialize[A1] = this
override def evaluate(map: Settings[ScopeType]): A1 = value()
override def evaluate(map: Settings): A1 = value()
private[sbt] override def processAttributes[A2](init: A2)(f: (A2, AttributeMap) => A2): A2 =
init
end Value

@@ -958,7 +954,7 @@ trait Init[ScopeType]:
Right(this)
override def apply[A2](g: Set[ScopeType] => A2) = map(this)(g)
override def mapConstant(g: MapConstant): Initialize[Set[ScopeType]] = this
override def evaluate(map: Settings[ScopeType]): Set[ScopeType] = map.scopes
override def evaluate(map: Settings): Set[ScopeType] = map.scopes
private[sbt] override def processAttributes[A2](init: A2)(f: (A2, AttributeMap) => A2): A2 =
init
end StaticScopes

@@ -971,7 +967,7 @@ trait Init[ScopeType]:
override def mapConstant(g: MapConstant): Initialize[A2] =
Uniform(f, inputs.map(_.mapConstant(g)))
override def apply[A3](g: A2 => A3): Initialize[A3] = Uniform(g.compose(f), inputs)
override def evaluate(ss: Settings[ScopeType]): A2 = f(inputs.map(_.evaluate(ss)))
override def evaluate(ss: Settings): A2 = f(inputs.map(_.evaluate(ss)))

override def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A2] =
val tx = inputs.map(_.validateKeyReferenced(g))
@@ -191,7 +191,7 @@ object SettingsTest extends Properties("settings") {
checkKey(chk, Some(expected), eval)
}

def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) = {
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Def.Settings) = {
val value = settings.get(key.scope, key.key)
("Key: " + key) |:
("Value: " + value) |:

@@ -199,7 +199,7 @@ object SettingsTest extends Properties("settings") {
(value == expected)
}

def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
def evaluate(settings: Seq[Setting[_]]): Def.Settings =
try {
makeWithCompiledMap(settings)(delegates, scopeLocal, showFullKey)._2
} catch {
@@ -19,7 +19,8 @@ final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
// Lots of type constructors would become binary, which as you may know requires lots of type lambdas
// when you want a type function with only one parameter.
// That would be a general pain.)
case class SettingsExample() extends Init[Scope] {
case class SettingsExample() extends Init {
type ScopeType = Scope
// Provides a way of showing a Scope+AttributeKey[_]
val showFullKey: Show[ScopedKey[?]] = Show[ScopedKey[?]]((key: ScopedKey[?]) => {
s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"

@@ -64,7 +65,7 @@ case class SettingsUsage(val settingsExample: SettingsExample) {
// "compiles" and applies the settings.
// This can be split into multiple steps to access intermediate results if desired.
// The 'inspect' command operates on the output of 'compile', for example.
val applied: Settings[Scope] =
val applied: Settings =
makeWithCompiledMap(mySettings)(using delegates, scopeLocal, showFullKey)._2

// Show results.
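A hedged follow-on to the SettingsUsage example: once applied, the settings can be listed through the keys and get members of the new Settings trait (this assumes only the API shown in this diff; the printed format is illustrative):

applied.keys.foreach { key =>
  println(s"$key -> ${applied.get(key)}")
}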