Merge branch 'develop' into wip/scalainstance

Eugene Yokota 2024-11-14 22:02:10 -05:00
commit 29c77f6f5f
44 changed files with 488 additions and 619 deletions

View File

@ -9,19 +9,12 @@
package sbt
package internal
import sbt.internal.util.{
AttributeEntry,
AttributeKey,
LineRange,
MessageOnlyException,
RangePosition,
Settings
}
import sbt.internal.util.{ AttributeKey, LineRange, MessageOnlyException, RangePosition }
import java.io.File
import java.nio.file.Path
import sbt.internal.util.complete.DefaultParsers.validID
import Def.{ ScopedKey, Setting }
import Def.{ ScopedKey, Setting, Settings }
import Scope.GlobalScope
import sbt.SlashSyntax0.given
import sbt.internal.parser.SbtParser
@ -351,17 +344,6 @@ object BuildUtilLite:
end BuildUtilLite
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[?], ScopedKey[Task[?]]] = {
val pairs = data.scopes flatMap (scope =>
data.data(scope).entries collect { case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[?]]]))
}
)
pairs.toMap[Task[?], ScopedKey[Task[?]]]
}
def allKeys(settings: Seq[Setting[?]]): Set[ScopedKey[?]] = {
val result = new java.util.HashSet[ScopedKey[?]]
settings.foreach { s =>
@ -372,9 +354,6 @@ object Index {
result.asScala.toSet
}
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[?]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[?]]
def stringToKeyMap(settings: Set[AttributeKey[?]]): Map[String, AttributeKey[?]] =
stringToKeyMap0(settings)(_.label)
@ -396,19 +375,17 @@ object Index {
private type TriggerMap = collection.mutable.HashMap[TaskId[?], Seq[TaskId[?]]]
def triggers(ss: Settings[Scope]): Triggers = {
def triggers(ss: Settings): Triggers = {
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
for
a <- ss.data.values
case AttributeEntry(_, base: Task[?]) <- a.entries
do
ss.values.collect { case base: Task[?] =>
def update(map: TriggerMap, key: AttributeKey[Seq[Task[?]]]): Unit =
base.info.attributes.get(key).getOrElse(Seq.empty).foreach { task =>
base.getOrElse(key, Seq.empty).foreach { task =>
map(task) = base +: map.getOrElse(task, Nil)
}
update(runBefore, Def.runBefore)
update(triggeredBy, Def.triggeredBy)
}
val onComplete = (GlobalScope / Def.onComplete).get(ss).getOrElse(() => ())
new Triggers(runBefore, triggeredBy, map => { onComplete(); map })
}

View File

@ -463,10 +463,7 @@ object Tests {
fun: TestFunction,
tags: Seq[(Tag, Int)]
): Task[Map[String, SuiteResult]] = {
val base = Task[(String, (SuiteResult, Seq[TestTask]))](
Info[(String, (SuiteResult, Seq[TestTask]))]().setName(name),
Action.Pure(() => (name, fun.apply()), `inline` = false)
)
val base = Task(Action.Pure(() => (name, fun.apply()), `inline` = false)).setName(name)
val taggedBase = base.tagw(tags*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_))*)
taggedBase flatMap { case (name, (result, nested)) =>
val nestedRunnables = createNestedRunnables(loader, fun, nested)

View File

@ -54,7 +54,8 @@ trait BuildSyntax:
end BuildSyntax
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
object Def extends BuildSyntax with Init with InitializeImplicits:
type ScopeType = Scope
type Classpath = Seq[Attributed[HashedVirtualFileRef]]
def settings(ss: SettingsDefinition*): Seq[Setting[?]] = ss.flatMap(_.settings)
@ -457,11 +458,11 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
sys.error(s"Dummy task '$name' did not get converted to a full task.")
)
.named(name)
base.copy(info = base.info.set(isDummyTask, true))
base.set(isDummyTask, true)
}
private[sbt] def isDummy(t: Task[?]): Boolean =
t.info.attributes.get(isDummyTask) getOrElse false
t.get(isDummyTask).getOrElse(false)
end Def
sealed trait InitializeImplicits { self: Def.type =>
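
Illustrative sketch (not part of the diff): `Def` now mixes in the unparameterized `Init` and binds the scope with `type ScopeType = Scope`, so the settings map is written `Def.Settings` instead of `sbt.internal.util.Settings[Scope]` throughout the rest of this commit, and per-task flags such as `isDummyTask` are read straight off the task's attributes rather than `Task.info`. A minimal example of the new lookup shape, with a hypothetical helper name and `Keys.onLoadMessage` chosen only as a convenient stock key:

// Assumes `import sbt.SlashSyntax0.given` and `import Scope.GlobalScope`, as at the top of this file.
private def exampleHook(data: Def.Settings): Option[String] =
  // was: data.get(GlobalScope, Keys.onLoadMessage.key) on Settings[Scope]
  (GlobalScope / Keys.onLoadMessage).get(data)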

View File

@ -127,7 +127,7 @@ object Previous {
val successfulTaskResults = (
for
case results.TPair(task: Task[?], Result.Value(v)) <- results.toTypedSeq
key <- task.info.attributes.get(Def.taskDefinitionKey).asInstanceOf[Option[AnyTaskKey]]
key <- task.get(Def.taskDefinitionKey).asInstanceOf[Option[AnyTaskKey]]
yield key -> v
).toMap
// We then traverse the successful results and look up all of the referenced values for

View File

@ -334,7 +334,7 @@ object Project:
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
[a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
[a] => (k: ScopedKey[a]) => k.copy(scope = f(k.scope))
def transform(g: Scope => Scope, ss: Seq[Def.Setting[?]]): Seq[Def.Setting[?]] =
// We use caching to avoid creating new Scope instances too many times
@ -361,7 +361,10 @@ object Project:
Project.transform(Scope.replaceThis(scope), ss)
private[sbt] def inScope[A](scope: Scope, i: Initialize[A]): Initialize[A] =
i.mapReferenced(Project.mapScope(Scope.replaceThis(scope)))
i.mapReferenced(replaceThis(scope))
private[sbt] def replaceThis(scope: Scope): Def.MapScoped =
mapScope(Scope.replaceThis(scope))
/**
* Normalize a String so that it is suitable for use as a dependency management module identifier.

View File

@ -11,7 +11,7 @@ package sbt
import scala.annotation.targetName
import sbt.internal.util.Types.*
import sbt.internal.util.{ AttributeKey, KeyTag, Settings, SourcePosition }
import sbt.internal.util.{ AttributeKey, KeyTag, SourcePosition }
import sbt.internal.util.TupleMapExtension.*
import sbt.util.OptJsonWriter
import sbt.ConcurrentRestrictions.Tag
@ -303,8 +303,7 @@ object Scoped:
setting(scopedKey, app, source)
/** From the given `Settings`, extract the value bound to this key. */
final def get(settings: Settings[Scope]): Option[A1] =
settings.get(scopedKey.scope, scopedKey.key)
final def get(settings: Def.Settings): Option[A1] = settings.get(scopedKey)
/**
* Creates an [[Def.Initialize]] with value `scala.None` if there was no previous definition of this key,
@ -385,7 +384,7 @@ object Scoped:
): Initialize[Task[A1]] =
Initialize
.joinAny[Task](coerceToAnyTaskSeq(tasks))
.zipWith(init)((ts, i) => i.copy(info = i.info.set(key, ts)))
.zipWith(init)((ts, i) => i.set(key, ts))
extension [A1](init: Initialize[InputTask[A1]])
@targetName("onTaskInitializeInputTask")
@ -460,7 +459,7 @@ object Scoped:
def toSettingKey: SettingKey[Task[A1]] = scopedSetting(scope, key)
def get(settings: Settings[Scope]): Option[Task[A1]] = settings.get(scope, key)
def get(settings: Def.Settings): Option[Task[A1]] = settings.get(scopedKey)
/**
* Creates an [[Def.Initialize]] with value `scala.None` if there was no previous definition of this key,
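
Illustrative sketch (not part of the diff): both `get` overloads above replace the old two-argument lookup on `Settings[Scope]` with a single `ScopedKey`-based call on `Def.Settings`. Hedged usage, with made-up names:

// Setting value:  was data.get(k.scope, k.key)   now k.get(data) or data.get(k.scopedKey)
def exampleSetting[A](k: SettingKey[A], data: Def.Settings): Option[A] =
  k.get(data)

// Task value: the TaskKey overload returns the underlying Task rather than its result.
def exampleTask[A](k: TaskKey[A], data: Def.Settings): Option[Task[A]] =
  data.get(k.scopedKey)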

View File

@ -47,7 +47,7 @@ end ParserInstance
/** Composes the Task and Initialize Instances to provide an Instance for [A1] Initialize[Task[A1]]. */
object FullInstance:
type SS = sbt.internal.util.Settings[Scope]
type SS = Def.Settings
val settingsData = TaskKey[SS](
"settings-data",
"Provides access to the project data for the build.",

View File

@ -4001,14 +4001,15 @@ object Classpaths {
try {
val extracted = Project.extract(st)
val sk = (projRef / Zero / Zero / libraryDependencies).scopedKey
val empty = extracted.structure.data.set(sk.scope, sk.key, Nil)
val empty = extracted.structure.data.set(sk, Nil)
val settings = extracted.structure.settings filter { (s: Setting[?]) =>
(s.key.key == libraryDependencies.key) &&
(s.key.scope.project == Select(projRef))
}
Map(settings.asInstanceOf[Seq[Setting[Seq[ModuleID]]]].flatMap { s =>
s.init.evaluate(empty) map { _ -> s.pos }
}*)
settings
.asInstanceOf[Seq[Setting[Seq[ModuleID]]]]
.flatMap(s => s.init.evaluate(empty).map(_ -> s.pos))
.toMap
} catch {
case NonFatal(_) => Map()
}
@ -4163,7 +4164,7 @@ object Classpaths {
private[sbt] def depMap(
projects: Seq[ProjectRef],
data: Settings[Scope],
data: Def.Settings,
log: Logger
): Task[Map[ModuleRevisionId, ModuleDescriptor]] =
val ivyModules = projects.flatMap { proj =>
@ -4240,14 +4241,14 @@ object Classpaths {
def interSort(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies
): Seq[(ProjectRef, String)] = ClasspathImpl.interSort(projectRef, conf, data, deps)
def interSortConfigurations(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies
): Seq[(ProjectRef, ConfigRef)] =
interSort(projectRef, conf, data, deps).map { case (projectRef, configName) =>
@ -4291,23 +4292,23 @@ object Classpaths {
sys.error("Configuration '" + conf + "' not defined in '" + in + "'")
def allConfigs(conf: Configuration): Seq[Configuration] = ClasspathImpl.allConfigs(conf)
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
def getConfigurations(p: ResolvedReference, data: Def.Settings): Seq[Configuration] =
ClasspathImpl.getConfigurations(p, data)
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
ClasspathImpl.confOpt(configurations, conf)
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Settings[Scope]): Task[Classpath] =
def unmanagedLibs(dep: ResolvedReference, conf: String, data: Def.Settings): Task[Classpath] =
ClasspathImpl.unmanagedLibs(dep, conf, data)
def getClasspath(
key: TaskKey[Classpath],
dep: ResolvedReference,
conf: String,
data: Settings[Scope]
data: Def.Settings
): Task[Classpath] =
ClasspathImpl.getClasspath(key, dep, conf, data)
def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
def defaultConfigurationTask(p: ResolvedReference, data: Def.Settings): Configuration =
(p / defaultConfiguration).get(data).flatten.getOrElse(Configurations.Default)
val sbtIvySnapshots: URLRepository = Resolver.sbtIvyRepo("snapshots")
@ -4748,7 +4749,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
def initScoped[T](sk: ScopedKey[?], i: Initialize[T]): Initialize[T] =
initScope(fillTaskAxis(sk.scope, sk.key), i)
def initScope[T](s: Scope, i: Initialize[T]): Initialize[T] =
i.mapReferenced(Project.mapScope(Scope.replaceThis(s)))
Project.inScope(s, i)
/**
* Disables post-compilation hook for determining tests for tab-completion (such as for 'test-only').

View File

@ -449,8 +449,8 @@ object EvaluateTask {
ref: ProjectRef
): Option[(Task[T], NodeView)] = {
val thisScope = Load.projectScope(ref)
val resolvedScope = Scope.replaceThis(thisScope)(taskKey.scope)
for (t <- structure.data.get(resolvedScope, taskKey.key))
val subScoped = Project.replaceThis(thisScope)(taskKey.scopedKey)
for (t <- structure.data.get(subScoped))
yield (t, nodeView(state, streams, taskKey :: Nil))
}
def nodeView(
@ -582,7 +582,7 @@ object EvaluateTask {
Function.chain(
results.toTypedSeq flatMap {
case results.TPair(_, Result.Value(KeyValue(_, st: StateTransform))) => Some(st.transform)
case results.TPair(Task(info, _), Result.Value(v)) => info.post(v).get(transformState)
case results.TPair(task: Task[?], Result.Value(v)) => task.post(v).get(transformState)
case _ => Nil
}
)

View File

@ -12,7 +12,6 @@ import sbt.internal.{ Load, BuildStructure, Act, Aggregation, SessionSettings }
import Scope.GlobalScope
import Def.{ ScopedKey, Setting }
import sbt.internal.util.complete.Parser
import sbt.internal.util.AttributeKey
import sbt.util.Show
import std.Transform.DummyTaskMap
import sbt.EvaluateTask.extractedTaskConfig
@ -34,21 +33,21 @@ final case class Extracted(
* If the project axis is not explicitly specified, it is resolved to be the current project according to the extracted `session`.
* Other axes are resolved to be `Zero` if they are not specified.
*/
def get[T](key: SettingKey[T]): T = getOrError(inCurrent(key.scope), key.key)
def get[T](key: TaskKey[T]): Task[T] = getOrError(inCurrent(key.scope), key.key)
def get[T](key: SettingKey[T]): T = getOrError(inCurrent(key.scopedKey))
def get[T](key: TaskKey[T]): Task[T] = getOrError(inCurrent(key.scopedKey))
/**
* Gets the value assigned to `key` in the computed settings map wrapped in Some. If it does not exist, None is returned.
* If the project axis is not explicitly specified, it is resolved to be the current project according to the extracted `session`.
* Other axes are resolved to be `Zero` if they are not specified.
*/
def getOpt[T](key: SettingKey[T]): Option[T] = structure.data.get(inCurrent(key.scope), key.key)
def getOpt[T](key: SettingKey[T]): Option[T] = structure.data.get(inCurrent(key.scopedKey))
def getOpt[T](key: TaskKey[T]): Option[Task[T]] =
structure.data.get(inCurrent(key.scope), key.key)
structure.data.get(inCurrent(key))
private def inCurrent(scope: Scope): Scope =
if scope.project == This then scope.rescope(currentRef)
else scope
private def inCurrent[T](key: ScopedKey[T]): ScopedKey[T] =
if key.scope.project == This then key.copy(scope = key.scope.rescope(currentRef))
else key
/**
* Runs the task specified by `key` and returns the transformed State and the resulting value of the task.
@ -63,7 +62,7 @@ final case class Extracted(
val config = extractedTaskConfig(this, structure, state)
val value: Option[(State, Result[T])] =
EvaluateTask(structure, key.scopedKey, state, currentRef, config)
val (newS, result) = getOrError(rkey.scope, rkey.key, value)
val (newS, result) = getOrError(rkey.scopedKey, value)
(newS, EvaluateTask.processResult2(result))
}
@ -116,15 +115,15 @@ final case class Extracted(
private def resolve[K <: Scoped.ScopingSetting[K] & Scoped](key: K): K =
Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope) / key
private def getOrError[T](scope: Scope, key: AttributeKey[?], value: Option[T])(implicit
private def getOrError[T](key: ScopedKey[?], value: Option[T])(implicit
display: Show[ScopedKey[?]]
): T =
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
value.getOrElse(sys.error(display.show(key) + " is undefined."))
private def getOrError[T](scope: Scope, key: AttributeKey[T])(implicit
private def getOrError[T](key: ScopedKey[T])(implicit
display: Show[ScopedKey[?]]
): T =
getOrError(scope, key, structure.data.get(scope, key))(display)
getOrError(key, structure.data.get(key))(display)
@deprecated(
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",

View File

@ -47,7 +47,7 @@ import sbt.internal.{
SettingGraph,
SessionSettings
}
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation, Settings }
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation }
import sbt.internal.util.Types.const
import sbt.internal.server.ServerHandler
import sbt.librarymanagement.Configuration
@ -288,10 +288,10 @@ trait ProjectExtra extends Scoped.Syntax:
def orIdentity[A](opt: Option[A => A]): A => A =
opt.getOrElse(identity)
def getHook[A](key: SettingKey[A => A], data: Settings[Scope]): A => A =
def getHook[A](key: SettingKey[A => A], data: Def.Settings): A => A =
orIdentity((Global / key).get(data))
def getHooks(data: Settings[Scope]): (State => State, State => State) =
def getHooks(data: Def.Settings): (State => State, State => State) =
(getHook(Keys.onLoad, data), getHook(Keys.onUnload, data))
def current(state: State): ProjectRef = session(state).current
@ -373,46 +373,34 @@ trait ProjectExtra extends Scoped.Syntax:
private[sbt] def scopedKeyData(
structure: BuildStructure,
scope: Scope,
key: AttributeKey[?]
key: ScopedKey[?]
): Option[ScopedKeyData[?]] =
structure.data.get(scope, key) map { v =>
ScopedKeyData(ScopedKey(scope, key), v)
}
structure.data.getKeyValue(key).map((defining, value) => ScopedKeyData(key, defining, value))
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[?])(
using display: Show[ScopedKey[?]]
def details(structure: BuildStructure, actual: Boolean, key: ScopedKey[?])(using
display: Show[ScopedKey[?]]
): String = {
val scoped = ScopedKey(scope, key)
val data = scopedKeyData(structure, key).map(_.description).getOrElse("No entry for key.")
val description = key.key.description match
case Some(desc) => s"Description:\n\t$desc\n"
case None => ""
val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
"No entry for key."
}
val description = key.description match {
case Some(desc) => "Description:\n\t" + desc + "\n"; case None => ""
}
val definingScope = structure.data.definingScope(scope, key)
val providedBy = definingScope match {
case Some(sc) => "Provided by:\n\t" + Scope.display(sc, key.label) + "\n"
case None => ""
}
val definingScoped = definingScope match {
case Some(sc) => ScopedKey(sc, key)
case None => scoped
}
val (definingKey, providedBy) = structure.data.definingKey(key) match
case Some(k) => k -> s"Provided by:\n\t${Scope.display(k.scope, key.key.label)}\n"
case None => key -> ""
val comp =
Def.compiled(structure.settings, actual)(using
structure.delegates,
structure.scopeLocal,
display
)
val definedAt = comp get definingScoped map { c =>
Def.definedAtString(c.settings).capitalize
} getOrElse ""
val definedAt = comp
.get(definingKey)
.map(c => Def.definedAtString(c.settings).capitalize)
.getOrElse("")
val cMap = Def.flattenLocals(comp)
val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
val related = cMap.keys.filter(k => k.key == key.key && k.scope != key.scope)
def derivedDependencies(c: ScopedKey[?]): List[ScopedKey[?]] =
comp
.get(c)
@ -420,14 +408,14 @@ trait ProjectExtra extends Scoped.Syntax:
.toList
.flatten
val depends = cMap.get(scoped) match {
case Some(c) => c.dependencies.toSet; case None => Set.empty
}
val derivedDepends: Set[ScopedKey[?]] = derivedDependencies(definingScoped).toSet
val depends = cMap.get(key) match
case Some(c) => c.dependencies.toSet
case None => Set.empty
val derivedDepends: Set[ScopedKey[?]] = derivedDependencies(definingKey).toSet
val reverse = Project.reverseDependencies(cMap, scoped)
val reverse = Project.reverseDependencies(cMap, key)
val derivedReverse =
reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet
reverse.filter(r => derivedDependencies(r).contains(definingKey)).toSet
def printDepScopes(
baseLabel: String,
@ -460,7 +448,7 @@ trait ProjectExtra extends Scoped.Syntax:
definedAt +
printDepScopes("Dependencies", "derived from", depends, derivedDepends) +
printDepScopes("Reverse dependencies", "derives", reverse, derivedReverse) +
printScopes("Delegates", delegates(structure, scope, key)) +
printScopes("Delegates", delegates(structure, key.scope, key.key)) +
printScopes("Related", related, 10)
}

View File

@ -11,16 +11,14 @@ package sbt
import Def.ScopedKey
import sbt.internal.util.KeyTag
final case class ScopedKeyData[A](scoped: ScopedKey[A], value: Any) {
val key = scoped.key
val scope = scoped.scope
def typeName: String = key.tag.toString
final case class ScopedKeyData[A](key: ScopedKey[A], definingKey: ScopedKey[A], value: Any) {
def typeName: String = key.key.tag.toString
def settingValue: Option[Any] =
key.tag match
key.key.tag match
case KeyTag.Setting(_) => Some(value)
case _ => None
def description: String =
key.tag match
key.key.tag match
case KeyTag.Task(typeArg) => s"Task: $typeArg"
case KeyTag.SeqTask(typeArg) => s"Task: Seq[$typeArg]"
case KeyTag.InputTask(typeArg) => s"Input task: $typeArg"

View File

@ -49,20 +49,21 @@ object SessionVar {
def orEmpty(opt: Option[Map]) = opt.getOrElse(emptyMap)
def transform[S](task: Task[S], f: (State, S) => State): Task[S] = {
def transform[S](task: Task[S], f: (State, S) => State): Task[S] =
val g = (s: S, map: AttributeMap) => map.put(Keys.transformState, (state: State) => f(state, s))
task.copy(info = task.info.postTransform(g))
}
task.postTransform(g)
def resolveContext[T](
key: ScopedKey[Task[T]],
context: Scope,
state: State
): ScopedKey[Task[T]] = {
val subScope = Scope.replaceThis(context)(key.scope)
val scope = Project.structure(state).data.definingScope(subScope, key.key) getOrElse subScope
ScopedKey(scope, key.key)
}
): ScopedKey[Task[T]] =
val subScoped = Project.replaceThis(context)(key)
Project
.structure(state)
.data
.definingKey(subScoped)
.getOrElse(subScoped)
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
Project.structure(state).streams(state).use(key) { s =>

View File

@ -112,7 +112,7 @@ private[sbt] abstract class AbstractTaskExecuteProgress extends ExecuteProgress
}
private def taskName0(t: TaskId[?]): String = {
def definedName(node: Task[?]): Option[String] =
node.info.name.orElse(TaskName.transformNode(node).map(showScopedKey.show))
node.name.orElse(TaskName.transformNode(node).map(showScopedKey.show))
def inferredName(t: Task[?]): Option[String] = nameDelegate(t) map taskName
def nameDelegate(t: Task[?]): Option[TaskId[?]] =
Option(anonOwners.get(t)).orElse(Option(calledBy.get(t)))

View File

@ -24,7 +24,6 @@ import sbt.internal.util.{
AttributeMap,
IMap,
MessageOnlyException,
Settings,
Util,
}
import sbt.util.Show
@ -61,7 +60,7 @@ object Act {
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope]
data: Def.Settings
): Parser[ScopedKey[Any]] =
scopedKeySelected(index, current, defaultConfigs, keyMap, data, askProject = true)
.map(_.key.asInstanceOf[ScopedKey[Any]])
@ -115,7 +114,7 @@ object Act {
current: ProjectRef,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope],
data: Def.Settings,
askProject: Boolean,
): Parser[ParsedKey] =
scopedKeyFull(index, current, defaultConfigs, keyMap, askProject = askProject).flatMap {
@ -197,7 +196,7 @@ object Act {
key
)
def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(implicit
def select(allKeys: Seq[Parser[ParsedKey]], data: Def.Settings)(implicit
show: Show[ScopedKey[?]]
): Parser[ParsedKey] =
seq(allKeys) flatMap { ss =>
@ -235,10 +234,7 @@ object Act {
def showAmbiguous(keys: Seq[ScopedKey[?]])(implicit show: Show[ScopedKey[?]]): String =
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = {
val key = parsed.key
data.definingScope(key.scope, key.key) == Some(key.scope)
}
def isValid(data: Def.Settings)(parsed: ParsedKey): Boolean = data.contains(parsed.key)
def examples(p: Parser[String], exs: Set[String], label: String): Parser[String] =
(p !!! ("Expected " + label)).examples(exs)
@ -571,28 +567,14 @@ object Act {
def keyValues[T](extracted: Extracted)(keys: Seq[ScopedKey[T]]): Values[T] =
keyValues(extracted.structure)(keys)
def keyValues[T](structure: BuildStructure)(keys: Seq[ScopedKey[T]]): Values[T] =
keys.flatMap { key =>
getValue(structure.data, key.scope, key.key) map { value =>
KeyValue(key, value)
}
}
private def anyKeyValues(
structure: BuildStructure,
keys: Seq[ScopedKey[?]]
): Seq[KeyValue[?]] =
keys.flatMap { key =>
getValue(structure.data, key.scope, key.key) map { value =>
KeyValue(key, value)
}
}
keys.flatMap(key => getValue(structure.data, key).map(KeyValue(key, _)))
private def getValue[T](
data: Settings[Scope],
scope: Scope,
key: AttributeKey[T]
): Option[T] =
if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(scope, key)
else data.get(scope, key)
private def anyKeyValues(structure: BuildStructure, keys: Seq[ScopedKey[?]]): Seq[KeyValue[?]] =
keys.flatMap(key => getValue(structure.data, key).map(KeyValue(key, _)))
private def getValue[T](data: Def.Settings, key: ScopedKey[T]): Option[T] =
if (java.lang.Boolean.getBoolean("sbt.cli.nodelegation")) data.getDirect(key)
else data.get(key)
def requireSession[T](s: State, p: => Parser[T]): Parser[T] =
if s.get(sessionSettings).isEmpty then failure("No project loaded") else p

View File

@ -11,7 +11,7 @@ package internal
import java.text.DateFormat
import sbt.Def.ScopedKey
import sbt.Def.{ ScopedKey, Settings }
import sbt.Keys.{ showSuccess, showTiming, timingFormat }
import sbt.ProjectExtra.*
import sbt.SlashSyntax0.given
@ -157,7 +157,7 @@ object Aggregation {
private def timingString(
startTime: Long,
endTime: Long,
data: Settings[Scope],
data: Settings,
currentRef: ProjectRef,
): String = {
val format = (currentRef / timingFormat).get(data) getOrElse defaultFormat
@ -266,29 +266,40 @@ object Aggregation {
else extra.aggregates.forward(ref)
}
/**
* Compute the reverse aggregate keys of all the `keys` at once.
* This is more performant than computing the reverse aggregate keys of each key individually
* because it avoids duplicated work: one aggregate key is the aggregation of many keys.
*/
def reverseAggregate[Proj](
keys: Set[ScopedKey[?]],
extra: BuildUtil[Proj],
): Iterable[ScopedKey[?]] =
val mask = ScopeMask()
def recur(keys: Set[ScopedKey[?]], acc: Set[ScopedKey[?]]): Set[ScopedKey[?]] =
if keys.isEmpty then acc
else
val aggKeys = for
key <- keys
ref <- projectAggregates(key.scope.project.toOption, extra, reverse = true)
toResolve = key.scope.copy(project = Select(ref))
resolved = Resolve(extra, Zero, key.key, mask)(toResolve)
scoped = ScopedKey(resolved, key.key)
if !acc.contains(scoped)
yield scoped
val filteredAggKeys = aggKeys.filter(aggregationEnabled(_, extra.data))
// recursive because an aggregate project can be aggregated in another aggregate project
recur(filteredAggKeys, acc ++ filteredAggKeys)
recur(keys, keys)
def aggregate[A1, Proj](
key: ScopedKey[A1],
rawMask: ScopeMask,
extra: BuildUtil[Proj],
reverse: Boolean = false
extra: BuildUtil[Proj]
): Seq[ScopedKey[A1]] =
val mask = rawMask.copy(project = true)
Dag.topologicalSort(key): (k) =>
if reverse then reverseAggregatedKeys(k, extra, mask)
else if aggregationEnabled(k, extra.data) then aggregatedKeys(k, extra, mask)
else Nil
def reverseAggregatedKeys[T](
key: ScopedKey[T],
extra: BuildUtil[?],
mask: ScopeMask
): Seq[ScopedKey[T]] =
projectAggregates(key.scope.project.toOption, extra, reverse = true) flatMap { ref =>
val toResolve = key.scope.copy(project = Select(ref))
val resolved = Resolve(extra, Zero, key.key, mask)(toResolve)
val skey = ScopedKey(resolved, key.key)
if (aggregationEnabled(skey, extra.data)) skey :: Nil else Nil
}
if aggregationEnabled(k, extra.data) then aggregatedKeys(k, extra, mask) else Nil
def aggregatedKeys[T](
key: ScopedKey[T],
@ -301,7 +312,7 @@ object Aggregation {
ScopedKey(resolved, key.key)
}
def aggregationEnabled(key: ScopedKey[?], data: Settings[Scope]): Boolean =
def aggregationEnabled(key: ScopedKey[?], data: Settings): Boolean =
(Scope.fillTaskAxis(key.scope, key.key) / Keys.aggregate).get(data).getOrElse(true)
private[sbt] val suppressShow =
AttributeKey[Boolean]("suppress-aggregation-show", Int.MaxValue)

View File

@ -20,7 +20,7 @@ import sbt.SlashSyntax0.given
import BuildStreams.Streams
import sbt.io.syntax._
import sbt.internal.inc.MappedFileConverter
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed, Settings }
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed }
import sbt.internal.util.Attributed.data
import sbt.util.Logger
import xsbti.FileConverter
@ -29,7 +29,7 @@ final class BuildStructure(
val units: Map[URI, LoadedBuildUnit],
val root: URI,
val settings: Seq[Setting[?]],
val data: Settings[Scope],
val data: Def.Settings,
val index: StructureIndex,
val streams: State => Streams,
val delegates: Scope => Seq[Scope],
@ -71,7 +71,6 @@ final class BuildStructure(
// information that is not original, but can be reconstructed from the rest of BuildStructure
final class StructureIndex(
val keyMap: Map[String, AttributeKey[?]],
val taskToKey: Map[Task[?], ScopedKey[Task[?]]],
val triggers: Triggers,
val keyIndex: KeyIndex,
val aggregateKeyIndex: KeyIndex,
@ -271,7 +270,7 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) {
unit.projects.map(p => ProjectRef(build, p.id) -> p)
}.toIndexedSeq
def extra(data: Settings[Scope])(keyIndex: KeyIndex): BuildUtil[ResolvedProject] =
def extra(data: Def.Settings)(keyIndex: KeyIndex): BuildUtil[ResolvedProject] =
BuildUtil(root, units, keyIndex, data)
private[sbt] def autos = GroupedAutoPlugins(units)
@ -309,7 +308,7 @@ object BuildStreams {
def mkStreams(
units: Map[URI, LoadedBuildUnit],
root: URI,
data: Settings[Scope]
data: Def.Settings
): State => Streams = s => {
s.get(Keys.stateStreams).getOrElse {
std.Streams(
@ -324,7 +323,7 @@ object BuildStreams {
}
}
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Settings[Scope])(
def path(units: Map[URI, LoadedBuildUnit], root: URI, data: Def.Settings)(
scoped: ScopedKey[?]
): File =
resolvePath(projectPath(units, root, scoped, data), nonProjectPath(scoped))
@ -386,7 +385,7 @@ object BuildStreams {
units: Map[URI, LoadedBuildUnit],
root: URI,
scoped: ScopedKey[?],
data: Settings[Scope]
data: Def.Settings
): File =
scoped.scope.project match {
case Zero => refTarget(GlobalScope, units(root).localBase, data) / GlobalPath
@ -397,9 +396,9 @@ object BuildStreams {
case This => sys.error("Unresolved project reference (This) in " + displayFull(scoped))
}
def refTarget(ref: ResolvedReference, fallbackBase: File, data: Settings[Scope]): File =
def refTarget(ref: ResolvedReference, fallbackBase: File, data: Def.Settings): File =
refTarget(GlobalScope.copy(project = Select(ref)), fallbackBase, data)
def refTarget(scope: Scope, fallbackBase: File, data: Settings[Scope]): File =
def refTarget(scope: Scope, fallbackBase: File, data: Def.Settings): File =
((scope / Keys.target).get(data) getOrElse outputDirectory(fallbackBase)) / StreamsDirectory
}

View File

@ -9,13 +9,13 @@
package sbt
package internal
import sbt.internal.util.{ Relation, Settings, Dag }
import sbt.internal.util.{ Relation, Dag }
import java.net.URI
final class BuildUtil[Proj](
val keyIndex: KeyIndex,
val data: Settings[Scope],
val data: Def.Settings,
val root: URI,
val rootProjectID: URI => String,
val project: (URI, String) => Proj,
@ -57,7 +57,7 @@ object BuildUtil {
root: URI,
units: Map[URI, LoadedBuildUnit],
keyIndex: KeyIndex,
data: Settings[Scope]
data: Def.Settings
): BuildUtil[ResolvedProject] = {
val getp = (build: URI, project: String) => Load.getProject(units, build, project)
val configs = (_: ResolvedProject).configurations.map(c => ConfigKey(c.name))

View File

@ -15,7 +15,7 @@ import sbt.Keys._
import sbt.nio.Keys._
import sbt.nio.file.{ Glob, RecursiveGlob }
import sbt.Def.Initialize
import sbt.internal.util.{ Attributed, Dag, Settings }
import sbt.internal.util.{ Attributed, Dag }
import sbt.librarymanagement.{ Configuration, TrackLevel }
import sbt.librarymanagement.Configurations.names
import sbt.std.TaskExtra._
@ -180,7 +180,7 @@ private[sbt] object ClasspathImpl {
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies,
track: TrackLevel,
log: Logger
@ -198,7 +198,7 @@ private[sbt] object ClasspathImpl {
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies,
track: TrackLevel,
log: Logger
@ -244,7 +244,7 @@ private[sbt] object ClasspathImpl {
projectRef: ProjectRef,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies,
track: TrackLevel,
log: Logger
@ -282,7 +282,7 @@ private[sbt] object ClasspathImpl {
def unmanagedDependencies0(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies,
log: Logger
): Initialize[Task[Classpath]] =
@ -306,7 +306,7 @@ private[sbt] object ClasspathImpl {
def unmanagedLibs(
dep: ResolvedReference,
conf: String,
data: Settings[Scope]
data: Def.Settings
): Task[Classpath] =
getClasspath(unmanagedJars, dep, conf, data)
@ -315,7 +315,7 @@ private[sbt] object ClasspathImpl {
deps: BuildDependencies,
conf: Configuration,
self: Configuration,
data: Settings[Scope],
data: Def.Settings,
track: TrackLevel,
includeSelf: Boolean,
log: Logger
@ -346,7 +346,7 @@ private[sbt] object ClasspathImpl {
def interSort(
projectRef: ProjectRef,
conf: Configuration,
data: Settings[Scope],
data: Def.Settings,
deps: BuildDependencies
): Seq[(ProjectRef, String)] =
val visited = (new LinkedHashSet[(ProjectRef, String)]).asScala
@ -431,7 +431,7 @@ private[sbt] object ClasspathImpl {
def allConfigs(conf: Configuration): Seq[Configuration] =
Dag.topologicalSort(conf)(_.extendsConfigs)
def getConfigurations(p: ResolvedReference, data: Settings[Scope]): Seq[Configuration] =
def getConfigurations(p: ResolvedReference, data: Def.Settings): Seq[Configuration] =
(p / ivyConfigurations).get(data).getOrElse(Nil)
def confOpt(configurations: Seq[Configuration], conf: String): Option[Configuration] =
@ -441,14 +441,14 @@ private[sbt] object ClasspathImpl {
key: TaskKey[Seq[A]],
dep: ResolvedReference,
conf: Configuration,
data: Settings[Scope]
data: Def.Settings
): Task[Seq[A]] = getClasspath(key, dep, conf.name, data)
def getClasspath[A](
key: TaskKey[Seq[A]],
dep: ResolvedReference,
conf: String,
data: Settings[Scope]
data: Def.Settings
): Task[Seq[A]] =
(dep / ConfigKey(conf) / key).get(data) match {
case Some(x) => x

View File

@ -114,11 +114,9 @@ private[sbt] object Clean {
// This is the special portion of the task where we clear out the relevant streams
// and file outputs of a task.
val streamsKey = scope.task.toOption.map(k => ScopedKey(scope.copy(task = Zero), k))
val stampKey = ScopedKey(scope, inputFileStamps.key)
val stampsKey =
extracted.structure.data.getDirect(scope, inputFileStamps.key) match {
case Some(_) => ScopedKey(scope, inputFileStamps.key) :: Nil
case _ => Nil
}
if extracted.structure.data.contains(stampKey) then stampKey :: Nil else Nil
val streamsGlobs =
(streamsKey.toSeq ++ stampsKey)
.map(k => manager(k).cacheDirectory.toPath.toGlob / **)

View File

@ -92,7 +92,7 @@ object GlobalPlugin {
(prods ++ intcp).distinct
)(updateReport.value)
}
val resolvedTaskInit = taskInit.mapReferenced(Project.mapScope(Scope.replaceThis(p)))
val resolvedTaskInit = taskInit.mapReferenced(Project.replaceThis(p))
val task = resolvedTaskInit.evaluate(data)
val roots = resolvedTaskInit.dependencies
evaluate(state, structure, task, roots)

View File

@ -87,8 +87,7 @@ object Inspect {
val extracted = Project.extract(s)
import extracted._
option match {
case Details(actual) =>
Project.details(extracted.structure, actual, sk.scope, sk.key)
case Details(actual) => Project.details(extracted.structure, actual, sk)
case DependencyTreeMode =>
val basedir = new File(Project.session(s).current.build)
Project

View File

@ -29,30 +29,14 @@ object KeyIndex {
}
def aggregate(
known: Iterable[ScopedKey[?]],
known: Set[ScopedKey[?]],
extra: BuildUtil[?],
projects: Map[URI, Set[String]],
configurations: Map[String, Seq[Configuration]]
): ExtendableKeyIndex = {
/*
* Used to be:
* (base(projects, configurations) /: known) { (index, key) =>
* index.addAggregated(key, extra)
* }
* This was a significant serial bottleneck during project loading that we can work around by
* computing the aggregations in parallel and then bulk adding them to the index.
*/
import scala.collection.parallel.CollectionConverters.*
val toAggregate = known.par.map {
case key if validID(key.key.label) =>
Aggregation.aggregate(key, ScopeMask(), extra, reverse = true)
case _ => Nil
}
toAggregate.foldLeft(base(projects, configurations)) {
case (index, Nil) => index
case (index, keys) => keys.foldLeft(index)(_.add(_))
}
}
): ExtendableKeyIndex =
Aggregation
.reverseAggregate(known.filter(k => validID(k.key.label)), extra)
.foldLeft(base(projects, configurations))(_.add(_))
private def base(
projects: Map[URI, Set[String]],
@ -278,7 +262,7 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn
def addAggregated(scoped: ScopedKey[?], extra: BuildUtil[?]): ExtendableKeyIndex =
if (validID(scoped.key.label)) {
val aggregateProjects = Aggregation.aggregate(scoped, ScopeMask(), extra, reverse = true)
val aggregateProjects = Aggregation.reverseAggregate(Set(scoped), extra)
aggregateProjects.foldLeft(this: ExtendableKeyIndex)(_.add(_))
} else this

View File

@ -94,7 +94,7 @@ object LintUnused {
}
def lintResultLines(
result: Seq[(ScopedKey[?], String, Vector[SourcePosition])]
result: Seq[(ScopedKey[?], String, Seq[SourcePosition])]
): Vector[String] = {
import scala.collection.mutable.ListBuffer
val buffer = ListBuffer.empty[String]
@ -127,7 +127,7 @@ object LintUnused {
state: State,
includeKeys: String => Boolean,
excludeKeys: String => Boolean
): Seq[(ScopedKey[?], String, Vector[SourcePosition])] = {
): Seq[(ScopedKey[?], String, Seq[SourcePosition])] = {
val extracted = Project.extract(state)
val structure = extracted.structure
val display = Def.showShortKey(None) // extracted.showKey
@ -135,17 +135,11 @@ object LintUnused {
val cMap = Def.flattenLocals(comp)
val used: Set[ScopedKey[?]] = cMap.values.flatMap(_.dependencies).toSet
val unused: Seq[ScopedKey[?]] = cMap.keys.filter(!used.contains(_)).toSeq
val withDefinedAts: Seq[UnusedKey] = unused map { u =>
val definingScope = structure.data.definingScope(u.scope, u.key)
val definingScoped = definingScope match {
case Some(sc) => ScopedKey(sc, u.key)
case _ => u
}
val definedAt = comp.get(definingScoped) match {
case Some(c) => definedAtString(c.settings.toVector)
val withDefinedAts: Seq[UnusedKey] = unused.map { u =>
val data = Project.scopedKeyData(structure, u)
val definedAt = comp.get(data.map(_.definingKey).getOrElse(u)) match
case Some(c) => definedAtString(c.settings)
case _ => Vector.empty
}
val data = Project.scopedKeyData(structure, u.scope, u.key)
UnusedKey(u, definedAt, data)
}
@ -167,18 +161,16 @@ object LintUnused {
&& isLocallyDefined(u) =>
u
}
(unusedKeys map { u =>
(u.scoped, display.show(u.scoped), u.positions)
}).sortBy(_._2)
unusedKeys.map(u => (u.scoped, display.show(u.scoped), u.positions)).sortBy(_._2)
}
private case class UnusedKey(
scoped: ScopedKey[?],
positions: Vector[SourcePosition],
positions: Seq[SourcePosition],
data: Option[ScopedKeyData[?]]
)
private def definedAtString(settings: Vector[Setting[?]]): Vector[SourcePosition] = {
private def definedAtString(settings: Seq[Setting[?]]): Seq[SourcePosition] = {
settings flatMap { setting =>
setting.pos match {
case NoPosition => Vector.empty

View File

@ -21,7 +21,7 @@ import sbt.internal.inc.classpath.ClasspathUtil
import sbt.internal.inc.{ MappedFileConverter, ScalaInstance, ZincLmUtil, ZincUtil }
import sbt.internal.util.Attributed.data
import sbt.internal.util.Types.const
import sbt.internal.util.{ Attributed, Settings }
import sbt.internal.util.Attributed
import sbt.internal.server.BuildServerEvalReporter
import sbt.io.{ GlobFilter, IO }
import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResolution, IvyPaths }
@ -310,7 +310,7 @@ private[sbt] object Load {
(rootEval, bs)
}
private def checkTargets(data: Settings[Scope]): Option[String] =
private def checkTargets(data: Def.Settings): Option[String] =
val dups = overlappingTargets(allTargets(data))
if (dups.isEmpty) None
else {
@ -323,7 +323,7 @@ private[sbt] object Load {
private def overlappingTargets(targets: Seq[(ProjectRef, File)]): Map[File, Seq[ProjectRef]] =
targets.groupBy(_._2).view.filter(_._2.size > 1).mapValues(_.map(_._1)).toMap
private def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
private def allTargets(data: Def.Settings): Seq[(ProjectRef, File)] = {
import ScopeFilter._
val allProjects = ScopeFilter(Make.inAnyProject)
val targetAndRef = Def.setting { (Keys.thisProjectRef.value, Keys.target.value) }
@ -366,25 +366,24 @@ private[sbt] object Load {
}
def setDefinitionKey[T](tk: Task[T], key: ScopedKey[?]): Task[T] =
if (isDummy(tk)) tk else Task(tk.info.set(Keys.taskDefinitionKey, key), tk.work)
if (isDummy(tk)) tk else tk.set(Keys.taskDefinitionKey, key)
def structureIndex(
data: Settings[Scope],
data: Def.Settings,
settings: Seq[Setting[?]],
extra: KeyIndex => BuildUtil[?],
projects: Map[URI, LoadedBuildUnit]
): StructureIndex = {
val keys = Index.allKeys(settings)
val attributeKeys = Index.attributeKeys(data) ++ keys.map(_.key)
val scopedKeys = keys ++ data.allKeys((s, k) => ScopedKey(s, k)).toVector
val attributeKeys = data.attributeKeys ++ keys.map(_.key)
val scopedKeys = keys ++ data.keys
val projectsMap = projects.view.mapValues(_.defined.keySet).toMap
val configsMap: Map[String, Seq[Configuration]] =
projects.values.flatMap(bu => bu.defined map { case (k, v) => (k, v.configurations) }).toMap
val keyIndex = KeyIndex(scopedKeys.toVector, projectsMap, configsMap)
val aggIndex = KeyIndex.aggregate(scopedKeys.toVector, extra(keyIndex), projectsMap, configsMap)
val keyIndex = KeyIndex(scopedKeys, projectsMap, configsMap)
val aggIndex = KeyIndex.aggregate(scopedKeys, extra(keyIndex), projectsMap, configsMap)
new StructureIndex(
Index.stringToKeyMap(attributeKeys),
Index.taskToKeyMap(data),
Index.triggers(data),
keyIndex,
aggIndex

View File

@ -22,7 +22,7 @@ import java.io.PrintWriter
sealed abstract class LogManager {
def apply(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
writer: PrintWriter,
@ -30,20 +30,20 @@ sealed abstract class LogManager {
): ManagedLogger
@deprecated("Use alternate apply that provides a LoggerContext", "1.4.0")
def apply(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
writer: PrintWriter
): ManagedLogger = apply(data, state, task, writer, LoggerContext.globalContext)
def backgroundLog(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
context: LoggerContext
): ManagedLogger
@deprecated("Use alternate background log that provides a LoggerContext", "1.4.0")
final def backgroundLog(data: Settings[Scope], state: State, task: ScopedKey[?]): ManagedLogger =
final def backgroundLog(data: Def.Settings, state: State, task: ScopedKey[?]): ManagedLogger =
backgroundLog(data, state, task, LoggerContext.globalContext)
}
@ -62,7 +62,7 @@ object LogManager {
// This is called by mkStreams
//
def construct(
data: Settings[Scope],
data: Def.Settings,
state: State
): (ScopedKey[?], PrintWriter) => ManagedLogger =
(task: ScopedKey[?], to: PrintWriter) => {
@ -74,7 +74,7 @@ object LogManager {
@deprecated("Use alternate constructBackgroundLog that provides a LoggerContext", "1.8.0")
def constructBackgroundLog(
data: Settings[Scope],
data: Def.Settings,
state: State
): ScopedKey[?] => ManagedLogger = {
val context = state.get(Keys.loggerContext).getOrElse(LoggerContext.globalContext)
@ -82,7 +82,7 @@ object LogManager {
}
def constructBackgroundLog(
data: Settings[Scope],
data: Def.Settings,
state: State,
context: LoggerContext
): (ScopedKey[?]) => ManagedLogger =
@ -119,7 +119,7 @@ object LogManager {
extra: AppenderSupplier
) extends LogManager {
def apply(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
to: PrintWriter,
@ -137,7 +137,7 @@ object LogManager {
)
def backgroundLog(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
context: LoggerContext
@ -150,16 +150,16 @@ object LogManager {
// to change from global being the default to overriding, switch the order of state.get and data.get
def getOr[T](
key: AttributeKey[T],
data: Settings[Scope],
data: Def.Settings,
scope: Scope,
state: State,
default: T
): T =
data.get(scope, key) orElse state.get(key) getOrElse default
data.get(ScopedKey(scope, key)).orElse(state.get(key)).getOrElse(default)
@deprecated("Use defaultLogger that provides a LoggerContext", "1.4.0")
def defaultLogger(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
console: Appender,
@ -170,7 +170,7 @@ object LogManager {
defaultLogger(data, state, task, console, backed, relay, extra, LoggerContext.globalContext)
// This is the main function that is used to generate the logger for tasks.
def defaultLogger(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
console: Appender,
@ -242,7 +242,7 @@ object LogManager {
}
def backgroundLog(
data: Settings[Scope],
data: Def.Settings,
state: State,
task: ScopedKey[?],
console: Appender,
@ -271,7 +271,7 @@ object LogManager {
// TODO: Fix this
// if global logging levels are not explicitly set, set them from project settings
// private[sbt] def setGlobalLogLevels(s: State, data: Settings[Scope]): State =
// private[sbt] def setGlobalLogLevels(s: State, data: Def.Settings): State =
// if (hasExplicitGlobalLogLevels(s))
// s
// else {

View File

@ -24,7 +24,8 @@ private[sbt] case class ProjectQuery(
val scalaMatches =
params.get(Keys.scalaBinaryVersion.key) match
case Some(expected) =>
val actualSbv = structure.data.get(Scope.ThisScope.rescope(p), scalaBinaryVersion.key)
val actualSbv =
structure.data.get(Def.ScopedKey(Scope.ThisScope.rescope(p), scalaBinaryVersion.key))
actualSbv match
case Some(sbv) => sbv == expected
case None => true

View File

@ -9,7 +9,7 @@
package sbt
package internal
import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Util }
import sbt.internal.util.{ AttributeKey, complete, Relation, Util }
import sbt.util.Show
import sbt.librarymanagement.Configuration
@ -138,7 +138,7 @@ private[sbt] object SettingCompletions {
* The last part of the completion will generate a template for the value or function literal that will initialize the setting or task.
*/
def settingParser(
settings: Settings[Scope],
settings: Def.Settings,
rawKeyMap: Map[String, AttributeKey[?]],
context: ResolvedProject,
): Parser[String] = {
@ -156,7 +156,7 @@ private[sbt] object SettingCompletions {
/** Parser for a Scope+AttributeKey (ScopedKey). */
def scopedKeyParser(
keyMap: Map[String, AttributeKey[?]],
settings: Settings[Scope],
settings: Def.Settings,
context: ResolvedProject
): Parser[ScopedKey[?]] = {
val cutoff = KeyRanks.MainCutoff
@ -195,15 +195,11 @@ private[sbt] object SettingCompletions {
*/
def scopeParser(
key: AttributeKey[?],
settings: Settings[Scope],
settings: Def.Settings,
context: ResolvedProject
): Parser[Scope] = {
val data = settings.data
val allScopes = data.keys.toSeq
val definedScopes = data.toSeq flatMap { case (scope, attrs) =>
if attrs.contains(key) then scope :: Nil else Nil
}
scope(allScopes, definedScopes, context)
val definedScopes = settings.keys.collect { case sk if sk.key == key => sk.scope }
scope(settings.scopes.toSeq, definedScopes.toSeq, context)
}
private def scope(

View File

@ -25,11 +25,8 @@ object SettingGraph {
compiled(structure.settings, false)(using structure.delegates, structure.scopeLocal, display)
)
def loop(scoped: ScopedKey[?], generation: Int): SettingGraph = {
val key = scoped.key
val scope = scoped.scope
val definedIn = structure.data.definingScope(scope, key) map { sc =>
display.show(ScopedKey(sc, key))
}
val data = Project.scopedKeyData(structure, scoped)
val definedIn = data.map(d => display.show(d.definingKey))
val depends = cMap.get(scoped) match {
case Some(c) => c.dependencies.toSet; case None => Set.empty
}
@ -39,8 +36,8 @@ object SettingGraph {
SettingGraph(
display.show(scoped),
definedIn,
Project.scopedKeyData(structure, scope, key),
key.description,
data,
scoped.key.description,
basedir,
depends map { (x: ScopedKey[?]) =>
loop(x, generation + 1)

View File

@ -15,9 +15,9 @@ import Keys.taskDefinitionKey
private[sbt] object TaskName {
def name(node: Task[?]): String = definedName(node).getOrElse(anonymousName(node))
def definedName(node: Task[?]): Option[String] =
node.info.name.orElse(transformNode(node).map(displayFull))
node.name.orElse(transformNode(node).map(displayFull))
def anonymousName(node: TaskId[?]): String =
"<anon-" + System.identityHashCode(node).toHexString + ">"
def transformNode(node: Task[?]): Option[ScopedKey[?]] =
node.info.attributes.get(taskDefinitionKey)
node.get(taskDefinitionKey)
}

View File

@ -16,7 +16,6 @@ import sbt.ProjectExtra.*
import sbt.SlashSyntax0.given
import sbt.internal.io.Source
import sbt.internal.nio.Globs
import sbt.internal.util.AttributeMap
import sbt.internal.util.complete.Parser
import sbt.nio.FileStamper
import sbt.nio.Keys._
@ -54,7 +53,7 @@ private[sbt] object WatchTransitiveDependencies {
val state: State
) {
def structure: BuildStructure = extracted.structure
def data: Map[Scope, AttributeMap] = extracted.structure.data.data
def data: Settings = extracted.structure.data
}
private def argumentsImpl(
@ -113,18 +112,18 @@ private[sbt] object WatchTransitiveDependencies {
val keys = collectKeys(args, allKeys, Set.empty, Set.empty)
def getDynamicInputs(scopedKey: ScopedKey[Seq[Glob]], trigger: Boolean): Seq[DynamicInput] = {
data
.get(scopedKey.scope)
.map { am =>
am.get(scopedKey.key) match {
case Some(globs: Seq[Glob]) =>
if (!trigger) {
val stamper = am.get(inputFileStamper.key).getOrElse(FileStamper.Hash)
val forceTrigger = am.get(watchForceTriggerOnAnyChange.key).getOrElse(false)
globs.map(g => DynamicInput(g, stamper, forceTrigger))
} else {
globs.map(g => DynamicInput(g, FileStamper.LastModified, forceTrigger = true))
}
case None => Nil: Seq[DynamicInput]
.getDirect(scopedKey)
.map { globs =>
if (!trigger) {
val stamper =
data.getDirect(scopedKey.copy(key = inputFileStamper.key)).getOrElse(FileStamper.Hash)
val forceTrigger =
data
.getDirect(scopedKey.copy(key = watchForceTriggerOnAnyChange.key))
.getOrElse(false)
globs.map(g => DynamicInput(g, stamper, forceTrigger))
} else {
globs.map(g => DynamicInput(g, FileStamper.LastModified, forceTrigger = true))
}
}
.getOrElse(Nil)
@ -148,21 +147,15 @@ private[sbt] object WatchTransitiveDependencies {
.toIndexedSeq
val projects = projectScopes.flatMap(_.project.toOption).distinct.toSet
val scopes: Seq[Either[Scope, Seq[Glob]]] =
data.flatMap { case (s, am) =>
if (s == Scope.Global || s.project.toOption.exists(projects.contains))
am.get(Keys.watchSources.key) match {
case Some(k) =>
k.work match {
// Avoid extracted.runTask if possible.
case Action.Pure(w, _) => Some(Right(w().map(_.toGlob)))
case _ => Some(Left(s))
}
case _ => None
data.scopes.toSeq
.filter(s => s == Scope.Global || s.project.toOption.exists(projects.contains))
.flatMap { s =>
data.getDirect(ScopedKey(s, Keys.watchSources.key)).map { task =>
task.work match
case a: Action.Pure[Seq[Watched.WatchSource]] => Right(a.f().map(_.toGlob))
case _ => Left(s)
}
else {
None
}
}.toSeq
def toDynamicInput(glob: Glob): DynamicInput =
DynamicInput(glob, FileStamper.LastModified, forceTrigger = true)
scopes.flatMap {

View File

@ -19,7 +19,7 @@ import sjsonnew._
import sjsonnew.support.scalajson.unsafe._
object SettingQuery {
import sbt.internal.util.{ AttributeKey, Settings }
import sbt.internal.util.AttributeKey
import sbt.internal.util.complete.{ DefaultParsers, Parser }, DefaultParsers._
import sbt.Def.{ showBuildRelativeKey2, ScopedKey }
@ -70,7 +70,7 @@ object SettingQuery {
currentBuild: URI,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope]
data: Def.Settings
): Parser[ParsedKey] =
scopedKeyFull(index, currentBuild, defaultConfigs, keyMap) flatMap { choices =>
Act.select(choices, data)(showBuildRelativeKey2(currentBuild))
@ -81,7 +81,7 @@ object SettingQuery {
currentBuild: URI,
defaultConfigs: Option[ResolvedReference] => Seq[String],
keyMap: Map[String, AttributeKey[?]],
data: Settings[Scope]
data: Def.Settings
): Parser[ScopedKey[?]] =
scopedKeySelected(index, currentBuild, defaultConfigs, keyMap, data).map(_.key)
@ -96,7 +96,7 @@ object SettingQuery {
def getSettingValue[A](structure: BuildStructure, key: Def.ScopedKey[A]): Either[String, A] =
structure.data
.get(key.scope, key.key)
.get(key)
.toRight(s"Key ${Def.displayFull(key)} not found")
.flatMap {
case _: Task[_] => Left(s"Key ${Def.displayFull(key)} is a task, can only query settings")

View File

@ -145,7 +145,7 @@ private[sbt] object Settings {
* @return the setting with the task definition
*/
private def addTaskDefinition[T](setting: Def.Setting[Task[T]]): Def.Setting[Task[T]] =
setting.mapInit((sk, task) => Task(task.info.set(taskDefinitionKey, sk), task.work))
setting.mapInit((sk, task) => task.set(taskDefinitionKey, sk))
/**
* Returns all of the paths described by a glob along with their basic file attributes.

View File

@ -17,7 +17,6 @@ import sbt.internal.util.{
ConsoleOut,
GlobalLogging,
MainAppender,
Settings,
Terminal,
}
import sbt.internal.inc.PlainVirtualFileConverter
@ -97,7 +96,7 @@ object FakeState {
val delegates: (Scope) => Seq[Scope] = _ => Nil
val scopeLocal: Def.ScopeLocal = _ => Nil
val (cMap, data: Settings[Scope]) =
val (cMap, data: Def.Settings) =
Def.makeWithCompiledMap(settings)(using delegates, scopeLocal, Def.showFullKey)
val extra: KeyIndex => BuildUtil[?] = (keyIndex) =>
BuildUtil(base.toURI, Map.empty, keyIndex, data)

View File

@ -10,7 +10,7 @@ package sbt
package internal
import Def.{ ScopedKey, Setting }
import sbt.internal.util.{ AttributeKey, AttributeMap, Relation, Settings }
import sbt.internal.util.{ AttributeKey, Relation }
import sbt.internal.util.Types.{ const, some }
import sbt.internal.util.complete.Parser
import sbt.librarymanagement.Configuration
@ -59,17 +59,18 @@ abstract class TestBuild {
sealed case class Structure(
env: Env,
current: ProjectRef,
data: Settings[Scope],
data: Def.Settings,
keyIndex: KeyIndex,
keyMap: Map[String, AttributeKey[?]]
) {
override def toString =
env.toString + "\n" + "current: " + current + "\nSettings:\n\t" + showData + keyMap.keys
.mkString("All keys:\n\t", ", ", "")
def showKeys(map: AttributeMap): String = map.keys.mkString("\n\t ", ",", "\n")
def showKeys(keys: Iterable[AttributeKey[?]]): String = keys.mkString("\n\t ", ",", "\n")
def showData: String = {
val scopeStrings =
for ((scope, map) <- data.data) yield (Scope.display(scope, "<key>"), showKeys(map))
for (scope, keys) <- data.keys.groupMap(_.scope)(_.key)
yield (Scope.display(scope, "<key>"), showKeys(keys))
scopeStrings.toSeq.sorted.map(t => t._1 + t._2).mkString("\n\t")
}
val extra: BuildUtil[Proj] = {
@ -86,11 +87,10 @@ abstract class TestBuild {
}
lazy val allAttributeKeys: Set[AttributeKey[?]] = {
val x = data.data.values.flatMap(_.keys).toSet
if (x.isEmpty) {
if (data.attributeKeys.isEmpty) {
sys.error("allAttributeKeys is empty")
}
x
data.attributeKeys
}
lazy val (taskAxes, zeroTaskAxis, onlyTaskAxis, multiTaskAxis) = {
import collection.mutable
@ -98,11 +98,10 @@ abstract class TestBuild {
// task axis of Scope is set to Zero and the value of the second map is the original task axis
val taskAxesMappings =
for ((scope, keys) <- data.data; key <- keys.keys)
yield (ScopedKey(scope.copy(task = Zero), key), scope.task): (
ScopedKey[?],
ScopeAxis[AttributeKey[?]]
)
for
(scope, keys) <- data.keys.groupMap(_.scope)(_.key)
key <- keys
yield (ScopedKey(scope.copy(task = Zero), key), scope.task)
val taskAxes = Relation.empty ++ taskAxesMappings
val zero = new HashSet[ScopedKey[?]]
@ -240,15 +239,14 @@ abstract class TestBuild {
}
}
val data = Def.makeWithCompiledMap(settings)(using env.delegates, const(Nil), display)._2
val keys = data.allKeys((s, key) => ScopedKey(s, key))
val keyMap = keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[?]]
val keyMap = data.keys.map(k => (k.key.label, k.key)).toMap[String, AttributeKey[?]]
val projectsMap = env.builds.map(b => (b.uri, b.projects.map(_.id).toSet)).toMap
val confs = for {
b <- env.builds
p <- b.projects
} yield p.id -> p.configurations
val confMap = confs.toMap
Structure(env, current, data, KeyIndex(keys, projectsMap, confMap), keyMap)
Structure(env, current, data, KeyIndex(data.keys, projectsMap, confMap), keyMap)
}
lazy val mkEnv: Gen[Env] = {

View File

@ -9,6 +9,7 @@
package sbt
trait Import {
type Settings = Def.Settings
type Setting[T] = Def.Setting[T]
type ScopedKey[T] = Def.ScopedKey[T]
type SettingsDefinition = Def.SettingsDefinition
@ -146,7 +147,7 @@ trait Import {
// type Dag[A <: Dag[A]] = sbt.internal.util.Dag[A]
type DelegatingPMap[K[_], V[_]] = sbt.internal.util.DelegatingPMap[K, V]
val ErrorHandling = sbt.internal.util.ErrorHandling
type EvaluateSettings[S] = sbt.internal.util.EvaluateSettings[S]
// type EvaluateSettings[I <: Init] = sbt.internal.util.EvaluateSettings[I]
val EvaluationState = sbt.internal.util.EvaluationState
val ExitHook = sbt.internal.util.ExitHook
type ExitHook = sbt.internal.util.ExitHook
@ -168,7 +169,7 @@ trait Import {
type IDSet[T] = sbt.internal.util.IDSet[T]
val IMap = sbt.internal.util.IMap
type IMap[K[_], V[_]] = sbt.internal.util.IMap[K, V]
type Init[S] = sbt.internal.util.Init[S]
type Init = sbt.internal.util.Init
type JLine = sbt.internal.util.JLine
// val KCons = sbt.internal.util.KCons
// type KCons[H, +T <: KList[M], +M[_]] = sbt.internal.util.KCons[H, T, M]
@ -193,7 +194,6 @@ trait Import {
val Relation = sbt.internal.util.Relation
type Relation[A, B] = sbt.internal.util.Relation[A, B]
val ScalaKeywords = sbt.internal.util.ScalaKeywords
type Settings[S] = sbt.internal.util.Settings[S]
type SharedAttributeKey[T] = sbt.internal.util.SharedAttributeKey[T]
val Signals = sbt.internal.util.Signals
val SimpleReader = sbt.internal.util.SimpleReader

View File

@ -14,32 +14,51 @@ import ConcurrentRestrictions.{ Tag, TagMap, tagsKey }
import sbt.util.Monad
/**
* Combines metadata `info` and a computation `work` to define a task.
* Combines metadata `attributes` and a computation `work` to define a task.
*/
final case class Task[A](info: Info[A], work: Action[A]) extends TaskId[A]:
override def toString = info.name getOrElse ("Task(" + info + ")")
override def hashCode = info.hashCode
final class Task[A](
val attributes: AttributeMap,
val post: A => AttributeMap,
val work: Action[A]
) extends TaskId[A]:
override def toString = name.getOrElse(s"Task($attributes)")
def name: Option[String] = get(Task.Name)
def description: Option[String] = get(Task.Description)
def get[B](key: AttributeKey[B]): Option[B] = attributes.get(key)
def getOrElse[B](key: AttributeKey[B], default: => B): B = attributes.getOrElse(key, default)
def setName(name: String): Task[A] = set(Task.Name, name)
def setDescription(description: String): Task[A] = set(Task.Description, description)
def set[B](key: AttributeKey[B], value: B) =
new Task(attributes.put(key, value), post, work)
def postTransform(f: (A, AttributeMap) => AttributeMap): Task[A] =
new Task(attributes, a => f(a, post(a)), work)
def tag(tags: Tag*): Task[A] = tagw(tags.map(t => (t, 1))*)
def tagw(tags: (Tag, Int)*): Task[A] = {
val tgs: TagMap = info.get(tagsKey).getOrElse(TagMap.empty)
def tagw(tags: (Tag, Int)*): Task[A] =
val tgs: TagMap = get(tagsKey).getOrElse(TagMap.empty)
val value = tags.foldLeft(tgs)((acc, tag) => acc + tag)
val nextInfo = info.set(tagsKey, value)
withInfo(info = nextInfo)
}
def tags: TagMap = info.get(tagsKey).getOrElse(TagMap.empty)
def name: Option[String] = info.name
def attributes: AttributeMap = info.attributes
private[sbt] def withInfo(info: Info[A]): Task[A] =
Task(info = info, work = this.work)
set(tagsKey, value)
def tags: TagMap = get(tagsKey).getOrElse(TagMap.empty)
end Task
object Task:
import sbt.std.TaskExtra.*
def apply[A](work: Action[A]): Task[A] =
new Task[A](AttributeMap.empty, defaultAttributeMap, work)
def apply[A](attributes: AttributeMap, work: Action[A]): Task[A] =
new Task[A](attributes, defaultAttributeMap, work)
def unapply[A](task: Task[A]): Option[Action[A]] = Some(task.work)
val Name = AttributeKey[String]("name")
val Description = AttributeKey[String]("description")
val defaultAttributeMap = const(AttributeMap.empty)
given taskMonad: Monad[Task] with
type F[a] = Task[a]
override def pure[A1](a: () => A1): Task[A1] = toTask(a)
@ -54,34 +73,3 @@ object Task:
override def flatMap[A1, A2](in: F[A1])(f: A1 => F[A2]): F[A2] = in.flatMap(f)
override def flatten[A1](in: Task[Task[A1]]): Task[A1] = in.flatMap(identity)
end Task
/**
* Used to provide information about a task, such as the name, description, and tags for controlling
* concurrent execution.
* @param attributes
* Arbitrary user-defined key/value pairs describing this task
* @param post
* a transformation that takes the result of evaluating this task and produces user-defined
* key/value pairs.
*/
final case class Info[T](
attributes: AttributeMap = AttributeMap.empty,
post: T => AttributeMap = Info.defaultAttributeMap
) {
import Info._
def name = attributes.get(Name)
def description = attributes.get(Description)
def setName(n: String) = set(Name, n)
def setDescription(d: String) = set(Description, d)
def set[A](key: AttributeKey[A], value: A) = copy(attributes = this.attributes.put(key, value))
def get[A](key: AttributeKey[A]): Option[A] = attributes.get(key)
def postTransform(f: (T, AttributeMap) => AttributeMap) = copy(post = (t: T) => f(t, post(t)))
override def toString = if (attributes.isEmpty) "_" else attributes.toString
}
object Info:
val Name = AttributeKey[String]("name")
val Description = AttributeKey[String]("description")
val defaultAttributeMap = const(AttributeMap.empty)
end Info
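// A minimal usage sketch (illustrative only, not part of this file): with Info folded into
// the task's AttributeMap, metadata is set and read directly on the task, assuming the
// sbt.std.TaskExtra DSL is in scope:
import sbt.std.TaskExtra.*
val greet: Task[String] = task("hello").setName("greet").setDescription("a constant greeting")
assert(greet.name.contains("greet"))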

View File

@ -103,7 +103,7 @@ trait TaskExtra0 {
joinTasks0[Any](existToAny(in))
private[sbt] def joinTasks0[S](in: Seq[Task[S]]): JoinTask[S, Seq] = new JoinTask[S, Seq] {
def join: Task[Seq[S]] =
Task[Seq[S]](Info(), Action.Join(in, (s: Seq[Result[S]]) => Right(TaskExtra.all(s))))
Task[Seq[S]](Action.Join(in, (s: Seq[Result[S]]) => Right(TaskExtra.all(s))))
def reduced(f: (S, S) => S): Task[S] = TaskExtra.reduced(in.toIndexedSeq, f)
}
private[sbt] def existToAny(in: Seq[Task[?]]): Seq[Task[Any]] = in.asInstanceOf[Seq[Task[Any]]]
@ -114,8 +114,8 @@ trait TaskExtra extends TaskExtra0 {
final def constant[T](t: T): Task[T] = task(t)
final def task[T](f: => T): Task[T] = toTask(() => f)
final implicit def toTask[T](f: () => T): Task[T] = Task(Info(), Action.Pure(f, false))
final def inlineTask[T](value: T): Task[T] = Task(Info(), Action.Pure(() => value, true))
final implicit def toTask[T](f: () => T): Task[T] = Task(Action.Pure(f, false))
final def inlineTask[T](value: T): Task[T] = Task(Action.Pure(() => value, true))
final implicit def upcastTask[A >: B, B](t: Task[B]): Task[A] = t mapN { x =>
x: A
@ -131,7 +131,7 @@ trait TaskExtra extends TaskExtra0 {
final implicit def joinTasks[S](in: Seq[Task[S]]): JoinTask[S, Seq] = new JoinTask[S, Seq] {
def join: Task[Seq[S]] =
Task[Seq[S]](Info(), Action.Join(in, (s: Seq[Result[S]]) => Right(TaskExtra.all(s))))
Task[Seq[S]](Action.Join(in, (s: Seq[Result[S]]) => Right(TaskExtra.all(s))))
def reduced(f: (S, S) => S): Task[S] = TaskExtra.reduced(in.toIndexedSeq, f)
}
@ -144,18 +144,18 @@ trait TaskExtra extends TaskExtra0 {
final implicit def multInputTask[Tup <: Tuple](tasks: Tuple.Map[Tup, Task]): MultiInTask[Tup] =
new MultiInTask[Tup]:
override def flatMapN[A](f: Tup => Task[A]): Task[A] =
Task(Info(), Action.FlatMapped(tasks, f.compose(allM)))
Task(Action.FlatMapped(tasks, f.compose(allM)))
override def flatMapR[A](f: Tuple.Map[Tup, Result] => Task[A]): Task[A] =
Task(Info(), Action.FlatMapped(tasks, f))
Task(Action.FlatMapped(tasks, f))
override def mapN[A](f: Tup => A): Task[A] =
Task(Info(), Action.Mapped(tasks, f.compose(allM)))
Task(Action.Mapped(tasks, f.compose(allM)))
override def mapR[A](f: Tuple.Map[Tup, Result] => A): Task[A] =
Task(Info(), Action.Mapped(tasks, f))
Task(Action.Mapped(tasks, f))
override def flatFailure[A](f: Seq[Incomplete] => Task[A]): Task[A] =
Task(Info(), Action.FlatMapped(tasks, f.compose(anyFailM)))
Task(Action.FlatMapped(tasks, f.compose(anyFailM)))
override def mapFailure[A](f: Seq[Incomplete] => A): Task[A] =
Task(Info(), Action.Mapped(tasks, f.compose(anyFailM)))
Task(Action.Mapped(tasks, f.compose(anyFailM)))
final implicit def singleInputTask[S](in: Task[S]): SingleInTask[S] =
new SingleInTask[S]:
@ -164,21 +164,22 @@ trait TaskExtra extends TaskExtra0 {
def failure: Task[Incomplete] = mapFailure(identity)
def result: Task[Result[S]] = mapR(identity)
private def newInfo[A]: Info[A] = TaskExtra.newInfo(in.info)
private def newAttributes: AttributeMap = TaskExtra.newAttributes(in.attributes)
override def flatMapR[A](f: Result[S] => Task[A]): Task[A] =
Task(
newInfo,
newAttributes,
Action.FlatMapped[A, Tuple1[S]](Tuple1(in), { case Tuple1(a) => f(a) })
)
override def mapR[A](f: Result[S] => A): Task[A] =
Task(
newInfo,
newAttributes,
Action.Mapped[A, Tuple1[S]](Tuple1(in), { case Tuple1(a) => f(a) })
)
override def dependsOn(tasks: Task[?]*): Task[S] = Task(newInfo, Action.DependsOn(in, tasks))
override def dependsOn(tasks: Task[?]*): Task[S] =
Task(newAttributes, Action.DependsOn(in, tasks))
override def flatMapN[T](f: S => Task[T]): Task[T] = flatMapR(f.compose(successM))
@ -206,8 +207,8 @@ trait TaskExtra extends TaskExtra0 {
def &&[T](alt: Task[T]): Task[T] = flatMapN(_ => alt)
final implicit def toTaskInfo[S](in: Task[S]): TaskInfo[S] = new TaskInfo[S] {
def describedAs(s: String): Task[S] = in.copy(info = in.info.setDescription(s))
def named(s: String): Task[S] = in.copy(info = in.info.setName(s))
def describedAs(s: String): Task[S] = in.setDescription(s)
def named(s: String): Task[S] = in.setName(s)
}
final implicit def pipeToProcess[Key](
@ -327,10 +328,10 @@ object TaskExtra extends TaskExtra {
def incompleteDeps(incs: Seq[Incomplete]): Incomplete = Incomplete(None, causes = incs)
def select[A, B](fab: Task[Either[A, B]], f: Task[A => B]): Task[B] =
Task(newInfo(fab.info), Action.Selected[A, B](fab, f))
Task(newAttributes(fab.attributes), Action.Selected[A, B](fab, f))
// The "taskDefinitionKey" is used, at least, by the ".previous" functionality.
// But apparently it *cannot* survive a task map/flatMap/etc. See actions/depends-on.
private[sbt] def newInfo[A](info: Info[?]): Info[A] =
Info[A](AttributeMap(info.attributes.entries.filter(_.key.label != "taskDefinitionKey")))
private[sbt] def newAttributes[A](attributes: AttributeMap): AttributeMap =
AttributeMap(attributes.entries.filter(_.key.label != "taskDefinitionKey"))
}

View File

@ -17,7 +17,7 @@ import sbt.internal.util.Types.*
object Transform:
def fromDummy[A](original: Task[A])(action: => A): Task[A] =
Task(original.info, Action.Pure(() => action, false))
new Task(original.attributes, original.post, work = Action.Pure(() => action, false))
def fromDummyStrict[T](original: Task[T], value: T): Task[T] = fromDummy(original)(value)
@ -57,8 +57,8 @@ object Transform:
case Join(in, f) => uniform(in)(f)
def inline1[T](t: TaskId[T]): Option[() => T] = t match
case Task(_, Action.Pure(eval, true)) => Some(eval)
case _ => None
case Task(Action.Pure(eval, true)) => Some(eval)
case _ => None
def uniform[A1, D](tasks: Seq[Task[D]])(
f: Seq[Result[D]] => Either[Task[A1], A1]

View File

@ -158,123 +158,95 @@ object AttributeKey {
* keys with the same label but different types. Excluding this possibility is the responsibility of
* the client if desired.
*/
trait AttributeMap {
/**
* Gets the value of type `T` associated with the key `k`. If a key with the same label but
* different type is defined, this method will fail.
*/
def apply[T](k: AttributeKey[T]): T
/**
* Gets the value of type `T` associated with the key `k` or `None` if no value is associated. If
* a key with the same label but a different type is defined, this method will return `None`.
*/
def get[T](k: AttributeKey[T]): Option[T]
/**
* Returns this map without the mapping for `k`. This method will not remove a mapping for a key
* with the same label but a different type.
*/
def remove[T](k: AttributeKey[T]): AttributeMap
/**
* Returns true if this map contains a mapping for `k`. If a key with the same label but a
* different type is defined in this map, this method will return `false`.
*/
def contains[T](k: AttributeKey[T]): Boolean
/**
* Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. Any mappings
* for keys with the same label but different types are unaffected.
*/
def put[T](k: AttributeKey[T], value: T): AttributeMap
/**
* All keys with defined mappings. There may be multiple keys with the same `label`, but different
* types.
*/
def keys: Iterable[AttributeKey[?]]
/**
* Adds the mappings in `o` to this map, with mappings in `o` taking precedence over existing
* mappings.
*/
def ++(o: Iterable[AttributeEntry[?]]): AttributeMap
/**
* Combines the mappings in `o` with the mappings in this map, with mappings in `o` taking
* precedence over existing mappings.
*/
def ++(o: AttributeMap): AttributeMap
/**
* All mappings in this map. The [[AttributeEntry]] type preserves the typesafety of mappings,
* although the specific types are unknown.
*/
def entries: Iterable[AttributeEntry[?]]
/** `true` if there are no mappings in this map, `false` if there are. */
def isEmpty: Boolean
/**
* Adds the mapping `k -> opt.get` if opt is Some. Otherwise, it returns this map without the
* mapping for `k`.
*/
private[sbt] def setCond[T](k: AttributeKey[T], opt: Option[T]): AttributeMap
}
object AttributeMap {
opaque type AttributeMap = Map[AttributeKey[?], Any]
object AttributeMap:
/** An [[AttributeMap]] without any mappings. */
val empty: AttributeMap = new BasicAttributeMap(Map.empty)
val empty: AttributeMap = Map.empty
/** Constructs an [[AttributeMap]] containing the given `entries`. */
def apply(entries: Iterable[AttributeEntry[?]]): AttributeMap = empty ++ entries
def apply(entries: Iterable[AttributeEntry[?]]): AttributeMap = ++(empty)(entries)
/** Constructs an [[AttributeMap]] containing the given `entries`. */
def apply(entries: AttributeEntry[?]*): AttributeMap = empty ++ entries
def apply(entries: AttributeEntry[?]*): AttributeMap = ++(empty)(entries)
/** Presents an `AttributeMap` as a natural transformation. */
// implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = λ[AttributeKey ~> Id](map(_))
}
extension (self: AttributeMap)
/**
* Gets the value of type `T` associated with the key `k`. If a key with the same label but
* different type is defined, this method will fail.
*/
def apply[T](k: AttributeKey[T]): T = self(k).asInstanceOf[T]
private class BasicAttributeMap(private val backing: Map[AttributeKey[?], Any])
extends AttributeMap {
/**
* Gets the value of type `T` associated with the key `k` or `None` if no value is associated. If
* a key with the same label but a different type is defined, this method will return `None`.
*/
def get[T](k: AttributeKey[T]): Option[T] = self.get(k).asInstanceOf[Option[T]]
def isEmpty: Boolean = backing.isEmpty
def apply[T](k: AttributeKey[T]) = backing(k).asInstanceOf[T]
def get[T](k: AttributeKey[T]) = backing.get(k).asInstanceOf[Option[T]]
def remove[T](k: AttributeKey[T]): AttributeMap = new BasicAttributeMap(backing - k)
def contains[T](k: AttributeKey[T]) = backing.contains(k)
/**
* Gets the value of type `T` associated with the key `k` or `default` if no value is associated. If
* a key with the same label but a different type is defined, this method will return `default`.
*/
def getOrElse[T](k: AttributeKey[T], default: => T): T =
self.getOrElse(k, default).asInstanceOf[T]
def put[T](k: AttributeKey[T], value: T): AttributeMap =
new BasicAttributeMap(backing.updated(k, value: Any))
/**
* Returns this map without the mapping for `k`. This method will not remove a mapping for a key
* with the same label but a different type.
*/
def remove[T](k: AttributeKey[T]): AttributeMap = self.removed(k)
def keys: Iterable[AttributeKey[?]] = backing.keys
/**
* Returns true if this map contains a mapping for `k`. If a key with the same label but a
* different type is defined in this map, this method will return `false`.
*/
def contains[T](k: AttributeKey[T]): Boolean = self.contains(k)
def ++(o: Iterable[AttributeEntry[?]]): AttributeMap =
new BasicAttributeMap(o.foldLeft(backing)((b, e) => b.updated(e.key, e.value: Any)))
/**
* Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. Any mappings
* for keys with the same label but different types are unaffected.
*/
def put[T](k: AttributeKey[T], value: T): AttributeMap = self.updated(k, value)
def ++(o: AttributeMap): AttributeMap = o match {
case bam: BasicAttributeMap =>
new BasicAttributeMap(Map(backing.toSeq ++ bam.backing.toSeq*))
case _ => o ++ this
}
/**
* All keys with defined mappings. There may be multiple keys with the same `label`, but different
* types.
*/
def keys: Iterable[AttributeKey[?]] = self.keys
def entries: Iterable[AttributeEntry[?]] =
backing.collect { case (k: AttributeKey[kt], v) =>
AttributeEntry(k, v.asInstanceOf[kt])
}
/**
* Adds the mappings in `o` to this map, with mappings in `o` taking precedence over existing
* mappings.
*/
def ++(o: Iterable[AttributeEntry[?]]): AttributeMap =
o.foldLeft(self)((b, e) => b.updated(e.key, e.value))
private[sbt] def setCond[T](k: AttributeKey[T], opt: Option[T]): AttributeMap =
opt match {
case Some(v) => put(k, v)
case None => remove(k)
}
/**
* Combines the mappings in `o` with the mappings in this map, with mappings in `o` taking
* precedence over existing mappings.
*/
def ++(o: AttributeMap): AttributeMap = self ++ o
override def toString = entries.mkString("(", ", ", ")")
}
/**
* All mappings in this map. The [[AttributeEntry]] type preserves the typesafety of mappings,
* although the specific types are unknown.
*/
def entries: Iterable[AttributeEntry[?]] =
self.collect { case (k: AttributeKey[x], v) => AttributeEntry(k, v.asInstanceOf[x]) }
/** `true` if there are no mappings in this map, `false` if there are. */
def isEmpty: Boolean = self.isEmpty
/**
* Adds the mapping `k -> opt.get` if opt is Some. Otherwise, it returns this map without the
* mapping for `k`.
*/
private[sbt] def setCond[T](k: AttributeKey[T], opt: Option[T]): AttributeMap =
opt match
case Some(v) => self.updated(k, v)
case None => self.removed(k)
end extension
end AttributeMap
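// A minimal usage sketch (illustrative only; assumes sbt.internal.util.{ AttributeKey, AttributeMap }
// are in scope): the opaque type keeps the former interface available via the extension methods above.
val colorKey = AttributeKey[String]("color")
val withColor = AttributeMap.empty.put(colorKey, "green")
assert(withColor.get(colorKey).contains("green") && withColor.entries.size == 1)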
/**
* An immutable map where both key and value are String.

View File

@ -20,13 +20,12 @@ enum EvaluationState:
case Calling
case Evaluated
abstract class EvaluateSettings[ScopeType]:
protected val init: Init[ScopeType]
class EvaluateSettings[I <: Init](
val init: I,
executor: Executor,
compiledSettings: Seq[init.Compiled[?]],
):
import init._
protected def executor: Executor
protected def compiledSettings: Seq[Compiled[?]]
import EvaluationState.*
private val complete = new LinkedBlockingQueue[Option[Throwable]]
@ -68,7 +67,7 @@ abstract class EvaluateSettings[ScopeType]:
private val running = new AtomicInteger
private val cancel = new AtomicBoolean(false)
def run(implicit delegates: ScopeType => Seq[ScopeType]): Settings[ScopeType] = {
def run(implicit delegates: ScopeType => Seq[ScopeType]): Settings = {
assert(running.get() == 0, "Already running")
startWork()
roots.foreach(_.registerIfNew())
@ -83,7 +82,7 @@ abstract class EvaluateSettings[ScopeType]:
private def getResults(implicit delegates: ScopeType => Seq[ScopeType]) =
static.toTypedSeq.foldLeft(empty) { case (ss, static.TPair(key, node)) =>
if key.key.isLocal then ss
else ss.set(key.scope, key.key, node.get)
else ss.set(key, node.get)
}
private lazy val getValue: [A] => INode[A] => A = [A] => (fa: INode[A]) => fa.get

View File

@ -12,53 +12,10 @@ import sbt.util.Show
import Util.{ nil, nilSeq }
import scala.jdk.CollectionConverters.*
sealed trait Settings[ScopeType]:
def data: Map[ScopeType, AttributeMap]
def keys(scope: ScopeType): Set[AttributeKey[?]]
def scopes: Set[ScopeType]
def definingScope(scope: ScopeType, key: AttributeKey[?]): Option[ScopeType]
def allKeys[A](f: (ScopeType, AttributeKey[?]) => A): Seq[A]
def get[A](scope: ScopeType, key: AttributeKey[A]): Option[A]
def getDirect[A](scope: ScopeType, key: AttributeKey[A]): Option[A]
def set[A](scope: ScopeType, key: AttributeKey[A], value: A): Settings[ScopeType]
end Settings
private final class Settings0[ScopeType](
val data: Map[ScopeType, AttributeMap],
val delegates: ScopeType => Seq[ScopeType]
) extends Settings[ScopeType]:
def scopes: Set[ScopeType] = data.keySet
def keys(scope: ScopeType) = data(scope).keys.toSet
def allKeys[A](f: (ScopeType, AttributeKey[?]) => A): Seq[A] =
data.flatMap { case (scope, map) =>
map.keys.map(k => f(scope, k))
}.toSeq
def get[A](scope: ScopeType, key: AttributeKey[A]): Option[A] =
delegates(scope).flatMap { sc =>
getDirect(sc, key)
}.headOption
def definingScope(scope: ScopeType, key: AttributeKey[?]): Option[ScopeType] =
delegates(scope).find { sc =>
getDirect(sc, key).isDefined
}
def getDirect[A](scope: ScopeType, key: AttributeKey[A]): Option[A] =
data.get(scope).flatMap(_.get(key))
def set[A](scope: ScopeType, key: AttributeKey[A], value: A): Settings[ScopeType] =
val map = data.getOrElse(scope, AttributeMap.empty)
val newData = data.updated(scope, map.put(key, value))
Settings0(newData, delegates)
end Settings0
// delegates should contain the input Scope as the first entry
// delegates should contain the input ScopeType as the first entry
// this trait is intended to be mixed into an object
trait Init[ScopeType]:
trait Init:
type ScopeType
/**
* The Show instance used when a detailed String needs to be generated.
@ -80,6 +37,58 @@ trait Init[ScopeType]:
type ScopeLocal = ScopedKey[?] => Seq[Setting[?]]
type MapConstant = [a] => ScopedKey[a] => Option[a]
sealed trait Settings:
def attributeKeys: Set[AttributeKey[?]]
def keys: Iterable[ScopedKey[?]]
def contains(key: ScopedKey[?]): Boolean
def values: Iterable[Any]
def data: Map[ScopedKey[?], Any]
def scopes: Set[ScopeType]
def getKeyValue[A](key: ScopedKey[A]): Option[(ScopedKey[A], A)]
def get[A](key: ScopedKey[A]): Option[A]
def definingKey[A](key: ScopedKey[A]): Option[ScopedKey[A]]
def getDirect[A](key: ScopedKey[A]): Option[A]
def set[A](key: ScopedKey[A], value: A): Settings
end Settings
private final class Settings0(
val scopes: Set[ScopeType],
val attributeKeys: Set[AttributeKey[?]],
// In 1.x it was a Map[Scope, AttributeMap]
// To reduce heap usage, it is better to store the ScopedKey[?] directly to avoid recreating
// and duplicating them later.
val data: Map[ScopedKey[?], Any],
d: ScopeType => Seq[ScopeType]
) extends Settings:
def keys: Iterable[ScopedKey[?]] = data.keys
def contains(key: ScopedKey[?]): Boolean = data.contains(key)
def values: Iterable[Any] = data.values
def get[A](key: ScopedKey[A]): Option[A] =
delegates(key).flatMap(data.get).nextOption.asInstanceOf[Option[A]]
def definingKey[A](key: ScopedKey[A]): Option[ScopedKey[A]] =
delegates(key).find(data.contains)
def getKeyValue[A](key: ScopedKey[A]): Option[(ScopedKey[A], A)] =
delegates(key).flatMap { k =>
data.get(k) match
case None => None
case Some(v) => Some(k -> v.asInstanceOf[A])
}.nextOption
def getDirect[A](key: ScopedKey[A]): Option[A] = data.get(key).asInstanceOf[Option[A]]
def set[A](key: ScopedKey[A], value: A): Settings =
val newScopes = scopes + key.scope
val newAttributeKeys = attributeKeys + key.key
val newData = data.updated(key, value)
Settings0(newScopes, newAttributeKeys, newData, d)
private def delegates[A](key: ScopedKey[A]): Iterator[ScopedKey[A]] =
d(key.scope).iterator.map(s => key.copy(scope = s))
end Settings0
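// An illustrative sketch (hypothetical helper, not part of this change): how the new
// ScopedKey-keyed Settings is read from inside an Init instance.
def describeLookup[A](ss: Settings, key: ScopedKey[A]): String =
  val direct = ss.getDirect(key) // value defined in key.scope itself, no delegation
  val delegated = ss.get(key) // first value found along key.scope's delegate chain
  val owner = ss.definingKey(key) // the delegated ScopedKey that actually defines it
  s"direct=$direct delegated=$delegated definedAt=$owner"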
private[sbt] abstract class ValidateKeyRef {
def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T]
}
@ -163,16 +172,16 @@ trait Init[ScopeType]:
private final val nextID = new java.util.concurrent.atomic.AtomicLong
private final def nextDefaultID(): Long = nextID.incrementAndGet()
def empty(implicit delegates: ScopeType => Seq[ScopeType]): Settings[ScopeType] =
Settings0(Map.empty, delegates)
def empty(implicit delegates: ScopeType => Seq[ScopeType]): Settings =
Settings0(Set.empty, Set.empty, Map.empty, delegates)
def asTransform(s: Settings[ScopeType]): [A] => ScopedKey[A] => A =
def asTransform(s: Settings): [A] => ScopedKey[A] => A =
[A] => (sk: ScopedKey[A]) => getValue(s, sk)
def getValue[T](s: Settings[ScopeType], k: ScopedKey[T]) =
s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k))
def getValue[T](s: Settings, k: ScopedKey[T]) =
s.get(k).getOrElse(throw new InvalidReference(k))
def asFunction[A](s: Settings[ScopeType]): ScopedKey[A] => A = k => getValue(s, k)
def asFunction[A](s: Settings): ScopedKey[A] => A = k => getValue(s, k)
def mapScope(f: ScopeType => ScopeType): MapScoped =
[a] => (k: ScopedKey[a]) => k.copy(scope = f(k.scope))
@ -197,7 +206,7 @@ trait Init[ScopeType]:
// inject derived settings into scopes where their dependencies are directly defined
// and prepend per-scope settings
val derived = deriveAndLocal(initDefaults, mkDelegates(delegates))
// group by Scope/Key, dropping dead initializations
// group by ScopeType/Key, dropping dead initializations
val sMap: ScopedMap = grouped(derived)
// delegate references to undefined values according to 'delegates'
val dMap: ScopedMap =
@ -211,13 +220,13 @@ trait Init[ScopeType]:
delegates: ScopeType => Seq[ScopeType],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[?]]
): Settings[ScopeType] = makeWithCompiledMap(init)._2
): Settings = makeWithCompiledMap(init)._2
def makeWithCompiledMap(init: Seq[Setting[?]])(using
delegates: ScopeType => Seq[ScopeType],
scopeLocal: ScopeLocal,
display: Show[ScopedKey[?]]
): (CompiledMap, Settings[ScopeType]) =
): (CompiledMap, Settings) =
val cMap = compiled(init)(using delegates, scopeLocal, display)
// order the initializations. cyclic references are detected here.
val ordered: Seq[Compiled[?]] = sort(cMap)
@ -235,16 +244,14 @@ trait Init[ScopeType]:
def compile(sMap: ScopedMap): CompiledMap =
sMap match
case m: IMap.IMap0[ScopedKey, SettingSeq] @unchecked =>
Par(m.backing.toVector)
import scala.collection.parallel.CollectionConverters.*
m.backing.par
.map { case (k, ss) =>
val deps = ss.flatMap(_.dependencies).toSet
(
k,
Compiled(k.asInstanceOf[ScopedKey[Any]], deps, ss.asInstanceOf[SettingSeq[Any]])
)
val deps = ss.iterator.flatMap(_.dependencies).toSet
k -> Compiled(k.asInstanceOf[ScopedKey[Any]], deps, ss.asInstanceOf[SettingSeq[Any]])
}
.toVector
.toMap
.to(Map)
case _ =>
sMap.toTypedSeq.map { case sMap.TPair(k, ss) =>
val deps = ss.flatMap(_.dependencies)
@ -324,16 +331,12 @@ trait Init[ScopeType]:
private def applyInits(ordered: Seq[Compiled[?]])(implicit
delegates: ScopeType => Seq[ScopeType]
): Settings[ScopeType] =
): Settings =
val x =
java.util.concurrent.Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors)
try {
val eval: EvaluateSettings[ScopeType] = new EvaluateSettings[ScopeType] {
override val init: Init.this.type = Init.this
def compiledSettings = ordered
def executor = x
}
eval.run
val eval: EvaluateSettings[Init.this.type] = new EvaluateSettings(Init.this, x, ordered)
eval.run(using delegates)
} finally {
x.shutdown()
}
@ -416,15 +419,9 @@ trait Init[ScopeType]:
final class Flattened(val key: ScopedKey[?], val dependencies: Iterable[ScopedKey[?]])
def flattenLocals(compiled: CompiledMap): Map[ScopedKey[?], Flattened] = {
val locals = compiled flatMap { case (key, comp) =>
if (key.key.isLocal) Seq(comp)
else nilSeq[Compiled[?]]
}
val locals = compiled.collect { case (key, comp) if key.key.isLocal => comp }
val ordered = Dag.topologicalSort(locals)(
_.dependencies.flatMap(dep =>
if (dep.key.isLocal) Seq[Compiled[?]](compiled(dep))
else nilSeq[Compiled[?]]
)
_.dependencies.collect { case dep if dep.key.isLocal => compiled(dep) }
)
def flatten(
cmap: Map[ScopedKey[?], Flattened],
@ -433,7 +430,7 @@ trait Init[ScopeType]:
): Flattened =
new Flattened(
key,
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else Seq[ScopedKey[?]](dep))
deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else Seq(dep))
)
val empty = Map.empty[ScopedKey[?], Flattened]
@ -442,10 +439,9 @@ trait Init[ScopeType]:
cmap.updated(c.key, flatten(cmap, c.key, c.dependencies))
}
compiled flatMap { case (key, comp) =>
if (key.key.isLocal) nilSeq[(ScopedKey[?], Flattened)]
else
Seq[(ScopedKey[?], Flattened)]((key, flatten(flattenedLocals, key, comp.dependencies)))
compiled.collect {
case (key, comp) if !key.key.isLocal =>
(key, flatten(flattenedLocals, key, comp.dependencies))
}
}
@ -653,7 +649,7 @@ trait Init[ScopeType]:
private[sbt] def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1]
def evaluate(map: Settings[ScopeType]): A1
def evaluate(map: Settings): A1
def zip[A2](o: Initialize[A2]): Initialize[(A1, A2)] = zipTupled(o)(identity)
def zipWith[A2, U](o: Initialize[A2])(f: (A1, A2) => U): Initialize[U] =
@ -799,7 +795,7 @@ trait Init[ScopeType]:
(fa: Initialize[A]) => (fa.mapReferenced(g))
private def mapConstantK(g: MapConstant): [A] => Initialize[A] => Initialize[A] = [A] =>
(fa: Initialize[A]) => (fa.mapConstant(g))
private def evaluateK(g: Settings[ScopeType]): [A] => Initialize[A] => A = [A] =>
private def evaluateK(g: Settings): [A] => Initialize[A] => A = [A] =>
(fa: Initialize[A]) => (fa.evaluate(g))
private def deps(ls: List[Initialize[?]]): Seq[ScopedKey[?]] =
ls.flatMap(_.dependencies)
@ -820,7 +816,7 @@ trait Init[ScopeType]:
extends Keyed[S, A1]:
override final def apply[A2](g: A1 => A2): Initialize[A2] =
GetValue(scopedKey, g compose transform)
override final def evaluate(ss: Settings[ScopeType]): A1 = transform(getValue(ss, scopedKey))
override final def evaluate(ss: Settings): A1 = transform(getValue(ss, scopedKey))
override final def mapReferenced(g: MapScoped): Initialize[A1] =
GetValue(g(scopedKey), transform)
@ -842,7 +838,7 @@ trait Init[ScopeType]:
trait KeyedInitialize[A1] extends Keyed[A1, A1]:
override final def apply[A2](g: A1 => A2): Initialize[A2] =
GetValue(scopedKey, g)
override final def evaluate(ss: Settings[ScopeType]): A1 = getValue(ss, scopedKey)
override final def evaluate(ss: Settings): A1 = getValue(ss, scopedKey)
override final def mapReferenced(g: MapScoped): Initialize[A1] = g(scopedKey)
private[sbt] override final def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1] =
@ -861,7 +857,7 @@ trait Init[ScopeType]:
override def dependencies: Seq[ScopedKey[?]] = Nil
override def apply[A2](g2: ([x] => Initialize[x] => Initialize[x]) => A2): Initialize[A2] =
map(this)(g2)
override def evaluate(ss: Settings[ScopeType]): [x] => Initialize[x] => Initialize[x] = f
override def evaluate(ss: Settings): [x] => Initialize[x] => Initialize[x] = f
override def mapReferenced(g: MapScoped): Initialize[[x] => Initialize[x] => Initialize[x]] =
TransformCapture(mapReferencedK(g) ∙ f)
override def mapConstant(g: MapConstant): Initialize[[x] => Initialize[x] => Initialize[x]] =
@ -880,7 +876,7 @@ trait Init[ScopeType]:
extends Initialize[ScopedKey[A1]]:
override def dependencies: Seq[ScopedKey[?]] = Nil
override def apply[A2](g2: ScopedKey[A1] => A2): Initialize[A2] = map(this)(g2)
override def evaluate(ss: Settings[ScopeType]): ScopedKey[A1] = key
override def evaluate(ss: Settings): ScopedKey[A1] = key
override def mapReferenced(g: MapScoped): Initialize[ScopedKey[A1]] =
ValidationCapture(g(key), selfRefOk)
override def mapConstant(g: MapConstant): Initialize[ScopedKey[A1]] = this
@ -898,7 +894,7 @@ trait Init[ScopeType]:
extends Initialize[A1]:
override def dependencies: Seq[ScopedKey[?]] = in.dependencies
override def apply[A2](g: A1 => A2): Initialize[A2] = Bind[S, A2](s => f(s)(g), in)
override def evaluate(ss: Settings[ScopeType]): A1 = f(in.evaluate(ss)).evaluate(ss)
override def evaluate(ss: Settings): A1 = f(in.evaluate(ss)).evaluate(ss)
override def mapReferenced(g: MapScoped) =
Bind[S, A1](s => f(s).mapReferenced(g), in.mapReferenced(g))
@ -927,7 +923,7 @@ trait Init[ScopeType]:
case Some(i) => Right(Optional(i.validateKeyReferenced(g).toOption, f))
override def mapConstant(g: MapConstant): Initialize[A1] = Optional(a map mapConstantK(g)[S], f)
override def evaluate(ss: Settings[ScopeType]): A1 =
override def evaluate(ss: Settings): A1 =
f(a.flatMap { i => trapBadRef(evaluateK(ss)(i)) })
// the proper solution is for evaluate to be deprecated or reserved for external use only, with a new internal method returning Either used instead
@ -946,7 +942,7 @@ trait Init[ScopeType]:
override def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1] = Right(this)
override def apply[A2](g: A1 => A2): Initialize[A2] = Value[A2](() => g(value()))
override def mapConstant(g: MapConstant): Initialize[A1] = this
override def evaluate(map: Settings[ScopeType]): A1 = value()
override def evaluate(map: Settings): A1 = value()
private[sbt] override def processAttributes[A2](init: A2)(f: (A2, AttributeMap) => A2): A2 =
init
end Value
@ -958,7 +954,7 @@ trait Init[ScopeType]:
Right(this)
override def apply[A2](g: Set[ScopeType] => A2) = map(this)(g)
override def mapConstant(g: MapConstant): Initialize[Set[ScopeType]] = this
override def evaluate(map: Settings[ScopeType]): Set[ScopeType] = map.scopes
override def evaluate(map: Settings): Set[ScopeType] = map.scopes
private[sbt] override def processAttributes[A2](init: A2)(f: (A2, AttributeMap) => A2): A2 =
init
end StaticScopes
@ -971,7 +967,7 @@ trait Init[ScopeType]:
override def mapConstant(g: MapConstant): Initialize[A2] =
Uniform(f, inputs.map(_.mapConstant(g)))
override def apply[A3](g: A2 => A3): Initialize[A3] = Uniform(g.compose(f), inputs)
override def evaluate(ss: Settings[ScopeType]): A2 = f(inputs.map(_.evaluate(ss)))
override def evaluate(ss: Settings): A2 = f(inputs.map(_.evaluate(ss)))
override def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A2] =
val tx = inputs.map(_.validateKeyReferenced(g))
@ -997,7 +993,7 @@ trait Init[ScopeType]:
override def apply[A2](g: A1 => A2): Initialize[A2] = Apply(g compose f, inputs)
override def evaluate(ss: Settings[ScopeType]): A1 = f(inputs.unmap(evaluateK(ss)))
override def evaluate(ss: Settings): A1 = f(inputs.unmap(evaluateK(ss)))
override def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[A1] =
val tx: Tuple.Map[Tup, ValidatedInit] = inputs.transform(validateKeyReferencedK(g))

View File

@ -191,7 +191,7 @@ object SettingsTest extends Properties("settings") {
checkKey(chk, Some(expected), eval)
}
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Settings[Scope]) = {
def checkKey[T](key: ScopedKey[T], expected: Option[T], settings: Def.Settings) = {
val value = settings.get(key.scope, key.key)
("Key: " + key) |:
("Value: " + value) |:
@ -199,7 +199,7 @@ object SettingsTest extends Properties("settings") {
(value == expected)
}
def evaluate(settings: Seq[Setting[_]]): Settings[Scope] =
def evaluate(settings: Seq[Setting[_]]): Def.Settings =
try {
makeWithCompiledMap(settings)(delegates, scopeLocal, showFullKey)._2
} catch {

View File

@ -19,7 +19,8 @@ final case class Scope(nestIndex: Int, idAtIndex: Int = 0)
// Lots of type constructors would become binary, which as you may know requires lots of type lambdas
// when you want a type function with only one parameter.
// That would be a general pain.)
case class SettingsExample() extends Init[Scope] {
case class SettingsExample() extends Init {
type ScopeType = Scope
// Provides a way of showing a Scope+AttributeKey[_]
val showFullKey: Show[ScopedKey[?]] = Show[ScopedKey[?]]((key: ScopedKey[?]) => {
s"${key.scope.nestIndex}(${key.scope.idAtIndex})/${key.key.label}"
@ -64,7 +65,7 @@ case class SettingsUsage(val settingsExample: SettingsExample) {
// "compiles" and applies the settings.
// This can be split into multiple steps to access intermediate results if desired.
// The 'inspect' command operates on the output of 'compile', for example.
val applied: Settings[Scope] =
val applied: Settings =
makeWithCompiledMap(mySettings)(using delegates, scopeLocal, showFullKey)._2
// Show results.
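// e.g. (illustrative sketch, not the file's actual continuation) the applied settings could be
// dumped via the new ScopedKey-keyed data map:
//   applied.data.foreach { case (k, v) => println(s"${showFullKey.show(k)} = $v") }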