mirror of https://github.com/sbt/sbt.git
Refactor to build mainProj
This commit is contained in:
parent
100f1ac09c
commit
cba7a0efc3
20
build.sbt
20
build.sbt
|
|
@ -10,7 +10,7 @@ import scala.util.Try
|
|||
// ThisBuild settings take lower precedence,
|
||||
// but can be shared across the multi projects.
|
||||
ThisBuild / version := {
|
||||
val v = "2.0.0-alpha1-SNAPSHOT"
|
||||
val v = "2.0.0-alpha2-SNAPSHOT"
|
||||
nightlyVersion.getOrElse(v)
|
||||
}
|
||||
ThisBuild / version2_13 := "2.0.0-alpha1-SNAPSHOT"
|
||||
|
|
@ -53,6 +53,7 @@ Global / excludeLint := (Global / excludeLint).?.value.getOrElse(Set.empty)
|
|||
Global / excludeLint += componentID
|
||||
Global / excludeLint += scriptedBufferLog
|
||||
Global / excludeLint += checkPluginCross
|
||||
ThisBuild / evictionErrorLevel := Level.Info
|
||||
|
||||
def commonBaseSettings: Seq[Setting[_]] = Def.settings(
|
||||
headerLicense := Some(
|
||||
|
|
@ -180,17 +181,7 @@ def mimaSettingsSince(versions: Seq[String]): Seq[Def.Setting[_]] = Def settings
|
|||
val scriptedSbtReduxMimaSettings = Def.settings(mimaPreviousArtifacts := Set())
|
||||
|
||||
lazy val sbtRoot: Project = (project in file("."))
|
||||
.aggregate(
|
||||
(allProjects diff Seq(
|
||||
scriptedSbtReduxProj,
|
||||
scriptedSbtOldProj,
|
||||
scriptedPluginProj,
|
||||
dependencyTreeProj,
|
||||
mainProj,
|
||||
sbtProj,
|
||||
bundledLauncherProj,
|
||||
)).map(p => LocalProject(p.id)): _*
|
||||
)
|
||||
.aggregate(allProjects.map(p => LocalProject(p.id)): _*)
|
||||
.settings(
|
||||
minimalSettings,
|
||||
onLoadMessage := {
|
||||
|
|
@ -911,6 +902,7 @@ lazy val mainProj = (project in file("main"))
|
|||
.enablePlugins(ContrabandPlugin)
|
||||
.dependsOn(
|
||||
actionsProj,
|
||||
buildFileProj,
|
||||
mainSettingsProj,
|
||||
runProj,
|
||||
commandProj,
|
||||
|
|
@ -984,13 +976,15 @@ lazy val sbtProj = (project in file("sbt-app"))
|
|||
Tests.Argument(framework, s"-Dsbt.server.scala.version=${scalaVersion.value}") :: Nil
|
||||
},
|
||||
)
|
||||
.configure(addSbtIO, addSbtCompilerBridge)
|
||||
.configure(addSbtIO)
|
||||
// addSbtCompilerBridge
|
||||
|
||||
lazy val serverTestProj = (project in file("server-test"))
|
||||
.dependsOn(sbtProj % "compile->test", scriptedSbtReduxProj % "compile->test")
|
||||
.settings(
|
||||
testedBaseSettings,
|
||||
crossScalaVersions := Seq(baseScalaVersion),
|
||||
bspEnabled := false,
|
||||
publish / skip := true,
|
||||
// make server tests serial
|
||||
Test / watchTriggers += baseDirectory.value.toGlob / "src" / "server-test" / **,
|
||||
|
|
|
|||
|
|
@ -142,6 +142,9 @@ class Eval(
|
|||
valTypes: Seq[String],
|
||||
extraHash: String,
|
||||
): EvalDefinitions =
|
||||
println(s"""evalDefinitions(definitions = $definitions)
|
||||
classpath = $classpath
|
||||
""")
|
||||
require(definitions.nonEmpty, "definitions to evaluate cannot be empty.")
|
||||
val extraHash0 = extraHash
|
||||
val ev = new EvalType[Seq[String]]:
|
||||
|
|
@ -314,7 +317,7 @@ object Eval:
|
|||
class EvalSourceFile(name: String, startLine: Int, contents: String)
|
||||
extends SourceFile(
|
||||
new VirtualFile(name, contents.getBytes(StandardCharsets.UTF_8)),
|
||||
scala.io.Codec.UTF8
|
||||
contents.toArray
|
||||
):
|
||||
override def lineToOffset(line: Int): Int = super.lineToOffset((line + startLine) max 0)
|
||||
override def offsetToLine(offset: Int): Int = super.offsetToLine(offset) - startLine
|
||||
|
|
|
|||
|
|
@ -21,11 +21,11 @@ import java.io.File
|
|||
import java.nio.file.Path
|
||||
import sbt.internal.util.complete.DefaultParsers.validID
|
||||
import Def.{ ScopedKey, Setting }
|
||||
// import Scope.GlobalScope
|
||||
// import sbt.SlashSyntax0._
|
||||
import Scope.GlobalScope
|
||||
import sbt.SlashSyntax0.*
|
||||
import sbt.internal.parser.SbtParser
|
||||
import sbt.io.IO
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.JavaConverters.*
|
||||
import xsbti.VirtualFile
|
||||
import xsbti.VirtualFileRef
|
||||
|
||||
|
|
@ -386,44 +386,49 @@ object Index {
|
|||
private[this] def stringToKeyMap0(
|
||||
settings: Set[AttributeKey[_]]
|
||||
)(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
|
||||
// val multiMap = settings.groupBy(label)
|
||||
// val duplicates = multiMap.iterator
|
||||
// .collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) }
|
||||
// .collect {
|
||||
// case (k, xs) if xs.size > 1 => (k, xs)
|
||||
// }
|
||||
// .toVector
|
||||
// if (duplicates.isEmpty)
|
||||
// multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
|
||||
// else
|
||||
// sys.error(
|
||||
// duplicates map { case (k, tps) =>
|
||||
// "'" + k + "' (" + tps.mkString(", ") + ")"
|
||||
// } mkString ("Some keys were defined with the same name but different types: ", ", ", "")
|
||||
// )
|
||||
???
|
||||
val multiMap = settings.groupBy(label)
|
||||
val duplicates = multiMap.iterator
|
||||
.collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) }
|
||||
.collect {
|
||||
case (k, xs) if xs.size > 1 => (k, xs)
|
||||
}
|
||||
.toVector
|
||||
if (duplicates.isEmpty)
|
||||
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
|
||||
else
|
||||
sys.error(
|
||||
duplicates map { case (k, tps) =>
|
||||
"'" + k + "' (" + tps.mkString(", ") + ")"
|
||||
} mkString ("Some keys were defined with the same name but different types: ", ", ", "")
|
||||
)
|
||||
}
|
||||
|
||||
private[this] type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
|
||||
private[this] type TriggerMap = collection.mutable.HashMap[Task[Any], Seq[Task[Any]]]
|
||||
|
||||
def triggers(ss: Settings[Scope]): Triggers[Task] = {
|
||||
// val runBefore = new TriggerMap
|
||||
// val triggeredBy = new TriggerMap
|
||||
// ss.data.values foreach (
|
||||
// _.entries foreach {
|
||||
// case AttributeEntry(_, value: Task[_]) =>
|
||||
// val as = value.info.attributes
|
||||
// update(runBefore, value, as get Keys.runBefore)
|
||||
// update(triggeredBy, value, as get Keys.triggeredBy)
|
||||
// case _ => ()
|
||||
// }
|
||||
// )
|
||||
// val onComplete = (GlobalScope / Keys.onComplete) get ss getOrElse (() => ())
|
||||
// new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
|
||||
???
|
||||
val runBefore = new TriggerMap
|
||||
val triggeredBy = new TriggerMap
|
||||
ss.data.values foreach (
|
||||
_.entries foreach {
|
||||
case AttributeEntry(_, value: Task[Any]) =>
|
||||
val as = value.info.attributes
|
||||
update(runBefore, value, as.get(Def.runBefore.asInstanceOf))
|
||||
update(triggeredBy, value, as.get(Def.triggeredBy.asInstanceOf))
|
||||
case _ => ()
|
||||
}
|
||||
)
|
||||
val onComplete = (GlobalScope / Def.onComplete) get ss getOrElse (() => ())
|
||||
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
|
||||
}
|
||||
|
||||
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =
|
||||
for (tasks <- tasksOpt; task <- tasks)
|
||||
private[this] def update(
|
||||
map: TriggerMap,
|
||||
base: Task[Any],
|
||||
tasksOpt: Option[Seq[Task[Any]]]
|
||||
): Unit =
|
||||
for {
|
||||
tasks <- tasksOpt
|
||||
task <- tasks
|
||||
}
|
||||
map(task) = base +: map.getOrElse(task, Nil)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -57,6 +57,13 @@ private[sbt] object SbtParser:
|
|||
private final val defaultClasspath =
|
||||
sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil)
|
||||
|
||||
def isIdentifier(ident: String): Boolean =
|
||||
val code = s"val $ident = 0; val ${ident}${ident} = $ident"
|
||||
try
|
||||
val p = SbtParser(FAKE_FILE, List(code))
|
||||
true
|
||||
catch case e: Throwable => false
|
||||
|
||||
/**
|
||||
* Provides the previous error reporting functionality in
|
||||
* [[scala.tools.reflect.ToolBox]].
|
||||
|
|
|
|||
|
|
@ -9,31 +9,36 @@ package sbt
|
|||
package internal
|
||||
package parser
|
||||
|
||||
/*
|
||||
import java.io.File
|
||||
import dotty.tools.dotc.ast.untpd
|
||||
import dotty.tools.dotc.core.Contexts.Context
|
||||
|
||||
private[sbt] object SbtRefactorings {
|
||||
private[sbt] object SbtRefactorings:
|
||||
|
||||
import sbt.internal.parser.SbtParser.{ END_OF_LINE, FAKE_FILE }
|
||||
import sbt.internal.SessionSettings.{ SessionSetting, SbtConfigFile }
|
||||
|
||||
/** A session setting is simply a tuple of a Setting[_] and the strings which define it. */
|
||||
type SessionSetting = (Def.Setting[_], Seq[String])
|
||||
type SbtConfigFile = (File, Seq[String])
|
||||
val emptyString = ""
|
||||
val reverseOrderingInt = Ordering[Int].reverse
|
||||
|
||||
/**
|
||||
* Refactoring a `.sbt` file so that the new settings are used instead of any existing settings.
|
||||
* @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line
|
||||
* @param commands A List of settings (space separate) that should be inserted into the current file.
|
||||
* If the settings replaces a value, it will replace the original line in the .sbt file.
|
||||
* If in the `.sbt` file we have multiply value for one settings -
|
||||
* the first will be replaced and the other will be removed.
|
||||
* @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file.
|
||||
*/
|
||||
* Refactoring a `.sbt` file so that the new settings are used instead of any existing settings.
|
||||
* @param configFile SbtConfigFile with the lines of an sbt file as a List[String] where each string is one line
|
||||
* @param commands A List of settings (space separate) that should be inserted into the current file.
|
||||
* If the settings replaces a value, it will replace the original line in the .sbt file.
|
||||
* If in the `.sbt` file we have multiply value for one settings -
|
||||
* the first will be replaced and the other will be removed.
|
||||
* @return a SbtConfigFile with new lines which represent the contents of the refactored .sbt file.
|
||||
*/
|
||||
def applySessionSettings(
|
||||
configFile: SbtConfigFile,
|
||||
commands: Seq[SessionSetting]
|
||||
): SbtConfigFile = {
|
||||
val (file, lines) = configFile
|
||||
val split = SbtParser(FAKE_FILE, lines)
|
||||
given ctx: Context = SbtParser.defaultGlobalForParser.compileCtx
|
||||
val recordedCommands = recordCommands(commands, split)
|
||||
val sortedRecordedCommands = recordedCommands.sortBy(_._1)(reverseOrderingInt)
|
||||
|
||||
|
|
@ -58,20 +63,22 @@ private[sbt] object SbtRefactorings {
|
|||
if (trimmed.isEmpty) trimmed else text
|
||||
}
|
||||
|
||||
private def recordCommands(commands: Seq[SessionSetting], split: SbtParser) =
|
||||
private def recordCommands(commands: Seq[SessionSetting], split: SbtParser)(using Context) =
|
||||
commands.flatMap { case (_, command) =>
|
||||
val map = toTreeStringMap(command)
|
||||
map.flatMap { case (name, _) => treesToReplacements(split, name, command) }
|
||||
}
|
||||
|
||||
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String]) =
|
||||
private def treesToReplacements(split: SbtParser, name: String, command: Seq[String])(using
|
||||
Context
|
||||
) =
|
||||
split.settingsTrees.foldLeft(Seq.empty[(Int, String, String)]) { case (acc, (st, tree)) =>
|
||||
val treeName = extractSettingName(tree)
|
||||
if (name == treeName) {
|
||||
val replacement =
|
||||
if (acc.isEmpty) command.mkString(END_OF_LINE)
|
||||
else emptyString
|
||||
(tree.pos.start, st, replacement) +: acc
|
||||
(tree.sourcePos.start, st, replacement) +: acc
|
||||
} else {
|
||||
acc
|
||||
}
|
||||
|
|
@ -86,14 +93,14 @@ private[sbt] object SbtRefactorings {
|
|||
seq.toMap
|
||||
}
|
||||
|
||||
import scala.tools.nsc.Global
|
||||
private def extractSettingName(tree: Global#Tree): String =
|
||||
tree.children match {
|
||||
case h :: _ =>
|
||||
extractSettingName(h)
|
||||
case _ =>
|
||||
tree.toString()
|
||||
}
|
||||
// todo: revisit
|
||||
private def extractSettingName(tree: untpd.Tree): String =
|
||||
tree.toString()
|
||||
// tree.children match {
|
||||
// case h :: _ =>
|
||||
// extractSettingName(h)
|
||||
// case _ =>
|
||||
// tree.toString()
|
||||
// }
|
||||
|
||||
}
|
||||
*/
|
||||
end SbtRefactorings
|
||||
|
|
|
|||
|
|
@ -36,4 +36,8 @@ lazy val foo = project
|
|||
assert(p.settings(1) == ("""lazy val foo = project
|
||||
.settings(x := y)""" -> LineRange(7, 8)))
|
||||
}
|
||||
|
||||
test("isIdentifier") {
|
||||
assert(SbtParser.isIdentifier("1a") == false)
|
||||
}
|
||||
end SbtParserTest
|
||||
|
|
|
|||
|
|
@ -13,21 +13,22 @@ object ConvertTestMacro:
|
|||
def someMacroImpl(expr: Expr[Boolean])(using qctx: Quotes) =
|
||||
val convert1: Convert[qctx.type] = new InputInitConvert(qctx)
|
||||
import convert1.qctx.reflect.*
|
||||
def addTypeCon(tpe: TypeRepr, qual: Term, selection: Term): Term =
|
||||
tpe.asType match
|
||||
def addTypeCon[A](tpe: Type[A], qual: Term, selection: Term): Term =
|
||||
tpe match
|
||||
case '[a] =>
|
||||
'{
|
||||
Option[a](${ selection.asExprOf[a] })
|
||||
}.asTerm
|
||||
def substitute(name: String, tpe: TypeRepr, qual: Term, replace: Term) =
|
||||
convert1.convert[Boolean](name, qual) transform { (tree: Term) =>
|
||||
addTypeCon(tpe, tree, replace)
|
||||
val substitute = [a] =>
|
||||
(name: String, tpe: Type[a], qual: Term, replace: Term) =>
|
||||
convert1.convert[Boolean](name, qual) transform { (tree: Term) =>
|
||||
addTypeCon(tpe, tree, replace)
|
||||
}
|
||||
convert1.transformWrappers(expr.asTerm, substitute, Symbol.spliceOwner).asExprOf[Boolean]
|
||||
|
||||
class InputInitConvert[C <: Quotes & scala.Singleton](override val qctx: C)
|
||||
extends Convert[C](qctx)
|
||||
with ContextUtil[C](qctx):
|
||||
with ContextUtil[C](qctx, 0):
|
||||
// with TupleBuilder[C](qctx)
|
||||
// with TupleNBuilder[C](qctx):
|
||||
import qctx.reflect.*
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ package sbt
|
|||
|
||||
import java.nio.file.Paths
|
||||
import sbt.util.Level
|
||||
import sbt.internal.inc.PlainVirtualFile
|
||||
import sbt.internal.util.{ AttributeKey, FullReader, LineReader, Terminal }
|
||||
import sbt.internal.util.complete.{
|
||||
Completion,
|
||||
|
|
@ -39,6 +40,7 @@ import sbt.util.Level
|
|||
import scala.Function.tupled
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import scala.util.control.NonFatal
|
||||
import xsbti.VirtualFile
|
||||
|
||||
object BasicCommands {
|
||||
lazy val allBasicCommands: Seq[Command] = Seq(
|
||||
|
|
@ -110,8 +112,9 @@ object BasicCommands {
|
|||
*/
|
||||
def addPluginSbtFile: Command = Command.arb(_ => addPluginSbtFileParser, addPluginSbtFileHelp()) {
|
||||
(s, extraSbtFile) =>
|
||||
val extraFiles = s.get(BasicKeys.extraMetaSbtFiles).toList.flatten
|
||||
s.put(BasicKeys.extraMetaSbtFiles, (extraFiles: Seq[File]) :+ extraSbtFile)
|
||||
val existing: Seq[VirtualFile] = s.get(BasicKeys.extraMetaSbtFiles).toList.flatten
|
||||
val vf = PlainVirtualFile(extraSbtFile.toPath())
|
||||
s.put(BasicKeys.extraMetaSbtFiles, existing :+ vf)
|
||||
}
|
||||
|
||||
def help: Command = Command.make(HelpCommand, helpBrief, helpDetailed)(helpParser)
|
||||
|
|
|
|||
|
|
@ -17,6 +17,8 @@ import sbt.librarymanagement.ModuleID
|
|||
import sbt.util.Level
|
||||
import scala.annotation.nowarn
|
||||
import scala.concurrent.duration.FiniteDuration
|
||||
import xsbti.VirtualFile
|
||||
import sbt.librarymanagement.Configuration
|
||||
|
||||
object BasicKeys {
|
||||
val historyPath = AttributeKey[Option[File]](
|
||||
|
|
@ -25,7 +27,7 @@ object BasicKeys {
|
|||
40
|
||||
)
|
||||
|
||||
val extraMetaSbtFiles = AttributeKey[Seq[File]](
|
||||
val extraMetaSbtFiles = AttributeKey[Seq[VirtualFile]](
|
||||
"extraMetaSbtFile",
|
||||
"Additional plugin.sbt files.",
|
||||
10000
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import sbt.internal.util.{ Terminal => ITerminal, * }
|
|||
import Util._
|
||||
import sbt.util.Show
|
||||
import xsbti.VirtualFile
|
||||
import sjsonnew.JsonFormat
|
||||
|
||||
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
|
||||
object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
|
||||
|
|
@ -29,6 +30,10 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
|
|||
|
||||
def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings)
|
||||
|
||||
val onComplete = SettingKey[() => Unit](
|
||||
"onComplete",
|
||||
"Hook to run when task evaluation completes. The type of this setting is subject to change, pending the resolution of SI-2915."
|
||||
) // .withRank(DSetting)
|
||||
val triggeredBy = AttributeKey[Seq[Task[_]]]("triggered-by")
|
||||
val runBefore = AttributeKey[Seq[Task[_]]]("run-before")
|
||||
val resolvedScoped = SettingKey[ScopedKey[_]](
|
||||
|
|
@ -232,7 +237,8 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
|
|||
|
||||
inline def setting[A1](inline a: A1): Def.Initialize[A1] = ${ settingMacroImpl[A1]('a) }
|
||||
|
||||
// def settingDyn[T](t: Def.Initialize[T]): Def.Initialize[T] = macro settingDynMacroImpl[T]
|
||||
inline def settingDyn[A1](inline a1: Def.Initialize[A1]): Def.Initialize[A1] =
|
||||
${ SettingMacro.settingDynImpl('a1) }
|
||||
|
||||
inline def input[A1](inline p: State => Parser[A1]): ParserGen[A1] =
|
||||
${ SettingMacro.inputMacroImpl[A1]('p) }
|
||||
|
|
@ -330,6 +336,20 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
|
|||
|
||||
inline def evaluated: A1 = InputWrapper.`wrapInitInputTask_\u2603\u2603`[A1](in)
|
||||
|
||||
def toTask(arg: String): Initialize[Task[A1]] =
|
||||
import TaskExtra.singleInputTask
|
||||
FullInstance.flatten(
|
||||
(Def.stateKey zipWith in)((sTask, it) =>
|
||||
sTask map { s =>
|
||||
Parser.parse(arg, it.parser(s)) match
|
||||
case Right(a) => Def.value[Task[A1]](a)
|
||||
case Left(msg) =>
|
||||
val indented = msg.linesIterator.map(" " + _).mkString("\n")
|
||||
sys.error(s"Invalid programmatic input:\n$indented")
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
inline def settingKey[A1](inline description: String): SettingKey[A1] =
|
||||
${ std.KeyMacro.settingKeyImpl[A1]('description) }
|
||||
|
||||
|
|
|
|||
|
|
@ -83,7 +83,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
|
|||
/**
|
||||
* This AutoPlugin requires the plugins the Plugins matcher returned by this method. See [[trigger]].
|
||||
*/
|
||||
def requires: Plugins = ???
|
||||
def requires: Plugins = Plugins.defaultRequires
|
||||
// plugins.JvmPlugin
|
||||
|
||||
val label: String = getClass.getName.stripSuffix("$")
|
||||
|
|
@ -163,6 +163,8 @@ sealed trait PluginsFunctions {
|
|||
|
||||
object Plugins extends PluginsFunctions {
|
||||
|
||||
private[sbt] var defaultRequires: Plugins = _
|
||||
|
||||
/**
|
||||
* Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s.
|
||||
* The [[AutoPlugin]]s are topologically sorted so that a required [[AutoPlugin]] comes before its requiring [[AutoPlugin]].
|
||||
|
|
|
|||
|
|
@ -55,7 +55,8 @@ object Previous {
|
|||
// private[sbt] def task: ScopedKey[Task[T]] = key.task
|
||||
|
||||
lazy val stamped: JsonFormat[T] =
|
||||
StampedFormat.withStamp(key.task.key.classTag.toString)(format)
|
||||
StampedFormat.withStamp(key.task.key.manifest.toString)(format)
|
||||
|
||||
def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format)
|
||||
private[sbt] def read(streams: Streams): Option[T] =
|
||||
try Option(streams(key.cacheKey).cacheStoreFactory.make(StreamName).read[T]()(stamped))
|
||||
|
|
|
|||
|
|
@ -8,10 +8,13 @@
|
|||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.util.Locale
|
||||
import sbt.librarymanagement.Configuration
|
||||
import sbt.Def.{ Flattened, Initialize, ScopedKey, Setting }
|
||||
import sbt.internal.util.Dag
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util.complete.DefaultParsers
|
||||
import Scope.{ Global, ThisScope }
|
||||
|
||||
sealed trait ProjectDefinition[PR <: ProjectReference] {
|
||||
|
||||
|
|
@ -158,6 +161,19 @@ sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeP
|
|||
/** Definitively set the [[ProjectOrigin]] for this project. */
|
||||
private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy(projectOrigin = origin)
|
||||
|
||||
/**
|
||||
* Applies the given functions to this Project.
|
||||
* The second function is applied to the result of applying the first to this Project and so on.
|
||||
* The intended use is a convenience for applying default configuration provided by a plugin.
|
||||
*/
|
||||
def configure(transforms: (Project => Project)*): Project =
|
||||
Function.chain(transforms)(this)
|
||||
|
||||
def withId(id: String): Project = copy(id = id)
|
||||
|
||||
/** Sets the base directory for this project. */
|
||||
def in(dir: File): Project = copy(base = dir)
|
||||
|
||||
private[sbt] def copy(
|
||||
id: String = id,
|
||||
base: File = base,
|
||||
|
|
@ -180,9 +196,52 @@ sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeP
|
|||
autoPlugins,
|
||||
projectOrigin
|
||||
)
|
||||
|
||||
private[sbt] def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project =
|
||||
def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef
|
||||
def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep
|
||||
def resolveDep(d: ClasspathDep[ProjectReference]) =
|
||||
ClasspathDep.ClasspathDependency(resolveRef(d.project), d.configuration)
|
||||
copy(
|
||||
aggregate = resolveRefs(aggregate),
|
||||
dependencies = resolveDeps(dependencies),
|
||||
)
|
||||
|
||||
private[sbt] def resolve(resolveRef: ProjectReference => ProjectRef): ResolvedProject =
|
||||
def resolveRefs(prs: Seq[ProjectReference]) = prs.map(resolveRef)
|
||||
def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds.map(resolveDep)
|
||||
def resolveDep(d: ClasspathDep[ProjectReference]) =
|
||||
ClasspathDep.ResolvedClasspathDependency(resolveRef(d.project), d.configuration)
|
||||
Project.resolved(
|
||||
id,
|
||||
base,
|
||||
aggregate = resolveRefs(aggregate),
|
||||
dependencies = resolveDeps(dependencies),
|
||||
settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
projectOrigin
|
||||
)
|
||||
end Project
|
||||
|
||||
object Project:
|
||||
def apply(id: String, base: File): Project =
|
||||
unresolved(id, base, Nil, Nil, Nil, Nil, Plugins.empty, Nil, ProjectOrigin.Organic)
|
||||
|
||||
/** This is a variation of def apply that mixes in GeneratedRootProject. */
|
||||
private[sbt] def mkGeneratedRoot(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectReference]
|
||||
): Project =
|
||||
validProjectID(id).foreach(errMsg => sys.error(s"Invalid project ID: $errMsg"))
|
||||
val plugins = Plugins.empty
|
||||
val origin = ProjectOrigin.GenericRoot
|
||||
new ProjectDef(id, base, aggregate, Nil, Nil, Nil, plugins, Nil, origin)
|
||||
with Project
|
||||
with GeneratedRootProject
|
||||
|
||||
private abstract class ProjectDef[PR <: ProjectReference](
|
||||
val id: String,
|
||||
val base: File,
|
||||
|
|
@ -198,7 +257,9 @@ object Project:
|
|||
Dag.topologicalSort(configurations)(_.extendsConfigs)
|
||||
}
|
||||
|
||||
private def unresolved(
|
||||
// Data structure representing an unresolved Project in terms of the project references.
|
||||
// This is created in build.sbt by the build user.
|
||||
private[sbt] def unresolved(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectReference],
|
||||
|
|
@ -208,7 +269,7 @@ object Project:
|
|||
plugins: Plugins,
|
||||
autoPlugins: Seq[AutoPlugin],
|
||||
origin: ProjectOrigin
|
||||
): Project = {
|
||||
): Project =
|
||||
validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg))
|
||||
new ProjectDef[ProjectReference](
|
||||
id,
|
||||
|
|
@ -221,11 +282,106 @@ object Project:
|
|||
autoPlugins,
|
||||
origin
|
||||
) with Project
|
||||
}
|
||||
|
||||
// Data structure representing resolved Project in terms of references to
|
||||
// other projects in dependencies etc.
|
||||
private def resolved(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectRef],
|
||||
dependencies: Seq[ClasspathDep[ProjectRef]],
|
||||
settings: Seq[Def.Setting[_]],
|
||||
configurations: Seq[Configuration],
|
||||
plugins: Plugins,
|
||||
autoPlugins: Seq[AutoPlugin],
|
||||
origin: ProjectOrigin
|
||||
): ResolvedProject =
|
||||
new ProjectDef[ProjectRef](
|
||||
id,
|
||||
base,
|
||||
aggregate,
|
||||
dependencies,
|
||||
settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
origin
|
||||
) with ResolvedProject
|
||||
|
||||
/** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not. */
|
||||
def validProjectID(id: String): Option[String] =
|
||||
DefaultParsers.parse(id, DefaultParsers.ID).left.toOption
|
||||
|
||||
private[this] def validProjectIDStart(id: String): Boolean =
|
||||
DefaultParsers.parse(id, DefaultParsers.IDStart).isRight
|
||||
|
||||
def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] =
|
||||
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
|
||||
|
||||
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
|
||||
[a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
|
||||
|
||||
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
|
||||
val f = mapScope(g)
|
||||
ss.map { setting =>
|
||||
setting.mapKey(f).mapReferenced(f)
|
||||
}
|
||||
|
||||
def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
|
||||
val f = mapScope(g)
|
||||
ss.map(_ mapReferenced f)
|
||||
|
||||
def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
|
||||
inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)
|
||||
|
||||
private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] =
|
||||
inScope(ThisScope.copy(project = Select(ThisBuild)), i)
|
||||
|
||||
private[sbt] def inConfig[T](conf: Configuration, i: Initialize[T]): Initialize[T] =
|
||||
inScope(ThisScope.copy(config = Select(conf)), i)
|
||||
|
||||
def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
|
||||
inScope(ThisScope.copy(task = Select(t.key)))(ss)
|
||||
|
||||
private[sbt] def inTask[A](t: Scoped, i: Initialize[A]): Initialize[A] =
|
||||
inScope(ThisScope.copy(task = Select(t.key)), i)
|
||||
|
||||
def inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
|
||||
Project.transform(Scope.replaceThis(scope), ss)
|
||||
|
||||
private[sbt] def inScope[A](scope: Scope, i: Initialize[A]): Initialize[A] =
|
||||
i.mapReferenced(Project.mapScope(Scope.replaceThis(scope)))
|
||||
|
||||
/**
|
||||
* Normalize a String so that it is suitable for use as a dependency management module identifier.
|
||||
* This is a best effort implementation, since valid characters are not documented or consistent.
|
||||
*/
|
||||
def normalizeModuleID(id: String): String = normalizeBase(id)
|
||||
|
||||
/** Constructs a valid Project ID based on `id` and returns it in Right or returns the error message in Left if one cannot be constructed. */
|
||||
private[sbt] def normalizeProjectID(id: String): Either[String, String] = {
|
||||
val attempt = normalizeBase(id)
|
||||
val refined =
|
||||
if (attempt.length < 1) "root"
|
||||
else if (!validProjectIDStart(attempt.substring(0, 1))) "root-" + attempt
|
||||
else attempt
|
||||
validProjectID(refined).toLeft(refined)
|
||||
}
|
||||
|
||||
private[this] def normalizeBase(s: String) =
|
||||
s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-")
|
||||
|
||||
private[sbt] enum LoadAction:
|
||||
case Return
|
||||
case Current
|
||||
case Plugins
|
||||
|
||||
private[sbt] lazy val loadActionParser: Parser[LoadAction] = {
|
||||
import DefaultParsers.*
|
||||
token(
|
||||
Space ~> ("plugins" ^^^ LoadAction.Plugins | "return" ^^^ LoadAction.Return)
|
||||
) ?? LoadAction.Current
|
||||
}
|
||||
end Project
|
||||
|
||||
sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {
|
||||
|
|
@ -234,3 +390,5 @@ sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {
|
|||
def autoPlugins: Seq[AutoPlugin]
|
||||
|
||||
}
|
||||
|
||||
private[sbt] trait GeneratedRootProject
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import sbt.ConcurrentRestrictions.Tag
|
|||
import sbt.Def.{ Initialize, ScopedKey, Setting, setting }
|
||||
import std.TaskMacro
|
||||
import std.TaskExtra.{ task => mktask, _ }
|
||||
import scala.reflect.ManifestFactory
|
||||
|
||||
/** An abstraction on top of Settings for build configuration and task definition. */
|
||||
sealed trait Scoped extends Equals:
|
||||
|
|
@ -195,7 +196,7 @@ sealed abstract class TaskKey[A1]
|
|||
): Setting[Task[A1]] =
|
||||
set0(
|
||||
this.zipWith(other) { (ta1: Task[A1], ta2: Task[A2]) =>
|
||||
multT2Task((ta1, ta2)) map f.tupled
|
||||
multT2Task((ta1, ta2)).mapN(f.tupled)
|
||||
},
|
||||
source
|
||||
)
|
||||
|
|
@ -204,7 +205,7 @@ sealed abstract class TaskKey[A1]
|
|||
f: (A1, A2) => A1
|
||||
): Setting[Task[A1]] =
|
||||
set(this.zipWith(other) { (ta1: Task[A1], ta2: Task[A2]) =>
|
||||
multT2Task((ta1, ta2)) map f.tupled
|
||||
multT2Task((ta1, ta2)).mapN(f.tupled)
|
||||
})
|
||||
|
||||
final def withRank(rank: Int): TaskKey[A1] =
|
||||
|
|
@ -440,6 +441,9 @@ object Scoped:
|
|||
// }
|
||||
}
|
||||
|
||||
private def coerceToAnyTaskSeq(tasks: Seq[AnyInitTask]): Seq[Def.Initialize[Task[Any]]] =
|
||||
tasks.asInstanceOf[Seq[Def.Initialize[Task[Any]]]]
|
||||
|
||||
/**
|
||||
* Enriches `Initialize[Task[S]]` types.
|
||||
*
|
||||
|
|
@ -449,9 +453,13 @@ object Scoped:
|
|||
final class RichInitializeTask[S](i: Initialize[Task[S]]) extends RichInitTaskBase[S, Task] {
|
||||
protected def onTask[T](f: Task[S] => Task[T]): Initialize[Task[T]] = i apply f
|
||||
|
||||
def dependsOn(tasks: AnyInitTask*): Initialize[Task[S]] = {
|
||||
i.zipWith(Initialize.joinAny[Task](tasks))((thisTask, deps) => thisTask.dependsOn(deps: _*))
|
||||
}
|
||||
def dependsOn[B1](task1: Initialize[Task[B1]]): Initialize[Task[S]] =
|
||||
dependsOn(Seq[AnyInitTask](task1.asInstanceOf[AnyInitTask]))
|
||||
|
||||
def dependsOn(tasks: Seq[AnyInitTask]): Initialize[Task[S]] =
|
||||
i.zipWith(
|
||||
Initialize.joinAny[Task](coerceToAnyTaskSeq(tasks))
|
||||
)((thisTask, deps) => thisTask.dependsOn(deps: _*))
|
||||
|
||||
def failure: Initialize[Task[Incomplete]] = i(_.failure)
|
||||
def result: Initialize[Task[Result[S]]] = i(_.result)
|
||||
|
|
@ -469,7 +477,9 @@ object Scoped:
|
|||
tasks: Seq[AnyInitTask],
|
||||
key: AttributeKey[Seq[Task[_]]]
|
||||
): Initialize[Task[S]] =
|
||||
Initialize.joinAny[Task](tasks).zipWith(i)((ts, i) => i.copy(info = i.info.set(key, ts)))
|
||||
Initialize
|
||||
.joinAny[Task](coerceToAnyTaskSeq(tasks))
|
||||
.zipWith(i)((ts, i) => i.copy(info = i.info.set(key, ts)))
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -483,11 +493,10 @@ object Scoped:
|
|||
|
||||
protected def onTask[T](f: Task[S] => Task[T]): Initialize[InputTask[T]] = i(_ mapTask f)
|
||||
|
||||
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] = {
|
||||
i.zipWith(Initialize.joinAny[Task](tasks))((thisTask, deps) =>
|
||||
def dependsOn(tasks: AnyInitTask*): Initialize[InputTask[S]] =
|
||||
i.zipWith(Initialize.joinAny[Task](coerceToAnyTaskSeq(tasks)))((thisTask, deps) =>
|
||||
thisTask.mapTask(_.dependsOn(deps: _*))
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -538,7 +547,7 @@ object Scoped:
|
|||
def mapFailure[T](f: Incomplete => T): Initialize[R[T]] = onTask(_.result map (f compose failM))
|
||||
}
|
||||
|
||||
type AnyInitTask = Initialize[Task[Any]]
|
||||
type AnyInitTask = Initialize[Task[_]]
|
||||
|
||||
implicit def richTaskSeq[T](in: Seq[Initialize[Task[T]]]): RichTaskSeq[T] = new RichTaskSeq(in)
|
||||
final class RichTaskSeq[T](keys: Seq[Initialize[Task[T]]]) {
|
||||
|
|
@ -549,7 +558,9 @@ object Scoped:
|
|||
implicit def richAnyTaskSeq(in: Seq[AnyInitTask]): RichAnyTaskSeq = new RichAnyTaskSeq(in)
|
||||
final class RichAnyTaskSeq(keys: Seq[AnyInitTask]) {
|
||||
def dependOn: Initialize[Task[Unit]] =
|
||||
Initialize.joinAny[Task](keys).apply(deps => nop.dependsOn(deps: _*))
|
||||
Initialize
|
||||
.joinAny[Task](coerceToAnyTaskSeq(keys))
|
||||
.apply(deps => nop.dependsOn(deps: _*))
|
||||
}
|
||||
|
||||
sealed abstract class RichTaskables[K[L[x]]](final val keys: K[Taskable])(using
|
||||
|
|
@ -761,6 +772,8 @@ object InputKey:
|
|||
description: String = "",
|
||||
rank: Int = KeyRanks.DefaultInputRank
|
||||
): InputKey[A1] =
|
||||
given mf: Manifest[InputTask[A1]] =
|
||||
ManifestFactory.classType[InputTask[A1]](classOf[InputTask[A1]], manifest[A1])
|
||||
apply(AttributeKey[InputTask[A1]](label, description, rank))
|
||||
|
||||
def apply[A1: Manifest](
|
||||
|
|
@ -777,6 +790,8 @@ object InputKey:
|
|||
extend1: Scoped,
|
||||
extendN: Scoped*
|
||||
): InputKey[A1] =
|
||||
given mf: Manifest[InputTask[A1]] =
|
||||
ManifestFactory.classType[InputTask[A1]](classOf[InputTask[A1]], manifest[A1])
|
||||
apply(AttributeKey[InputTask[A1]](label, description, extendScoped(extend1, extendN), rank))
|
||||
|
||||
def apply[A1](akey: AttributeKey[InputTask[A1]]): InputKey[A1] =
|
||||
|
|
@ -791,6 +806,8 @@ object TaskKey:
|
|||
description: String = "",
|
||||
rank: Int = KeyRanks.DefaultTaskRank
|
||||
): TaskKey[A1] =
|
||||
given mf: Manifest[Task[A1]] =
|
||||
ManifestFactory.classType[Task[A1]](classOf[Task[A1]], manifest[A1])
|
||||
apply(AttributeKey[Task[A1]](label, description, rank))
|
||||
|
||||
def apply[A1: Manifest](
|
||||
|
|
@ -799,6 +816,8 @@ object TaskKey:
|
|||
extend1: Scoped,
|
||||
extendN: Scoped*
|
||||
): TaskKey[A1] =
|
||||
given mf: Manifest[Task[A1]] =
|
||||
ManifestFactory.classType[Task[A1]](classOf[Task[A1]], manifest[A1])
|
||||
apply(AttributeKey[Task[A1]](label, description, extendScoped(extend1, extendN)))
|
||||
|
||||
def apply[A1: Manifest](
|
||||
|
|
@ -808,12 +827,17 @@ object TaskKey:
|
|||
extend1: Scoped,
|
||||
extendN: Scoped*
|
||||
): TaskKey[A1] =
|
||||
given mf: Manifest[Task[A1]] =
|
||||
ManifestFactory.classType[Task[A1]](classOf[Task[A1]], manifest[A1])
|
||||
apply(AttributeKey[Task[A1]](label, description, extendScoped(extend1, extendN), rank))
|
||||
|
||||
def apply[A1](akey: AttributeKey[Task[A1]]): TaskKey[A1] =
|
||||
Scoped.scopedTask(Scope.ThisScope, akey)
|
||||
|
||||
def local[A1: Manifest]: TaskKey[A1] = apply[A1](AttributeKey.local[Task[A1]])
|
||||
def local[A1: Manifest]: TaskKey[A1] =
|
||||
given mf: Manifest[Task[A1]] =
|
||||
ManifestFactory.classType[Task[A1]](classOf[Task[A1]], manifest[A1])
|
||||
apply[A1](AttributeKey.local[Task[A1]])
|
||||
|
||||
end TaskKey
|
||||
|
||||
|
|
@ -821,8 +845,27 @@ end TaskKey
|
|||
object SettingKey:
|
||||
def apply[A1: Manifest: OptJsonWriter](
|
||||
label: String,
|
||||
description: String = "",
|
||||
rank: Int = KeyRanks.DefaultSettingRank
|
||||
): SettingKey[A1] =
|
||||
apply[A1](
|
||||
label = label,
|
||||
description = "",
|
||||
rank = KeyRanks.DefaultSettingRank
|
||||
)
|
||||
|
||||
def apply[A1: Manifest: OptJsonWriter](
|
||||
label: String,
|
||||
description: String,
|
||||
): SettingKey[A1] =
|
||||
apply[A1](
|
||||
label = label,
|
||||
description = description,
|
||||
rank = KeyRanks.DefaultSettingRank,
|
||||
)
|
||||
|
||||
def apply[A1: Manifest: OptJsonWriter](
|
||||
label: String,
|
||||
description: String,
|
||||
rank: Int
|
||||
): SettingKey[A1] =
|
||||
apply(AttributeKey[A1](label, description, rank))
|
||||
|
||||
|
|
|
|||
|
|
@ -153,22 +153,6 @@ object InputWrapper:
|
|||
unexpectedType(c)(pos, tpe)
|
||||
}
|
||||
|
||||
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value */
|
||||
def previousMacroImpl[T: c.WeakTypeTag](
|
||||
using qctx: Quotes
|
||||
)(format: c.Expr[sjsonnew.JsonFormat[T]]): c.Expr[Option[T]] = {
|
||||
import c.universe._
|
||||
c.macroApplication match {
|
||||
case a @ Apply(Select(Apply(_, t :: Nil), _), _) =>
|
||||
if (t.tpe <:< c.weakTypeOf[TaskKey[T]]) {
|
||||
val tsTyped = c.Expr[TaskKey[T]](t)
|
||||
val newTree = c.universe.reify { Previous.runtime[T](tsTyped.splice)(format.splice) }
|
||||
wrapPrevious[T](c)(newTree, a.pos)
|
||||
} else unexpectedType(c)(a.pos, t.tpe)
|
||||
case x => ContextUtil.unexpectedTree(x)
|
||||
}
|
||||
}
|
||||
|
||||
private def unexpectedType(using qctx: Quotes)(pos: c.Position, tpe: c.Type) =
|
||||
c.abort(pos, s"Internal sbt error. Unexpected type ${tpe.widen}")
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -77,15 +77,18 @@ object FullInstance:
|
|||
flatten(nested)
|
||||
|
||||
override def flatten[A1](in: Initialize[Task[Initialize[Task[A1]]]]): Initialize[Task[A1]] =
|
||||
type K[L[x]] =
|
||||
AList.Tuple3K[Task[Initialize[Task[A1]]], Task[SS], [a] => Initialize[a] => Initialize[a]][
|
||||
L
|
||||
]
|
||||
Def.app[K, Task[A1]]((in, settingsData, Def.capturedTransformations)) {
|
||||
case (a: Task[Initialize[Task[A1]]], data: Task[SS], f) =>
|
||||
import TaskExtra.multT2Task
|
||||
(a, data).flatMap { case (a, d) => f(a) evaluate d }
|
||||
}(AList.tuple3[Task[Initialize[Task[A1]]], Task[SS], [a] => Initialize[a] => Initialize[a]])
|
||||
FullInstance.flatten[A1](in)
|
||||
|
||||
def flatten[A1](in: Initialize[Task[Initialize[Task[A1]]]]): Initialize[Task[A1]] =
|
||||
type K[L[x]] =
|
||||
AList.Tuple3K[Task[Initialize[Task[A1]]], Task[SS], [a] => Initialize[a] => Initialize[a]][
|
||||
L
|
||||
]
|
||||
Def.app[K, Task[A1]]((in, settingsData, Def.capturedTransformations)) {
|
||||
case (a: Task[Initialize[Task[A1]]], data: Task[SS], f) =>
|
||||
import TaskExtra.multT2Task
|
||||
(a, data).flatMapN { case (a, d) => f(a) evaluate d }
|
||||
}(AList.tuple3[Task[Initialize[Task[A1]]], Task[SS], [a] => Initialize[a] => Initialize[a]])
|
||||
|
||||
def flattenFun[A1, A2](
|
||||
in: Initialize[Task[A1 => Initialize[Task[A2]]]]
|
||||
|
|
@ -98,7 +101,7 @@ object FullInstance:
|
|||
case (a: Task[A1 => Initialize[Task[A2]]] @unchecked, data: Task[SS] @unchecked, f) => {
|
||||
(s: A1) =>
|
||||
import TaskExtra.multT2Task
|
||||
(a, data) flatMap { case (af, d) => f(af(s)) evaluate d }
|
||||
(a, data) flatMapN { case (af, d) => f(af(s)) evaluate d }
|
||||
}
|
||||
}(
|
||||
AList.tuple3[Task[A1 => Initialize[Task[A2]]], Task[SS], [a] => Initialize[a] => Initialize[
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
package sbt
|
||||
package std
|
||||
|
||||
import java.io.File
|
||||
import scala.annotation.tailrec
|
||||
import scala.quoted.*
|
||||
|
||||
|
|
@ -59,9 +60,18 @@ private[sbt] object KeyMacro:
|
|||
Expr.summon[OptJsonWriter[A1]].getOrElse(sys.error("OptJsonWriter[A] not found for $tpe")),
|
||||
)
|
||||
|
||||
def projectImpl(using qctx: Quotes): Expr[Project] =
|
||||
val name = Expr(definingValName(errorMsg2("project")))
|
||||
'{
|
||||
Project($name, new File($name))
|
||||
}
|
||||
|
||||
private def errorMsg(methodName: String): String =
|
||||
s"""$methodName must be directly assigned to a val, such as `val x = $methodName[Int]("description")`."""
|
||||
|
||||
private def errorMsg2(methodName: String): String =
|
||||
s"""$methodName must be directly assigned to a val, such as `val x = ($methodName in file("core"))`."""
|
||||
|
||||
private def definingValName(errorMsg: String)(using qctx: Quotes): String =
|
||||
val term = enclosingTerm
|
||||
if term.isValDef then term.name
|
||||
|
|
@ -75,5 +85,4 @@ private[sbt] object KeyMacro:
|
|||
case sym if !sym.isTerm => enclosingTerm0(sym.owner)
|
||||
case _ => sym
|
||||
enclosingTerm0(Symbol.spliceOwner)
|
||||
|
||||
end KeyMacro
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ import language.experimental.macros
|
|||
import scala.annotation.tailrec
|
||||
import scala.reflect.internal.util.UndefinedPosition
|
||||
import scala.quoted.*
|
||||
import sjsonnew.JsonFormat
|
||||
|
||||
object TaskMacro:
|
||||
final val AssignInitName = "set"
|
||||
|
|
@ -76,23 +77,20 @@ object TaskMacro:
|
|||
def taskDynMacroImpl[A1: Type](
|
||||
t: Expr[Initialize[Task[A1]]]
|
||||
)(using qctx: Quotes): Expr[Initialize[Task[A1]]] =
|
||||
val convert1 = new FullConvert(qctx, 0)
|
||||
val convert1 = new FullConvert(qctx, 1000)
|
||||
convert1.contFlatMap[A1, F, Id](t, convert1.appExpr)
|
||||
|
||||
/*
|
||||
def taskIfMacroImpl[A: Type](
|
||||
c: blackbox.Context
|
||||
)(a: c.Expr[A]): c.Expr[Initialize[Task[A]]] = {
|
||||
import c.universe._
|
||||
a.tree match {
|
||||
case Block(stat, If(cond, thenp, elsep)) =>
|
||||
c.Expr[Initialize[Task[A]]](mkIfS(c)(Block(stat, cond), thenp, elsep))
|
||||
case If(cond, thenp, elsep) =>
|
||||
c.Expr[Initialize[Task[A]]](mkIfS(c)(cond, thenp, elsep))
|
||||
case x => ContextUtil.unexpectedTree(x)
|
||||
}
|
||||
}
|
||||
*/
|
||||
/** Translates <task: TaskKey[T]>.previous(format) to Previous.runtime(<task>)(format).value */
|
||||
def previousImpl[A1: Type](t: Expr[TaskKey[A1]])(using
|
||||
qctx: Quotes
|
||||
): Expr[Option[A1]] =
|
||||
import qctx.reflect.*
|
||||
Expr.summon[JsonFormat[A1]] match
|
||||
case Some(ev) =>
|
||||
'{
|
||||
InputWrapper.`wrapInitTask_\u2603\u2603`[Option[A1]](Previous.runtime[A1]($t)($ev))
|
||||
}
|
||||
case _ => report.errorAndAbort(s"JsonFormat[${Type.of[A1]}] missing")
|
||||
|
||||
/** Implementation of := macro for settings. */
|
||||
def settingAssignMacroImpl[A1: Type](rec: Expr[Scoped.DefinableSetting[A1]], v: Expr[A1])(using
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ import sbt.internal.util.complete.DefaultParsers
|
|||
import sbt.{ Def, InputTask, Task }
|
||||
import sbt.Def.parsed
|
||||
import sbt.Def.value
|
||||
import sbt.Def.previous
|
||||
import sbt.util.CacheImplicits.given
|
||||
|
||||
object UseTask:
|
||||
val set = Def.setting { 23 }
|
||||
|
|
@ -42,6 +44,7 @@ object Assign {
|
|||
val ak = taskKey[Int]("a")
|
||||
val bk = taskKey[Seq[Int]]("b")
|
||||
val ck = settingKey[File]("c")
|
||||
val intTask = taskKey[Int]("int")
|
||||
val sk = taskKey[Set[_]]("s")
|
||||
val bgList = taskKey[Seq[Int]]("")
|
||||
|
||||
|
|
@ -76,6 +79,10 @@ object Assign {
|
|||
bgList := { mk.value.toString.toList.map(_.toInt) },
|
||||
)
|
||||
|
||||
val sd = Def.settingDyn {
|
||||
name
|
||||
}
|
||||
|
||||
val zz = Def.task {
|
||||
mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value + mk.value + tk.value
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,13 @@
|
|||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt
|
||||
/**
|
||||
* Indicate whether the project was created organically, synthesized by a plugin,
|
||||
* or is a "generic root" project supplied by sbt when a project doesn't exist for `file(".")`.
|
||||
* Type for AutoPlugin's trigger method.
|
||||
* Determines whether an AutoPlugin will be activated for a project when the
|
||||
* `requires` clause is satisfied.
|
||||
*/
|
||||
final class JavaVersion private (
|
||||
val numbers: Vector[Long],
|
||||
val tags: Vector[String],
|
||||
|
|
|
|||
|
|
@ -4,15 +4,15 @@
|
|||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt
|
||||
|
||||
/**
|
||||
* Type for AutoPlugin's trigger method.
|
||||
* Determines whether an AutoPlugin will be activated for a project when the
|
||||
* `requires` clause is satisfied.
|
||||
*/
|
||||
sealed abstract class PluginTrigger extends Serializable
|
||||
object PluginTrigger {
|
||||
|
||||
|
||||
case object AllRequirements extends PluginTrigger
|
||||
case object NoTrigger extends PluginTrigger
|
||||
}
|
||||
// sealed abstract class PluginTrigger extends Serializable
|
||||
// object PluginTrigger {
|
||||
|
||||
// case object AllRequirements extends PluginTrigger
|
||||
// case object NoTrigger extends PluginTrigger
|
||||
// }
|
||||
|
|
|
|||
|
|
@ -4,16 +4,16 @@
|
|||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt
|
||||
|
||||
/**
|
||||
* Indicate whether the project was created organically, synthesized by a plugin,
|
||||
* or is a "generic root" project supplied by sbt when a project doesn't exist for `file(".")`.
|
||||
*/
|
||||
sealed abstract class ProjectOrigin extends Serializable
|
||||
object ProjectOrigin {
|
||||
|
||||
|
||||
case object Organic extends ProjectOrigin
|
||||
case object ExtraProject extends ProjectOrigin
|
||||
case object DerivedProject extends ProjectOrigin
|
||||
case object GenericRoot extends ProjectOrigin
|
||||
}
|
||||
// sealed abstract class ProjectOrigin extends Serializable
|
||||
// object ProjectOrigin {
|
||||
|
||||
// case object Organic extends ProjectOrigin
|
||||
// case object ExtraProject extends ProjectOrigin
|
||||
// case object DerivedProject extends ProjectOrigin
|
||||
// case object GenericRoot extends ProjectOrigin
|
||||
// }
|
||||
|
|
|
|||
|
|
@ -3,20 +3,20 @@ package sbt
|
|||
|
||||
## Indicate whether the project was created organically, synthesized by a plugin,
|
||||
## or is a "generic root" project supplied by sbt when a project doesn't exist for `file(".")`.
|
||||
enum ProjectOrigin {
|
||||
Organic
|
||||
ExtraProject
|
||||
DerivedProject
|
||||
GenericRoot
|
||||
}
|
||||
#enum ProjectOrigin {
|
||||
# Organic
|
||||
# ExtraProject
|
||||
# DerivedProject
|
||||
# GenericRoot
|
||||
#}
|
||||
|
||||
## Type for AutoPlugin's trigger method.
|
||||
## Determines whether an AutoPlugin will be activated for a project when the
|
||||
## `requires` clause is satisfied.
|
||||
enum PluginTrigger {
|
||||
AllRequirements
|
||||
NoTrigger
|
||||
}
|
||||
#enum PluginTrigger {
|
||||
# AllRequirements
|
||||
# NoTrigger
|
||||
#}
|
||||
|
||||
type JavaVersion {
|
||||
numbers: [Long] @since("1.2.0")
|
||||
|
|
|
|||
|
|
@ -10,14 +10,21 @@ package sbt
|
|||
import sbt.internal.DslEntry
|
||||
import sbt.librarymanagement.Configuration
|
||||
|
||||
private[sbt] trait BuildSyntax {
|
||||
private[sbt] trait BuildSyntax:
|
||||
import scala.language.experimental.macros
|
||||
def settingKey[A](description: String): SettingKey[A] = ???
|
||||
// macro std.KeyMacro.settingKeyImpl[T]
|
||||
def taskKey[A](description: String): TaskKey[A] = ???
|
||||
// macro std.KeyMacro.taskKeyImpl[T]
|
||||
def inputKey[A](description: String): InputKey[A] = ???
|
||||
// macro std.KeyMacro.inputKeyImpl[T]
|
||||
|
||||
/**
|
||||
* Creates a new Project. This is a macro that expects to be assigned directly to a val.
|
||||
* The name of the val is used as the project ID and the name of the base directory of the project.
|
||||
*/
|
||||
inline def project: Project =
|
||||
${ std.KeyMacro.projectImpl }
|
||||
inline def settingKey[A1](inline description: String): SettingKey[A1] =
|
||||
${ std.KeyMacro.settingKeyImpl[A1]('description) }
|
||||
inline def taskKey[A1](inline description: String): TaskKey[A1] =
|
||||
${ std.KeyMacro.taskKeyImpl[A1]('description) }
|
||||
inline def inputKey[A1](inline description: String): InputKey[A1] =
|
||||
${ std.KeyMacro.inputKeyImpl[A1]('description) }
|
||||
|
||||
def enablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslEnablePlugins(ps)
|
||||
def disablePlugins(ps: AutoPlugin*): DslEntry = DslEntry.DslDisablePlugins(ps)
|
||||
|
|
@ -28,5 +35,6 @@ private[sbt] trait BuildSyntax {
|
|||
|
||||
implicit def sbtStateToUpperStateOps(s: State): UpperStateOps =
|
||||
new UpperStateOps.UpperStateOpsImpl(s)
|
||||
}
|
||||
end BuildSyntax
|
||||
|
||||
private[sbt] object BuildSyntax extends BuildSyntax
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ package sbt
|
|||
import java.io.File
|
||||
import sbt.Def.{ ScopedKey, Setting }
|
||||
import sbt.Keys._
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.Act
|
||||
import sbt.internal.CommandStrings._
|
||||
|
|
|
|||
|
|
@ -18,19 +18,20 @@ import org.apache.ivy.core.module.descriptor.ModuleDescriptor
|
|||
import org.apache.ivy.core.module.id.ModuleRevisionId
|
||||
import org.apache.logging.log4j.core.{ Appender => XAppender }
|
||||
import org.scalasbt.ipcsocket.Win32SecurityLevel
|
||||
import sbt.Def.{ Initialize, ScopedKey, Setting, SettingsDefinition }
|
||||
import sbt.Def.{ Initialize, ScopedKey, Setting, SettingsDefinition, parsed }
|
||||
import sbt.Keys._
|
||||
import sbt.OptionSyntax._
|
||||
import sbt.Project.{
|
||||
inConfig,
|
||||
inScope,
|
||||
inTask,
|
||||
richInitialize,
|
||||
richInitializeTask,
|
||||
richTaskSessionVar,
|
||||
sbtRichTaskPromise
|
||||
// richInitialize,
|
||||
// richInitializeTask,
|
||||
// richTaskSessionVar,
|
||||
// sbtRichTaskPromise
|
||||
}
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.Scope.{ GlobalScope, ThisScope, fillTaskAxis }
|
||||
import sbt.State.StateOpsImpl
|
||||
import sbt.coursierint._
|
||||
import sbt.internal.CommandStrings.ExportStream
|
||||
import sbt.internal._
|
||||
|
|
@ -81,7 +82,7 @@ import sbt.nio.Keys._
|
|||
import sbt.nio.file.syntax._
|
||||
import sbt.nio.file.{ FileTreeView, Glob, RecursiveGlob }
|
||||
import sbt.nio.Watch
|
||||
import sbt.std.TaskExtra._
|
||||
import sbt.std.TaskExtra.*
|
||||
import sbt.testing.{ AnnotatedFingerprint, Framework, Runner, SubclassFingerprint }
|
||||
import sbt.util.CacheImplicits._
|
||||
import sbt.util.InterfaceUtil.{ t2, toJavaFunction => f1 }
|
||||
|
|
@ -308,7 +309,7 @@ object Defaults extends BuildCommon {
|
|||
try onUnload.value(s)
|
||||
finally IO.delete(taskTemporaryDirectory.value)
|
||||
},
|
||||
// extraLoggers is deprecated
|
||||
// // extraLoggers is deprecated
|
||||
SettingKey[ScopedKey[_] => Seq[XAppender]]("extraLoggers") :== { _ =>
|
||||
Nil
|
||||
},
|
||||
|
|
@ -904,7 +905,7 @@ object Defaults extends BuildCommon {
|
|||
tastyFiles := Def.taskIf {
|
||||
if (ScalaArtifacts.isScala3(scalaVersion.value)) {
|
||||
val _ = compile.value
|
||||
val tastyFiles = classDirectory.value.**("*.tasty").get
|
||||
val tastyFiles = classDirectory.value.**("*.tasty").get()
|
||||
tastyFiles.map(_.getAbsoluteFile)
|
||||
} else Nil
|
||||
}.value,
|
||||
|
|
@ -919,7 +920,7 @@ object Defaults extends BuildCommon {
|
|||
override def afterEarlyOutput(isSuccess: Boolean): Unit = {
|
||||
if (isSuccess) s.log.debug(s"[$mn / $c] early output is success")
|
||||
else s.log.debug(s"[$mn / $c] early output can't be made because of macros")
|
||||
promise.complete(Value(isSuccess))
|
||||
promise.complete(Result.Value(isSuccess))
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
@ -1090,11 +1091,10 @@ object Defaults extends BuildCommon {
|
|||
override def triggeredMessage(s: WatchState) = trigMsg(s)
|
||||
override def watchService() = getService()
|
||||
override def watchSources(s: State) =
|
||||
EvaluateTask(Project structure s, key, s, base) match {
|
||||
case Some((_, Value(ps))) => ps
|
||||
case Some((_, Inc(i))) => throw i
|
||||
case None => sys.error("key not found: " + Def.displayFull(key))
|
||||
}
|
||||
EvaluateTask(Project structure s, key, s, base) match
|
||||
case Some((_, Result.Value(ps))) => ps
|
||||
case Some((_, Result.Inc(i))) => throw i
|
||||
case None => sys.error("key not found: " + Def.displayFull(key))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1252,22 +1252,34 @@ object Defaults extends BuildCommon {
|
|||
_.name
|
||||
).distinct) storeAs definedTestNames triggeredBy compile).value,
|
||||
testQuick / testFilter := testQuickFilter.value,
|
||||
executeTests := (
|
||||
Def.taskDyn {
|
||||
executeTests := {
|
||||
import sbt.TupleSyntax.*
|
||||
(
|
||||
test / streams,
|
||||
loadedTestFrameworks,
|
||||
testLoader,
|
||||
(test / testGrouping),
|
||||
(test / testExecution),
|
||||
(test / fullClasspath),
|
||||
testForkedParallel,
|
||||
(test / javaOptions),
|
||||
(classLoaderLayeringStrategy),
|
||||
thisProject,
|
||||
).flatMapN { case (s, lt, tl, gp, ex, cp, fp, jo, clls, thisProj) =>
|
||||
allTestGroupsTask(
|
||||
(test / streams).value,
|
||||
loadedTestFrameworks.value,
|
||||
testLoader.value,
|
||||
(test / testGrouping).value,
|
||||
(test / testExecution).value,
|
||||
(test / fullClasspath).value,
|
||||
testForkedParallel.value,
|
||||
(test / javaOptions).value,
|
||||
(classLoaderLayeringStrategy).value,
|
||||
projectId = s"${thisProject.value.id} / ",
|
||||
s,
|
||||
lt,
|
||||
tl,
|
||||
gp,
|
||||
ex,
|
||||
cp,
|
||||
fp,
|
||||
jo,
|
||||
clls,
|
||||
projectId = s"${thisProj.id} / ",
|
||||
)
|
||||
}
|
||||
).value,
|
||||
}.value,
|
||||
// ((streams in test, loadedTestFrameworks, testLoader, testGrouping in test, testExecution in test, fullClasspath in test, javaHome in test, testForkedParallel, javaOptions in test) flatMap allTestGroupsTask).value,
|
||||
Test / test / testResultLogger :== TestResultLogger.SilentWhenNoTests, // https://github.com/sbt/sbt/issues/1185
|
||||
test := {
|
||||
|
|
@ -1285,6 +1297,7 @@ object Defaults extends BuildCommon {
|
|||
finally close(testLoader.value)
|
||||
}
|
||||
)
|
||||
|
||||
private def close(sbtLoader: ClassLoader): Unit = sbtLoader match {
|
||||
case u: AutoCloseable => u.close()
|
||||
case c: ClasspathFilter => c.close()
|
||||
|
|
@ -1439,8 +1452,8 @@ object Defaults extends BuildCommon {
|
|||
val s = streams.value
|
||||
val filter = testFilter.value
|
||||
val config = testExecution.value
|
||||
|
||||
implicit val display = Project.showContextKey(state.value)
|
||||
val st = state.value
|
||||
given display: Show[ScopedKey[_]] = Project.showContextKey(st)
|
||||
val modifiedOpts =
|
||||
Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions: _*) +: config.options
|
||||
val newConfig = config.copy(options = modifiedOpts)
|
||||
|
|
@ -1458,7 +1471,9 @@ object Defaults extends BuildCommon {
|
|||
)
|
||||
val taskName = display.show(resolvedScoped.value)
|
||||
val trl = testResultLogger.value
|
||||
output.map(out => trl.run(s.log, out, taskName))
|
||||
(Def
|
||||
.value[Task[Tests.Output]] { output })
|
||||
.map { out => trl.run(s.log, out, taskName) }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1486,7 +1501,7 @@ object Defaults extends BuildCommon {
|
|||
groups: Seq[Tests.Group],
|
||||
config: Tests.Execution,
|
||||
cp: Classpath,
|
||||
): Initialize[Task[Tests.Output]] = {
|
||||
): Task[Tests.Output] = {
|
||||
allTestGroupsTask(
|
||||
s,
|
||||
frameworks,
|
||||
|
|
@ -1509,7 +1524,7 @@ object Defaults extends BuildCommon {
|
|||
config: Tests.Execution,
|
||||
cp: Classpath,
|
||||
forkedParallelExecution: Boolean
|
||||
): Initialize[Task[Tests.Output]] = {
|
||||
): Task[Tests.Output] = {
|
||||
allTestGroupsTask(
|
||||
s,
|
||||
frameworks,
|
||||
|
|
@ -1535,7 +1550,7 @@ object Defaults extends BuildCommon {
|
|||
javaOptions: Seq[String],
|
||||
strategy: ClassLoaderLayeringStrategy,
|
||||
projectId: String
|
||||
): Initialize[Task[Tests.Output]] = {
|
||||
): Task[Tests.Output] = {
|
||||
val processedOptions: Map[Tests.Group, Tests.ProcessedOptions] =
|
||||
groups
|
||||
.map(group => group -> Tests.processOptions(config, group.tests.toVector, s.log))
|
||||
|
|
@ -1632,7 +1647,8 @@ object Defaults extends BuildCommon {
|
|||
}
|
||||
out.copy(summaries = summaries)
|
||||
}
|
||||
Def.value { result }
|
||||
// Def.value[Task[Tests.Output]] {
|
||||
result
|
||||
}
|
||||
|
||||
def selectedFilter(args: Seq[String]): Seq[String => Boolean] = {
|
||||
|
|
@ -1710,13 +1726,20 @@ object Defaults extends BuildCommon {
|
|||
packageTaskSettings(packageDoc, packageDocMappings) ++
|
||||
Seq(Keys.`package` := packageBin.value)
|
||||
|
||||
def packageBinMappings = products map { _ flatMap Path.allSubpaths }
|
||||
def packageDocMappings = doc map { Path.allSubpaths(_).toSeq }
|
||||
def packageSrcMappings = concatMappings(resourceMappings, sourceMappings)
|
||||
def packageBinMappings: Initialize[Task[Seq[(File, String)]]] =
|
||||
products.map { _ flatMap Path.allSubpaths }
|
||||
def packageDocMappings: Initialize[Task[Seq[(File, String)]]] =
|
||||
doc.map { x => Path.allSubpaths(x).toSeq }
|
||||
def packageSrcMappings: Initialize[Task[Seq[(File, String)]]] =
|
||||
concatMappings(resourceMappings, sourceMappings)
|
||||
|
||||
private type Mappings = Initialize[Task[Seq[(File, String)]]]
|
||||
def concatMappings(as: Mappings, bs: Mappings) =
|
||||
(as zipWith bs)((a, b) => (a, b) map { case (a, b) => a ++ b })
|
||||
def concatMappings(as: Mappings, bs: Mappings): Mappings =
|
||||
as.zipWith(bs) { (a: Task[Seq[(File, String)]], b: Task[Seq[(File, String)]]) =>
|
||||
(a, b).mapN { case (seq1: Seq[(File, String)], seq2: Seq[(File, String)]) =>
|
||||
seq1 ++ seq2
|
||||
}
|
||||
}
|
||||
|
||||
// drop base directories, since there are no valid mappings for these
|
||||
def sourceMappings: Initialize[Task[Seq[(File, String)]]] =
|
||||
|
|
@ -1752,7 +1775,7 @@ object Defaults extends BuildCommon {
|
|||
excludes: Taskable[FileFilter]
|
||||
): Initialize[Task[Seq[File]]] =
|
||||
Def.task {
|
||||
dirs.toTask.value.descendantsExcept(filter.toTask.value, excludes.toTask.value).get
|
||||
dirs.toTask.value.descendantsExcept(filter.toTask.value, excludes.toTask.value).get()
|
||||
}
|
||||
|
||||
def relativeMappings( // forward to widened variant
|
||||
|
|
@ -1972,10 +1995,10 @@ object Defaults extends BuildCommon {
|
|||
mainClassTask: Initialize[Task[Option[String]]],
|
||||
copyClasspath: Initialize[Boolean],
|
||||
scalaRun: Initialize[Task[ScalaRun]]
|
||||
): Initialize[InputTask[JobHandle]] = {
|
||||
import Def.parserToInput
|
||||
): Initialize[InputTask[JobHandle]] =
|
||||
val parser = Def.spaceDelimited()
|
||||
Def.inputTask {
|
||||
val args = parser.parsed
|
||||
val service = bgJobService.value
|
||||
val mainClass = mainClassTask.value getOrElse sys.error("No main class detected.")
|
||||
val hashClasspath = (bgRun / bgHashClasspath).value
|
||||
|
|
@ -1986,17 +2009,14 @@ object Defaults extends BuildCommon {
|
|||
service.copyClasspath(products.value, classpath.value, workingDir, hashClasspath)
|
||||
else classpath.value
|
||||
val cp = data(files)
|
||||
val args = parser.parsed
|
||||
scalaRun.value match {
|
||||
scalaRun.value match
|
||||
case r: Run =>
|
||||
val loader = r.newLoader(cp)
|
||||
(Some(loader), wrapper(() => r.runWithLoader(loader, cp, mainClass, args, logger).get))
|
||||
case sr =>
|
||||
(None, wrapper(() => sr.run(mainClass, cp, args, logger).get))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// runMain calls bgRunMain in the background and waits for the result.
|
||||
def foregroundRunMainTask: Initialize[InputTask[Unit]] =
|
||||
|
|
@ -2031,14 +2051,13 @@ object Defaults extends BuildCommon {
|
|||
classpath: Initialize[Task[Classpath]],
|
||||
mainClassTask: Initialize[Task[Option[String]]],
|
||||
scalaRun: Initialize[Task[ScalaRun]]
|
||||
): Initialize[InputTask[Unit]] = {
|
||||
import Def.parserToInput
|
||||
): Initialize[InputTask[Unit]] =
|
||||
val parser = Def.spaceDelimited()
|
||||
Def.inputTask {
|
||||
val in = parser.parsed
|
||||
val mainClass = mainClassTask.value getOrElse sys.error("No main class detected.")
|
||||
scalaRun.value.run(mainClass, data(classpath.value), parser.parsed, streams.value.log).get
|
||||
scalaRun.value.run(mainClass, data(classpath.value), in, streams.value.log).get
|
||||
}
|
||||
}
|
||||
|
||||
def runnerTask: Setting[Task[ScalaRun]] = runner := runnerInit.value
|
||||
|
||||
|
|
@ -2837,7 +2856,7 @@ object Classpaths {
|
|||
key: Scoped.ScopingSetting[SettingKey[T]], // should be just SettingKey[T] (mea culpa)
|
||||
pkgTasks: Seq[TaskKey[_]],
|
||||
): Initialize[Seq[T]] =
|
||||
pkgTasks.map(pkg => key in pkg.scope in pkg).join
|
||||
pkgTasks.map(pkg => (pkg.scope / pkg / key)).join
|
||||
|
||||
private[this] def publishGlobalDefaults =
|
||||
Defaults.globalDefaults(
|
||||
|
|
@ -2982,7 +3001,10 @@ object Classpaths {
|
|||
Resolver.reorganizeAppResolvers(ars, uj, useMavenCentral)
|
||||
}
|
||||
},
|
||||
bootResolvers := (appConfiguration map bootRepositories).value,
|
||||
bootResolvers := {
|
||||
import Scoped.syntax.richInitialize
|
||||
(appConfiguration map bootRepositories).value
|
||||
},
|
||||
fullResolvers :=
|
||||
(Def.task {
|
||||
val proj = projectResolver.value
|
||||
|
|
@ -4547,26 +4569,21 @@ trait BuildExtra extends BuildCommon with DefExtra {
|
|||
mainClass: String,
|
||||
baseArguments: String*
|
||||
): Vector[Setting[_]] = {
|
||||
// TODO: Re-write to avoid InputTask.apply which is deprecated
|
||||
// I tried "Def.spaceDelimited().parsed" (after importing Def.parserToInput)
|
||||
// but it broke actions/run-task
|
||||
// Maybe it needs to be defined inside a Def.inputTask?
|
||||
@nowarn
|
||||
def inputTask[T](f: TaskKey[Seq[String]] => Initialize[Task[T]]): Initialize[InputTask[T]] =
|
||||
InputTask.apply(Def.value((s: State) => Def.spaceDelimited()))(f)
|
||||
|
||||
Vector(
|
||||
scoped := inputTask { result =>
|
||||
initScoped(
|
||||
scoped.scopedKey,
|
||||
ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config))
|
||||
).zipWith(Def.task { ((config / fullClasspath).value, streams.value, result.value) }) {
|
||||
(rTask, t) =>
|
||||
(t, rTask) map { case ((cp, s, args), r) =>
|
||||
r.run(mainClass, data(cp), baseArguments ++ args, s.log).get
|
||||
}
|
||||
}
|
||||
}.evaluated
|
||||
scoped := (Def
|
||||
.input((s: State) => Def.spaceDelimited())
|
||||
.flatMapTask { result =>
|
||||
initScoped(
|
||||
scoped.scopedKey,
|
||||
ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config))
|
||||
).zipWith(Def.task { ((config / fullClasspath).value, streams.value, result) }) {
|
||||
(rTask, t) =>
|
||||
(t, rTask) mapN { case ((cp, s, args), r) =>
|
||||
r.run(mainClass, data(cp), baseArguments ++ args, s.log).get
|
||||
}
|
||||
}
|
||||
})
|
||||
.value
|
||||
) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value)
|
||||
}
|
||||
|
||||
|
|
@ -4584,7 +4601,7 @@ trait BuildExtra extends BuildCommon with DefExtra {
|
|||
scoped.scopedKey,
|
||||
ClassLoaders.runner mapReferenced Project.mapScope(s => s.in(config))
|
||||
).zipWith(Def.task { ((config / fullClasspath).value, streams.value) }) { case (rTask, t) =>
|
||||
(t, rTask) map { case ((cp, s), r) =>
|
||||
(t, rTask).mapN { case ((cp: Keys.Classpath, s: Streams), r: ScalaRun) =>
|
||||
r.run(mainClass, data(cp), arguments, s.log).get
|
||||
}
|
||||
}.value
|
||||
|
|
@ -4628,7 +4645,7 @@ trait BuildCommon {
|
|||
final class RichPathFinder private[sbt] (s: PathFinder) {
|
||||
|
||||
/** Converts the `PathFinder` to a `Classpath`, which is an alias for `Seq[Attributed[File]]`. */
|
||||
def classpath: Classpath = Attributed blankSeq s.get
|
||||
def classpath: Classpath = Attributed.blankSeq(s.get())
|
||||
}
|
||||
final class RichAttributed private[sbt] (s: Seq[Attributed[File]]) {
|
||||
|
||||
|
|
|
|||
|
|
@ -12,7 +12,8 @@ import java.util.concurrent.atomic.AtomicReference
|
|||
|
||||
import sbt.Def.{ ScopedKey, Setting, dummyState }
|
||||
import sbt.Keys.{ TaskProgress => _, name => _, _ }
|
||||
import sbt.Project.richInitializeTask
|
||||
// import sbt.Project.richInitializeTask
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.Scope.Global
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.Aggregation.KeyValue
|
||||
|
|
@ -390,17 +391,20 @@ object EvaluateTask {
|
|||
def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = {
|
||||
val all = Incomplete linearize result
|
||||
val keyed =
|
||||
all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) => (key, msg, ex) }
|
||||
all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) =>
|
||||
(key, msg, ex)
|
||||
}
|
||||
|
||||
import ExceptionCategory._
|
||||
for ((key, msg, Some(ex)) <- keyed) {
|
||||
for {
|
||||
(key, msg, Some(ex)) <- keyed
|
||||
} do
|
||||
def log = getStreams(key, streams).log
|
||||
ExceptionCategory(ex) match {
|
||||
case AlreadyHandled => ()
|
||||
case m: MessageOnly => if (msg.isEmpty) log.error(m.message)
|
||||
case f: Full => log.trace(f.exception)
|
||||
}
|
||||
}
|
||||
|
||||
for ((key, msg, ex) <- keyed if msg.isDefined || ex.isDefined) {
|
||||
val msgString = (msg.toList ++ ex.toList.map(ErrorHandling.reducedToString)).mkString("\n\t")
|
||||
|
|
@ -633,7 +637,7 @@ object EvaluateTask {
|
|||
val injectStreams: ScopedKey[_] => Seq[Setting[_]] = scoped =>
|
||||
if (scoped.key == streams.key) {
|
||||
Seq(scoped.scope / streams := {
|
||||
(streamsManager map { mgr =>
|
||||
(streamsManager.map { mgr =>
|
||||
val stream = mgr(scoped)
|
||||
stream.open()
|
||||
stream
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ import sbt.internal.util.AttributeKey
|
|||
import sbt.util.Show
|
||||
import std.Transform.DummyTaskMap
|
||||
import sbt.EvaluateTask.extractedTaskConfig
|
||||
import sbt.ProjectExtra.setProject
|
||||
import scala.annotation.nowarn
|
||||
|
||||
final case class Extracted(
|
||||
|
|
@ -148,6 +149,7 @@ final case class Extracted(
|
|||
state: State,
|
||||
sessionSettings: Seq[Setting[_]],
|
||||
): State = {
|
||||
import sbt.ProjectExtra.extract
|
||||
val appendSettings =
|
||||
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
|
||||
val newStructure = Load.reapply(sessionSettings ++ appendSettings, structure)
|
||||
|
|
|
|||
|
|
@ -92,7 +92,7 @@ object Keys {
|
|||
val onLoadMessage = settingKey[String]("Message to display when the project is loaded.").withRank(DSetting)
|
||||
val transformState = AttributeKey[State => State]("transformState", "State transformation to apply after tasks run.", DSetting)
|
||||
|
||||
val onComplete = settingKey[() => Unit]("Hook to run when task evaluation completes. The type of this setting is subject to change, pending the resolution of SI-2915.").withRank(DSetting)
|
||||
val onComplete = Def.onComplete // settingKey[() => Unit]("Hook to run when task evaluation completes. The type of this setting is subject to change, pending the resolution of SI-2915.").withRank(DSetting)
|
||||
|
||||
// Command keys
|
||||
val historyPath = SettingKey(BasicKeys.historyPath)
|
||||
|
|
|
|||
|
|
@ -17,7 +17,8 @@ import java.util.concurrent.atomic.AtomicBoolean
|
|||
|
||||
import sbt.BasicCommandStrings.{ JavaClient, Shell, Shutdown, TemplateCommand }
|
||||
import sbt.Project.LoadAction
|
||||
import sbt.compiler.EvalImports
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.internal.EvalImports
|
||||
import sbt.internal.Aggregation.AnyKeys
|
||||
import sbt.internal.CommandStrings.BootCommand
|
||||
import sbt.internal._
|
||||
|
|
@ -41,10 +42,11 @@ import scala.concurrent.duration.Duration
|
|||
import scala.util.control.NonFatal
|
||||
|
||||
/** This class is the entry point for sbt. */
|
||||
final class xMain extends xsbti.AppMain {
|
||||
final class xMain extends xsbti.AppMain:
|
||||
def run(configuration: xsbti.AppConfiguration): xsbti.MainResult =
|
||||
new XMainConfiguration().run("xMain", configuration)
|
||||
}
|
||||
end xMain
|
||||
|
||||
private[sbt] object xMain {
|
||||
private[sbt] def dealiasBaseDirectory(config: xsbti.AppConfiguration): xsbti.AppConfiguration = {
|
||||
val dealiasedBase = config.baseDirectory.getCanonicalFile
|
||||
|
|
@ -56,6 +58,7 @@ private[sbt] object xMain {
|
|||
override def provider: AppProvider = config.provider()
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
|
||||
try {
|
||||
import BasicCommandStrings.{ DashDashClient, DashDashServer, runEarly }
|
||||
|
|
@ -64,6 +67,7 @@ private[sbt] object xMain {
|
|||
import sbt.internal.CommandStrings.{ BootCommand, DefaultsCommand, InitCommand }
|
||||
import sbt.internal.client.NetworkClient
|
||||
|
||||
Plugins.defaultRequires = sbt.plugins.JvmPlugin
|
||||
// if we detect -Dsbt.client=true or -client, run thin client.
|
||||
val clientModByEnv = SysProp.client
|
||||
val userCommands = configuration.arguments
|
||||
|
|
@ -127,8 +131,9 @@ private[sbt] object xMain {
|
|||
)
|
||||
.put(BasicKeys.detachStdio, detachStdio)
|
||||
val state = bootServerSocket match {
|
||||
case Some(l) => state0.put(Keys.bootServerSocket, l)
|
||||
case _ => state0
|
||||
// todo: fix this
|
||||
// case Some(l) => state0.put(Keys.bootServerSocket, l)
|
||||
case _ => state0
|
||||
}
|
||||
try StandardMain.runManaged(state)
|
||||
finally bootServerSocket.foreach(_.close())
|
||||
|
|
@ -557,10 +562,10 @@ object BuiltinCommands {
|
|||
def continuous: Command = Continuous.continuous
|
||||
|
||||
private[this] def loadedEval(s: State, arg: String): Unit = {
|
||||
val extracted = Project extract s
|
||||
val extracted = Project.extract(s)
|
||||
import extracted._
|
||||
val result =
|
||||
session.currentEval().eval(arg, srcName = "<eval>", imports = autoImports(extracted))
|
||||
session.currentEval().evalInfer(expression = arg, imports = autoImports(extracted))
|
||||
s.log.info(s"ans: ${result.tpe} = ${result.getValue(currentLoader)}")
|
||||
}
|
||||
|
||||
|
|
@ -568,8 +573,8 @@ object BuiltinCommands {
|
|||
val app = s.configuration.provider
|
||||
val classpath = app.mainClasspath ++ app.scalaProvider.jars
|
||||
val result = Load
|
||||
.mkEval(classpath, s.baseDir, Nil)
|
||||
.eval(arg, srcName = "<eval>", imports = new EvalImports(Nil, ""))
|
||||
.mkEval(classpath.map(_.toPath()), s.baseDir, Nil)
|
||||
.evalInfer(expression = arg, imports = EvalImports(Nil))
|
||||
s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}")
|
||||
}
|
||||
|
||||
|
|
@ -646,7 +651,7 @@ object BuiltinCommands {
|
|||
(s, sks) match {
|
||||
case (s, (pattern, Some(sks))) =>
|
||||
val (str, _, display) = extractLast(s)
|
||||
Output.lastGrep(sks, str.streams(s), pattern, printLast)(display)
|
||||
Output.lastGrep(sks, str.streams(s), pattern, printLast)(using display)
|
||||
keepLastLog(s)
|
||||
case (s, (pattern, None)) =>
|
||||
for (logFile <- lastLogFile(s)) yield Output.lastGrep(logFile, pattern, printLast)
|
||||
|
|
@ -668,7 +673,8 @@ object BuiltinCommands {
|
|||
}
|
||||
|
||||
import Def.ScopedKey
|
||||
type KeysParser = Parser[Seq[ScopedKey[T]] forSome { type T }]
|
||||
// type PolyStateKeysParser = [a] => State => Parser[Seq[ScopedKey[a]]]
|
||||
type KeysParser = Parser[Seq[ScopedKey[Any]]]
|
||||
|
||||
val spacedAggregatedParser: State => KeysParser = (s: State) =>
|
||||
Act.requireSession(s, token(Space) ~> Act.aggregatedKeyParser(s))
|
||||
|
|
@ -728,7 +734,7 @@ object BuiltinCommands {
|
|||
|
||||
private[this] def lastImpl(s: State, sks: AnyKeys, sid: Option[String]): State = {
|
||||
val (str, _, display) = extractLast(s)
|
||||
Output.last(sks, str.streams(s), printLast, sid)(display)
|
||||
Output.last(sks, str.streams(s), printLast, sid)(using display)
|
||||
keepLastLog(s)
|
||||
}
|
||||
|
||||
|
|
@ -759,7 +765,7 @@ object BuiltinCommands {
|
|||
def printLast: Seq[String] => Unit = _ foreach println
|
||||
|
||||
def autoImports(extracted: Extracted): EvalImports =
|
||||
new EvalImports(imports(extracted), "<auto-imports>")
|
||||
new EvalImports(imports(extracted).map(_._1)) // <auto-imports>
|
||||
|
||||
def imports(extracted: Extracted): Seq[(String, Int)] = {
|
||||
val curi = extracted.currentRef.build
|
||||
|
|
@ -864,7 +870,7 @@ object BuiltinCommands {
|
|||
@tailrec
|
||||
private[this] def doLoadFailed(s: State, loadArg: String): State = {
|
||||
s.log.warn("Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? (default: r)")
|
||||
val result =
|
||||
val result: Int =
|
||||
try
|
||||
ITerminal.get.withRawInput(System.in.read) match {
|
||||
case -1 => 'q'.toInt
|
||||
|
|
@ -944,7 +950,7 @@ object BuiltinCommands {
|
|||
state.log.info(s"welcome to sbt $appVersion ($javaVersion)")
|
||||
}
|
||||
|
||||
def doLoadProject(s0: State, action: LoadAction.Value): State = {
|
||||
def doLoadProject(s0: State, action: LoadAction): State = {
|
||||
welcomeBanner(s0)
|
||||
checkSBTVersionChanged(s0)
|
||||
val (s1, base) = Project.loadAction(SessionVar.clear(s0), action)
|
||||
|
|
@ -954,7 +960,7 @@ object BuiltinCommands {
|
|||
val (eval, structure) =
|
||||
try Load.defaultLoad(s2, base, s2.log, Project.inPluginProject(s2), Project.extraBuilds(s2))
|
||||
catch {
|
||||
case ex: compiler.EvalException =>
|
||||
case ex: sbt.internal.EvalException =>
|
||||
s0.log.debug(ex.getMessage)
|
||||
ex.getStackTrace map (ste => s"\tat $ste") foreach (s0.log.debug(_))
|
||||
ex.setStackTrace(Array.empty)
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
package sbt
|
||||
|
||||
import sbt.BasicCommandStrings.{ StashOnFailure, networkExecPrefix }
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.internal.langserver.ErrorCodes
|
||||
import sbt.internal.nio.CheckBuildSources.CheckBuildSourcesKey
|
||||
import sbt.internal.protocol.JsonRpcResponseError
|
||||
|
|
@ -97,7 +98,7 @@ object MainLoop {
|
|||
} else None
|
||||
val sbtVersion = sbtVersionOpt.getOrElse(appId.version)
|
||||
val currentArtDirs = defaultBoot * "*" / appId.groupID / appId.name / sbtVersion
|
||||
currentArtDirs.get foreach { dir =>
|
||||
currentArtDirs.get().foreach { dir =>
|
||||
state.log.info(s"deleting $dir")
|
||||
IO.delete(dir)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ object DefaultOptions {
|
|||
import Opts._
|
||||
import sbt.io.syntax._
|
||||
import BuildPaths.{ getGlobalBase, getGlobalSettingsDirectory }
|
||||
import Project.extract
|
||||
import sbt.ProjectExtra.extract
|
||||
import Def.Setting
|
||||
|
||||
def javac: Seq[String] = compile.encoding("UTF-8")
|
||||
|
|
@ -92,6 +92,10 @@ object DefaultOptions {
|
|||
|
||||
def shellPrompt(version: String): State => String =
|
||||
s =>
|
||||
"%s:%s:%s> ".format(s.configuration.provider.id.name, extract(s).currentProject.id, version)
|
||||
"%s:%s:%s> ".format(
|
||||
s.configuration.provider.id.name,
|
||||
Project.extract(s).currentProject.id,
|
||||
version
|
||||
)
|
||||
def setupShellPrompt: Setting[_] = Keys.shellPrompt := { shellPrompt(Keys.version.value) }
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ import sbt.internal.CommandStrings._
|
|||
import Cross.{ spacedFirst, requireSession }
|
||||
import sbt.librarymanagement.VersionNumber
|
||||
import Project.inScope
|
||||
import ProjectExtra.{ extract, getProject, setProject }
|
||||
import scala.annotation.nowarn
|
||||
|
||||
/**
|
||||
|
|
@ -47,7 +48,7 @@ private[sbt] object PluginCross {
|
|||
val x = Project.extract(state)
|
||||
import x._
|
||||
state.log.info(s"Setting `sbtVersion in pluginCrossBuild` to $version")
|
||||
val add = List(sbtVersion in GlobalScope in pluginCrossBuild :== version) ++
|
||||
val add = List(GlobalScope / pluginCrossBuild / sbtVersion :== version) ++
|
||||
List(scalaVersion := scalaVersionSetting.value) ++
|
||||
inScope(GlobalScope.copy(project = Select(currentRef)))(
|
||||
Seq(scalaVersion := scalaVersionSetting.value)
|
||||
|
|
|
|||
|
|
@ -1,936 +0,0 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.util.Locale
|
||||
import Project._
|
||||
import BasicKeys.serverLogLevel
|
||||
import Keys.{
|
||||
stateBuildStructure,
|
||||
bspEnabled,
|
||||
colorShellPrompt,
|
||||
commands,
|
||||
configuration,
|
||||
historyPath,
|
||||
projectCommand,
|
||||
sessionSettings,
|
||||
shellPrompt,
|
||||
templateResolverInfos,
|
||||
autoStartServer,
|
||||
serverHost,
|
||||
serverIdleTimeout,
|
||||
serverLog,
|
||||
serverPort,
|
||||
serverUseJni,
|
||||
serverAuthentication,
|
||||
serverConnectionType,
|
||||
fullServerHandlers,
|
||||
logLevel,
|
||||
windowsServerSecurityLevel,
|
||||
}
|
||||
import Scope.{ Global, ThisScope }
|
||||
import sbt.SlashSyntax0._
|
||||
import Def.{ Flattened, Initialize, ScopedKey, Setting }
|
||||
import sbt.internal.{
|
||||
Load,
|
||||
BuildStructure,
|
||||
LoadedBuild,
|
||||
LoadedBuildUnit,
|
||||
SettingGraph,
|
||||
SettingCompletions,
|
||||
SessionSettings
|
||||
}
|
||||
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, ~> }
|
||||
import sbt.internal.util.Types.{ const, idFun }
|
||||
import sbt.internal.util.complete.DefaultParsers
|
||||
import sbt.internal.server.ServerHandler
|
||||
import sbt.librarymanagement.Configuration
|
||||
import sbt.util.{ Show, Level }
|
||||
import sjsonnew.JsonFormat
|
||||
|
||||
import language.experimental.macros
|
||||
import scala.concurrent.TimeoutException
|
||||
import scala.concurrent.duration.FiniteDuration
|
||||
|
||||
trait CompositeProject {
|
||||
def componentProjects: Seq[Project]
|
||||
}
|
||||
|
||||
private[sbt] object CompositeProject {
|
||||
|
||||
/**
|
||||
* Expand user defined projects with the component projects of `compositeProjects`.
|
||||
*
|
||||
* If two projects with the same id appear in the user defined projects and
|
||||
* in `compositeProjects.componentProjects`, the user defined project wins.
|
||||
* This is necessary for backward compatibility with the idioms:
|
||||
* {{{
|
||||
* lazy val foo = crossProject
|
||||
* lazy val fooJS = foo.js.settings(...)
|
||||
* lazy val fooJVM = foo.jvm.settings(...)
|
||||
* }}}
|
||||
* and the rarer:
|
||||
* {{{
|
||||
* lazy val fooJS = foo.js.settings(...)
|
||||
* lazy val foo = crossProject
|
||||
* lazy val fooJVM = foo.jvm.settings(...)
|
||||
* }}}
|
||||
*/
|
||||
def expand(compositeProjects: Seq[CompositeProject]): Seq[Project] = {
|
||||
val userProjects = compositeProjects.collect { case p: Project => p }
|
||||
for (p <- compositeProjects.flatMap(_.componentProjects)) yield {
|
||||
userProjects.find(_.id == p.id) match {
|
||||
case Some(userProject) => userProject
|
||||
case None => p
|
||||
}
|
||||
}
|
||||
}.distinct
|
||||
|
||||
}
|
||||
|
||||
sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject {
|
||||
def componentProjects: Seq[Project] = this :: Nil
|
||||
|
||||
private[sbt] def copy(
|
||||
id: String = id,
|
||||
base: File = base,
|
||||
aggregate: Seq[ProjectReference] = aggregate,
|
||||
dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
|
||||
settings: Seq[Setting[_]] = settings,
|
||||
configurations: Seq[Configuration] = configurations
|
||||
): Project =
|
||||
copy2(id, base, aggregate, dependencies, settings, configurations)
|
||||
|
||||
private[this] def copy2(
|
||||
id: String = id,
|
||||
base: File = base,
|
||||
aggregate: Seq[ProjectReference] = aggregate,
|
||||
dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
|
||||
settings: Seq[Setting[_]] = settings,
|
||||
configurations: Seq[Configuration] = configurations,
|
||||
plugins: Plugins = plugins,
|
||||
autoPlugins: Seq[AutoPlugin] = autoPlugins,
|
||||
projectOrigin: ProjectOrigin = projectOrigin,
|
||||
): Project =
|
||||
unresolved(
|
||||
id,
|
||||
base,
|
||||
aggregate = aggregate,
|
||||
dependencies = dependencies,
|
||||
settings = settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
projectOrigin
|
||||
)
|
||||
|
||||
def resolve(resolveRef: ProjectReference => ProjectRef): ResolvedProject = {
|
||||
def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef
|
||||
def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep
|
||||
def resolveDep(d: ClasspathDep[ProjectReference]) =
|
||||
ResolvedClasspathDependency(resolveRef(d.project), d.configuration)
|
||||
resolved(
|
||||
id,
|
||||
base,
|
||||
aggregate = resolveRefs(aggregate),
|
||||
dependencies = resolveDeps(dependencies),
|
||||
settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
projectOrigin
|
||||
)
|
||||
}
|
||||
|
||||
def resolveBuild(resolveRef: ProjectReference => ProjectReference): Project = {
|
||||
def resolveRefs(prs: Seq[ProjectReference]) = prs map resolveRef
|
||||
def resolveDeps(ds: Seq[ClasspathDep[ProjectReference]]) = ds map resolveDep
|
||||
def resolveDep(d: ClasspathDep[ProjectReference]) =
|
||||
ClasspathDependency(resolveRef(d.project), d.configuration)
|
||||
copy2(aggregate = resolveRefs(aggregate), dependencies = resolveDeps(dependencies))
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies the given functions to this Project.
|
||||
* The second function is applied to the result of applying the first to this Project and so on.
|
||||
* The intended use is a convenience for applying default configuration provided by a plugin.
|
||||
*/
|
||||
def configure(transforms: (Project => Project)*): Project = Function.chain(transforms)(this)
|
||||
|
||||
def withId(id: String) = copy(id = id)
|
||||
|
||||
/** Sets the base directory for this project. */
|
||||
def in(dir: File): Project = copy(base = dir)
|
||||
|
||||
/** Adds configurations to this project. Added configurations replace existing configurations with the same name. */
|
||||
def overrideConfigs(cs: Configuration*): Project =
|
||||
copy(configurations = Defaults.overrideConfigs(cs: _*)(configurations))
|
||||
|
||||
/**
|
||||
* Adds configuration at the *start* of the configuration list for this project. Previous configurations replace this prefix
|
||||
* list with the same name.
|
||||
*/
|
||||
private[sbt] def prefixConfigs(cs: Configuration*): Project =
|
||||
copy(configurations = Defaults.overrideConfigs(configurations: _*)(cs))
|
||||
|
||||
/** Adds new configurations directly to this project. To override an existing configuration, use `overrideConfigs`. */
|
||||
def configs(cs: Configuration*): Project = copy(configurations = configurations ++ cs)
|
||||
|
||||
/** Adds classpath dependencies on internal or external projects. */
|
||||
def dependsOn(deps: ClasspathDep[ProjectReference]*): Project =
|
||||
copy(dependencies = dependencies ++ deps)
|
||||
|
||||
/**
|
||||
* Adds projects to be aggregated. When a user requests a task to run on this project from the command line,
|
||||
* the task will also be run in aggregated projects.
|
||||
*/
|
||||
def aggregate(refs: ProjectReference*): Project =
|
||||
copy(aggregate = (aggregate: Seq[ProjectReference]) ++ refs)
|
||||
|
||||
/** Appends settings to the current settings sequence for this project. */
|
||||
def settings(ss: Def.SettingsDefinition*): Project =
|
||||
copy(settings = (settings: Seq[Def.Setting[_]]) ++ Def.settings(ss: _*))
|
||||
|
||||
/**
|
||||
* Sets the [[AutoPlugin]]s of this project.
|
||||
* A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to enable on a project.
|
||||
*/
|
||||
def enablePlugins(ns: Plugins*): Project = setPlugins(ns.foldLeft(plugins)(Plugins.and))
|
||||
|
||||
/** Disable the given plugins on this project. */
|
||||
def disablePlugins(ps: AutoPlugin*): Project =
|
||||
setPlugins(Plugins.and(plugins, Plugins.And(ps.map(p => Plugins.Exclude(p)).toList)))
|
||||
|
||||
private[sbt] def setPlugins(ns: Plugins): Project = copy2(plugins = ns)
|
||||
|
||||
/** Definitively set the [[AutoPlugin]]s for this project. */
|
||||
private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = copy2(autoPlugins = autos)
|
||||
|
||||
/** Definitively set the [[ProjectOrigin]] for this project. */
|
||||
private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy2(projectOrigin = origin)
|
||||
}
|
||||
|
||||
sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {
|
||||
|
||||
/** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]]. */
|
||||
def autoPlugins: Seq[AutoPlugin]
|
||||
|
||||
}
|
||||
|
||||
sealed trait ClasspathDep[PR <: ProjectReference] {
|
||||
def project: PR; def configuration: Option[String]
|
||||
}
|
||||
|
||||
final case class ResolvedClasspathDependency(project: ProjectRef, configuration: Option[String])
|
||||
extends ClasspathDep[ProjectRef]
|
||||
|
||||
final case class ClasspathDependency(project: ProjectReference, configuration: Option[String])
|
||||
extends ClasspathDep[ProjectReference]
|
||||
|
||||
object Project extends ProjectExtra {
|
||||
|
||||
private abstract class ProjectDef[PR <: ProjectReference](
|
||||
val id: String,
|
||||
val base: File,
|
||||
val aggregate: Seq[PR],
|
||||
val dependencies: Seq[ClasspathDep[PR]],
|
||||
val settings: Seq[Def.Setting[_]],
|
||||
val configurations: Seq[Configuration],
|
||||
val plugins: Plugins,
|
||||
val autoPlugins: Seq[AutoPlugin],
|
||||
val projectOrigin: ProjectOrigin
|
||||
) extends ProjectDefinition[PR] {
|
||||
// checks for cyclic references here instead of having to do it in Scope.delegates
|
||||
Dag.topologicalSort(configurations)(_.extendsConfigs)
|
||||
}
|
||||
|
||||
def apply(id: String, base: File): Project =
|
||||
unresolved(id, base, Nil, Nil, Nil, Nil, Plugins.empty, Nil, ProjectOrigin.Organic)
|
||||
|
||||
def showContextKey(state: State): Show[ScopedKey[_]] =
|
||||
showContextKey(state, None)
|
||||
|
||||
def showContextKey(state: State, keyNameColor: Option[String]): Show[ScopedKey[_]] =
|
||||
if (isProjectLoaded(state)) showContextKey2(session(state), keyNameColor)
|
||||
else Def.showFullKey
|
||||
|
||||
@deprecated("Use showContextKey2 which doesn't take the unused structure param", "1.1.1")
|
||||
def showContextKey(
|
||||
session: SessionSettings,
|
||||
structure: BuildStructure,
|
||||
keyNameColor: Option[String] = None
|
||||
): Show[ScopedKey[_]] =
|
||||
showContextKey2(session, keyNameColor)
|
||||
|
||||
def showContextKey2(
|
||||
session: SessionSettings,
|
||||
keyNameColor: Option[String] = None
|
||||
): Show[ScopedKey[_]] =
|
||||
Def.showRelativeKey2(session.current, keyNameColor)
|
||||
|
||||
def showLoadingKey(
|
||||
loaded: LoadedBuild,
|
||||
keyNameColor: Option[String] = None
|
||||
): Show[ScopedKey[_]] =
|
||||
Def.showRelativeKey2(
|
||||
ProjectRef(loaded.root, loaded.units(loaded.root).rootProjects.head),
|
||||
keyNameColor
|
||||
)
|
||||
|
||||
/** This is a variation of def apply that mixes in GeneratedRootProject. */
|
||||
private[sbt] def mkGeneratedRoot(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectReference]
|
||||
): Project = {
|
||||
validProjectID(id).foreach(errMsg => sys.error(s"Invalid project ID: $errMsg"))
|
||||
val plugins = Plugins.empty
|
||||
val origin = ProjectOrigin.GenericRoot
|
||||
new ProjectDef(id, base, aggregate, Nil, Nil, Nil, plugins, Nil, origin)
|
||||
with Project
|
||||
with GeneratedRootProject
|
||||
}
|
||||
|
||||
/** Returns None if `id` is a valid Project ID or Some containing the parser error message if it is not. */
|
||||
def validProjectID(id: String): Option[String] =
|
||||
DefaultParsers.parse(id, DefaultParsers.ID).left.toOption
|
||||
|
||||
private[this] def validProjectIDStart(id: String): Boolean =
|
||||
DefaultParsers.parse(id, DefaultParsers.IDStart).isRight
|
||||
|
||||
/** Constructs a valid Project ID based on `id` and returns it in Right or returns the error message in Left if one cannot be constructed. */
|
||||
def normalizeProjectID(id: String): Either[String, String] = {
|
||||
val attempt = normalizeBase(id)
|
||||
val refined =
|
||||
if (attempt.length < 1) "root"
|
||||
else if (!validProjectIDStart(attempt.substring(0, 1))) "root-" + attempt
|
||||
else attempt
|
||||
validProjectID(refined).toLeft(refined)
|
||||
}
|
||||
private[this] def normalizeBase(s: String) =
|
||||
s.toLowerCase(Locale.ENGLISH).replaceAll("""\W+""", "-")
|
||||
|
||||
/**
|
||||
* Normalize a String so that it is suitable for use as a dependency management module identifier.
|
||||
* This is a best effort implementation, since valid characters are not documented or consistent.
|
||||
*/
|
||||
def normalizeModuleID(id: String): String = normalizeBase(id)
|
||||
|
||||
private def resolved(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectRef],
|
||||
dependencies: Seq[ClasspathDep[ProjectRef]],
|
||||
settings: Seq[Def.Setting[_]],
|
||||
configurations: Seq[Configuration],
|
||||
plugins: Plugins,
|
||||
autoPlugins: Seq[AutoPlugin],
|
||||
origin: ProjectOrigin
|
||||
): ResolvedProject =
|
||||
new ProjectDef[ProjectRef](
|
||||
id,
|
||||
base,
|
||||
aggregate,
|
||||
dependencies,
|
||||
settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
origin
|
||||
) with ResolvedProject
|
||||
|
||||
private def unresolved(
|
||||
id: String,
|
||||
base: File,
|
||||
aggregate: Seq[ProjectReference],
|
||||
dependencies: Seq[ClasspathDep[ProjectReference]],
|
||||
settings: Seq[Def.Setting[_]],
|
||||
configurations: Seq[Configuration],
|
||||
plugins: Plugins,
|
||||
autoPlugins: Seq[AutoPlugin],
|
||||
origin: ProjectOrigin
|
||||
): Project = {
|
||||
validProjectID(id).foreach(errMsg => sys.error("Invalid project ID: " + errMsg))
|
||||
new ProjectDef[ProjectReference](
|
||||
id,
|
||||
base,
|
||||
aggregate,
|
||||
dependencies,
|
||||
settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
origin
|
||||
) with Project
|
||||
}
|
||||
|
||||
final class Constructor(p: ProjectReference) {
|
||||
def %(conf: Configuration): ClasspathDependency = %(conf.name)
|
||||
|
||||
def %(conf: String): ClasspathDependency = ClasspathDependency(p, Some(conf))
|
||||
}
|
||||
|
||||
def getOrError[T](state: State, key: AttributeKey[T], msg: String): T =
|
||||
state get key getOrElse sys.error(msg)
|
||||
|
||||
def structure(state: State): BuildStructure =
|
||||
getOrError(state, stateBuildStructure, "No build loaded.")
|
||||
|
||||
def session(state: State): SessionSettings =
|
||||
getOrError(state, sessionSettings, "Session not initialized.")
|
||||
|
||||
def isProjectLoaded(state: State): Boolean =
|
||||
(state has sessionSettings) && (state has stateBuildStructure)
|
||||
|
||||
def extract(state: State): Extracted = extract(session(state), structure(state))
|
||||
|
||||
private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted =
|
||||
Extracted(st, se, se.current)(showContextKey2(se))
|
||||
|
||||
def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] =
|
||||
ref match { case pr: ProjectRef => getProject(pr, structure); case _ => None }
|
||||
|
||||
def getProject(ref: ProjectRef, structure: BuildStructure): Option[ResolvedProject] =
|
||||
getProject(ref, structure.units)
|
||||
|
||||
def getProject(ref: ProjectRef, structure: LoadedBuild): Option[ResolvedProject] =
|
||||
getProject(ref, structure.units)
|
||||
|
||||
def getProject(ref: ProjectRef, units: Map[URI, LoadedBuildUnit]): Option[ResolvedProject] =
|
||||
(units get ref.build).flatMap(_.defined get ref.project)
|
||||
|
||||
def runUnloadHooks(s: State): State = {
|
||||
val previousOnUnload = orIdentity(s get Keys.onUnload.key)
|
||||
previousOnUnload(s.runExitHooks())
|
||||
}
|
||||
|
||||
def setProject(session: SessionSettings, structure: BuildStructure, s: State): State =
|
||||
setProject(session, structure, s, identity)
|
||||
|
||||
def setProject(
|
||||
session: SessionSettings,
|
||||
structure: BuildStructure,
|
||||
s: State,
|
||||
preOnLoad: State => State
|
||||
): State = {
|
||||
val unloaded = runUnloadHooks(s)
|
||||
val (onLoad, onUnload) = getHooks(structure.data)
|
||||
val newAttrs = unloaded.attributes
|
||||
.put(stateBuildStructure, structure)
|
||||
.put(sessionSettings, session)
|
||||
.put(Keys.onUnload.key, onUnload)
|
||||
val newState = unloaded.copy(attributes = newAttrs)
|
||||
// TODO: Fix this
|
||||
onLoad(
|
||||
preOnLoad(
|
||||
updateCurrent(newState)
|
||||
) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/
|
||||
)
|
||||
}
|
||||
|
||||
def orIdentity[T](opt: Option[T => T]): T => T = opt getOrElse idFun
|
||||
|
||||
def getHook[T](key: SettingKey[T => T], data: Settings[Scope]): T => T =
|
||||
orIdentity((Global / key) get data)
|
||||
|
||||
def getHooks(data: Settings[Scope]): (State => State, State => State) =
|
||||
(getHook(Keys.onLoad, data), getHook(Keys.onUnload, data))
|
||||
|
||||
def current(state: State): ProjectRef = session(state).current
|
||||
|
||||
def updateCurrent(s: State): State = {
|
||||
val structure = Project.structure(s)
|
||||
val ref = Project.current(s)
|
||||
Load.getProject(structure.units, ref.build, ref.project)
|
||||
val msg = (ref / Keys.onLoadMessage) get structure.data getOrElse ""
|
||||
if (!msg.isEmpty) s.log.info(msg)
|
||||
def get[T](k: SettingKey[T]): Option[T] = (ref / k) get structure.data
|
||||
def commandsIn(axis: ResolvedReference) = (axis / commands) get structure.data toList
|
||||
|
||||
val allCommands = commandsIn(ref) ++ commandsIn(
|
||||
BuildRef(ref.build)
|
||||
) ++ ((Global / commands) get structure.data toList)
|
||||
val history = get(historyPath) flatMap idFun
|
||||
val prompt = get(shellPrompt)
|
||||
val newPrompt = get(colorShellPrompt)
|
||||
val trs = ((Global / templateResolverInfos) get structure.data).toList.flatten
|
||||
val startSvr: Option[Boolean] = get(autoStartServer)
|
||||
val host: Option[String] = get(serverHost)
|
||||
val port: Option[Int] = get(serverPort)
|
||||
val enabledBsp: Option[Boolean] = get(bspEnabled)
|
||||
val timeout: Option[Option[FiniteDuration]] = get(serverIdleTimeout)
|
||||
val authentication: Option[Set[ServerAuthentication]] = get(serverAuthentication)
|
||||
val connectionType: Option[ConnectionType] = get(serverConnectionType)
|
||||
val srvLogLevel: Option[Level.Value] = (ref / serverLog / logLevel).get(structure.data)
|
||||
val hs: Option[Seq[ServerHandler]] = get(ThisBuild / fullServerHandlers)
|
||||
val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
|
||||
val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(
|
||||
s.definedCommands,
|
||||
projectCommand
|
||||
)
|
||||
val winSecurityLevel = get(windowsServerSecurityLevel).getOrElse(2)
|
||||
val useJni = get(serverUseJni).getOrElse(false)
|
||||
val newAttrs =
|
||||
s.attributes
|
||||
.put(historyPath.key, history)
|
||||
.put(windowsServerSecurityLevel.key, winSecurityLevel)
|
||||
.put(serverUseJni.key, useJni)
|
||||
.setCond(bspEnabled.key, enabledBsp)
|
||||
.setCond(autoStartServer.key, startSvr)
|
||||
.setCond(serverPort.key, port)
|
||||
.setCond(serverHost.key, host)
|
||||
.setCond(serverAuthentication.key, authentication)
|
||||
.setCond(serverConnectionType.key, connectionType)
|
||||
.setCond(serverIdleTimeout.key, timeout)
|
||||
.put(historyPath.key, history)
|
||||
.put(templateResolverInfos.key, trs)
|
||||
.setCond(shellPrompt.key, prompt)
|
||||
.setCond(colorShellPrompt.key, newPrompt)
|
||||
.setCond(serverLogLevel, srvLogLevel)
|
||||
.setCond(fullServerHandlers.key, hs)
|
||||
s.copy(
|
||||
attributes = newAttrs,
|
||||
definedCommands = newDefinedCommands
|
||||
)
|
||||
}
|
||||
|
||||
def setCond[T](key: AttributeKey[T], vopt: Option[T], attributes: AttributeMap): AttributeMap =
|
||||
attributes.setCond(key, vopt)
|
||||
|
||||
private[sbt] def checkTargets(data: Settings[Scope]): Option[String] = {
|
||||
val dups = overlappingTargets(allTargets(data))
|
||||
if (dups.isEmpty) None
|
||||
else {
|
||||
val dupStrs = dups map { case (dir, scopes) =>
|
||||
s"${dir.getAbsolutePath}:\n\t${scopes.mkString("\n\t")}"
|
||||
}
|
||||
Some(s"Overlapping output directories:${dupStrs.mkString}")
|
||||
}
|
||||
}
|
||||
private[this] def overlappingTargets(
|
||||
targets: Seq[(ProjectRef, File)]
|
||||
): Map[File, Seq[ProjectRef]] =
|
||||
targets.groupBy(_._2).filter(_._2.size > 1).mapValues(_.map(_._1)).toMap
|
||||
|
||||
private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
|
||||
import ScopeFilter._
|
||||
val allProjects = ScopeFilter(Make.inAnyProject)
|
||||
val targetAndRef = Def.setting { (Keys.thisProjectRef.value, Keys.target.value) }
|
||||
new SettingKeyAll(Def.optional(targetAndRef)(idFun))
|
||||
.all(allProjects)
|
||||
.evaluate(data)
|
||||
.flatMap(x => x)
|
||||
}
|
||||
|
||||
def equal(a: ScopedKey[_], b: ScopedKey[_], mask: ScopeMask): Boolean =
|
||||
a.key == b.key && Scope.equal(a.scope, b.scope, mask)
|
||||
|
||||
/** Returns `scoped` with its scope's task axis filled in from the key (via Scope.fillTaskAxis). */
def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] =
  ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
/** Lifts a scope transformation `f` to a polymorphic ScopedKey transformation: rewrites the scope, keeps the key. */
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
  [a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
/** Applies scope transformation `g` to both the key and the referenced keys of every setting. */
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
  val rescope = mapScope(g)
  ss.map(setting => setting.mapKey(rescope).mapReferenced(rescope))
/** Applies scope transformation `g` only to the keys each setting references (not the setting's own key). */
def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] = {
  val rescope = mapScope(g)
  ss.map(setting => setting.mapReferenced(rescope))
}
/** All ScopedKeys that `scope`/`key` delegates to, in the build's delegation order. */
def delegates(structure: BuildStructure, scope: Scope, key: AttributeKey[_]): Seq[ScopedKey[_]] =
  structure.delegates(scope).map(ScopedKey(_, key))
/** Looks up the value stored for `scope`/`key`, wrapping it with its ScopedKey for display. */
def scopedKeyData(
    structure: BuildStructure,
    scope: Scope,
    key: AttributeKey[_]
): Option[ScopedKeyData[_]] =
  structure.data.get(scope, key).map(value => ScopedKeyData(ScopedKey(scope, key), value))
/**
 * Builds the human-readable report shown by the `inspect` command for `scope`/`key`:
 * current value description, key description, providing scope, definition locations,
 * (derived) dependencies, reverse dependencies, delegates, and related keys.
 *
 * @param actual when true, inspects the fully-resolved (delegated) settings rather than the declared ones
 */
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(
    implicit display: Show[ScopedKey[_]]
): String = {
  val scoped = ScopedKey(scope, key)

  // Value summary, or a placeholder when the key has no entry in this scope.
  val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
    "No entry for key."
  }
  val description = key.description match {
    case Some(desc) => "Description:\n\t" + desc + "\n"; case None => ""
  }

  // The scope that actually defines the value (may differ from `scope` due to delegation).
  val definingScope = structure.data.definingScope(scope, key)
  val providedBy = definingScope match {
    case Some(sc) => "Provided by:\n\t" + Scope.display(sc, key.label) + "\n"
    case None => ""
  }
  val definingScoped = definingScope match {
    case Some(sc) => ScopedKey(sc, key); case None => scoped
  }
  val comp =
    Def.compiled(structure.settings, actual)(structure.delegates, structure.scopeLocal, display)
  val definedAt = comp get definingScoped map { c =>
    Def.definedAtString(c.settings).capitalize
  } getOrElse ""

  val cMap = Def.flattenLocals(comp)
  // Same key defined in other scopes — listed as "Related" at the end.
  val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
  // Dependencies introduced by derived settings of `c` (used to mark entries with "D ").
  def derivedDependencies(c: ScopedKey[_]): List[ScopedKey[_]] =
    comp
      .get(c)
      .map(_.settings.flatMap(s => if (s.isDerived) s.dependencies else Nil))
      .toList
      .flatten

  val depends = cMap.get(scoped) match {
    case Some(c) => c.dependencies.toSet; case None => Set.empty
  }
  val derivedDepends: Set[ScopedKey[_]] = derivedDependencies(definingScoped).toSet

  val reverse = reverseDependencies(cMap, scoped)
  val derivedReverse = reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet

  // Renders a dependency section, prefixing derived entries with "D " when any exist.
  def printDepScopes(
      baseLabel: String,
      derivedLabel: String,
      scopes: Iterable[ScopedKey[_]],
      derived: Set[ScopedKey[_]]
  ): String = {
    val label = s"$baseLabel${if (derived.isEmpty) "" else s" (D=$derivedLabel)"}"
    val prefix: ScopedKey[_] => String =
      if (derived.isEmpty) const("") else sk => if (derived(sk)) "D " else " "
    printScopes(label, scopes, prefix = prefix)
  }

  // Renders a labeled, tab-indented list of keys; truncates with "..." beyond `max` entries.
  def printScopes(
      label: String,
      scopes: Iterable[ScopedKey[_]],
      max: Int = Int.MaxValue,
      prefix: ScopedKey[_] => String = const("")
  ) =
    if (scopes.isEmpty) ""
    else {
      val (limited, more) =
        if (scopes.size <= max) (scopes, "\n") else (scopes.take(max), "\n...\n")
      limited.map(sk => prefix(sk) + display.show(sk)).mkString(label + ":\n\t", "\n\t", more)
    }

  data + "\n" +
    description +
    providedBy +
    definedAt +
    printDepScopes("Dependencies", "derived from", depends, derivedDepends) +
    printDepScopes("Reverse dependencies", "derives", reverse, derivedReverse) +
    printScopes("Delegates", delegates(structure, scope, key)) +
    printScopes("Related", related, 10)
}
/** Builds the dependency graph rooted at `scoped` (depth starts at 0); delegates to SettingGraph. */
def settingGraph(structure: BuildStructure, basedir: File, scoped: ScopedKey[_])(implicit
    display: Show[ScopedKey[_]]
): SettingGraph =
  SettingGraph(structure, basedir, scoped, 0)
/** Writes both the actual and declared dependency graphs as .dot files under `basedir`. */
def graphSettings(structure: BuildStructure, basedir: File)(implicit
    display: Show[ScopedKey[_]]
): Unit = {
  def emit(actual: Boolean, name: String): Unit =
    graphSettings(structure, actual, name, new File(basedir, name + ".dot"))
  emit(true, "actual_dependencies")
  emit(false, "declared_dependencies")
}
/** Writes the setting dependency relation for this build to `file` in Graphviz dot format. */
def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)(
    implicit display: Show[ScopedKey[_]]
): Unit = {
  val rel = relation(structure, actual)
  val render = (sk: ScopedKey[_]) => display.show(sk)
  DotGraph.generateGraph(file, graphName, rel, render, render)
}
/** Dependency relation over all ScopedKeys of the build; forwards to the settings-based overload. */
def relation(structure: BuildStructure, actual: Boolean)(implicit
    display: Show[ScopedKey[_]]
): Relation[ScopedKey[_], ScopedKey[_]] =
  relation(structure.settings, actual)(structure.delegates, structure.scopeLocal, display)
/** Computes the key -> dependencies relation by compiling `settings` and flattening local keys. */
private[sbt] def relation(settings: Seq[Def.Setting[_]], actual: Boolean)(implicit
    delegates: Scope => Seq[Scope],
    scopeLocal: Def.ScopeLocal,
    display: Show[ScopedKey[_]]
): Relation[ScopedKey[_], ScopedKey[_]] = {
  val flattened = Def.flattenLocals(Def.compiled(settings, actual))
  flattened.foldLeft(Relation.empty[ScopedKey[_], ScopedKey[_]]) {
    case (rel, (key, compiled)) => rel + (key, compiled.dependencies)
  }
}
/** Renders the scopes in which `key` is defined, one per line. */
def showDefinitions(key: AttributeKey[_], defs: Seq[Scope])(implicit
    display: Show[ScopedKey[_]]
): String =
  showKeys(defs.map(ScopedKey(_, key)))

/** Renders the given keys, one per line. */
def showUses(defs: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
  showKeys(defs)

// Shared formatter: sorted, tab-indented list with surrounding blank lines.
private[this] def showKeys(s: Seq[ScopedKey[_]])(implicit display: Show[ScopedKey[_]]): String =
  s.map(k => display.show(k)).sorted.mkString("\n\t", "\n\t", "\n\n")
/** All scopes in which `key` is defined, derived from the compiled settings relation. */
def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(implicit
    display: Show[ScopedKey[_]]
): Seq[Scope] =
  relation(structure, actual)(display)._1s.toSeq.collect {
    case sk if sk.key == key => sk.scope
  }
/** All ScopedKeys whose definitions depend on `key` (any scope). */
def usedBy(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(implicit
    display: Show[ScopedKey[_]]
): Seq[ScopedKey[_]] =
  relation(structure, actual)(display).all.toSeq.collect {
    case (user, dep) if dep.key == key => user: ScopedKey[_]
  }
/** Keys in `cMap` that list `scoped` among their dependencies (one entry per matching dependency). */
def reverseDependencies(
    cMap: Map[ScopedKey[_], Flattened],
    scoped: ScopedKey[_]
): Iterable[ScopedKey[_]] =
  cMap.flatMap { case (key, compiled) =>
    compiled.dependencies.filter(_ == scoped).map(_ => key)
  }
/** Applies `settings` across the whole build and returns the resulting session; see SettingCompletions.setAll. */
def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings =
  SettingCompletions.setAll(extracted, settings).session
/** State key holding build URIs loaded in addition to those the project defines. */
val ExtraBuilds = AttributeKey[List[URI]](
  "extra-builds",
  "Extra build URIs to load in addition to the ones defined by the project."
)
def extraBuilds(s: State): List[URI] = getOrNil(s, ExtraBuilds)
def getOrNil[T](s: State, key: AttributeKey[List[T]]): List[T] = s.get(key).getOrElse(Nil)
def setExtraBuilds(s: State, extra: List[URI]): State = s.put(ExtraBuilds, extra)
// Prepends, expressed via the general update helper.
def addExtraBuilds(s: State, extra: List[URI]): State =
  updateExtraBuilds(s, existing => extra ::: existing)
def removeExtraBuilds(s: State, remove: List[URI]): State =
  updateExtraBuilds(s, _.filterNot(remove.toSet))
def updateExtraBuilds(s: State, f: List[URI] => List[URI]): State =
  setExtraBuilds(s, f(extraBuilds(s)))
// used by Coursier integration
/**
 * Transitive closure of the classpath-dependency ProjectRefs of `projectRef`
 * (excluding `projectRef` itself), in the build's project-reference order.
 */
private[sbt] def transitiveInterDependencies(
    state: State,
    projectRef: ProjectRef
): Seq[ProjectRef] = {
  // Expands `acc` through the dependency map; visited entries are removed from the map,
  // which guarantees termination even on dependency cycles.
  def dependencies(map: Map[ProjectRef, Seq[ProjectRef]], id: ProjectRef): Set[ProjectRef] = {
    @annotation.tailrec
    def helper(map: Map[ProjectRef, Seq[ProjectRef]], acc: Set[ProjectRef]): Set[ProjectRef] =
      if (acc.exists(map.contains)) {
        val (kept, rem) = map.partition { case (k, _) => acc(k) }
        helper(rem, acc ++ kept.valuesIterator.flatten)
      } else acc
    helper(map - id, map.getOrElse(id, Nil).toSet)
  }
  // Hoisted: the original resolved Project.structure(state) twice.
  val structure = Project.structure(state)
  val allProjectsDeps: Map[ProjectRef, Seq[ProjectRef]] =
    structure.allProjectPairs.map { case (p, ref) => ref -> p.dependencies.map(_.project) }.toMap
  // Fixed: dropped a redundant `.toMap` on a value that is already a Map.
  val deps = dependencies(allProjectsDeps, projectRef)
  structure.allProjectRefs.filter(deps)
}
// Actions for the `reload` command: Return pops the build stack, Current stays, Plugins descends
// into the plugin (meta) build.
// NOTE(review): scala.Enumeration is discouraged in Scala 3, but LoadAction.Value appears in the
// public signature of loadAction, so migrating to an enum would break callers.
object LoadAction extends Enumeration {
  val Return, Current, Plugins = Value
}
import LoadAction._
import DefaultParsers._

// Parses an optional " plugins" / " return" argument; anything absent defaults to Current.
val loadActionParser = token(Space ~> ("plugins" ^^^ Plugins | "return" ^^^ Return)) ?? Current

// Stack of build base directories visited via reload (see the key's description string).
val ProjectReturn =
  AttributeKey[List[File]]("project-return", "Maintains a stack of builds visited using reload.")
def projectReturn(s: State): List[File] = getOrNil(s, ProjectReturn)
// More than one stack entry means we have descended into at least one plugin definition build.
def inPluginProject(s: State): Boolean = projectReturn(s).length > 1
def setProjectReturn(s: State, pr: List[File]): State =
  s.copy(attributes = s.attributes.put(ProjectReturn, pr))
/**
 * Resolves a reload action to the updated State and the base directory of the build to load.
 *  - Return: pops the current build off the return stack; errors when not inside a plugin build.
 *  - Current: reuses the top of the stack, or seeds it with the configured base directory.
 *  - Plugins: pushes the plugin (meta) build's base directory onto the stack.
 */
def loadAction(s: State, action: LoadAction.Value): (State, File) = action match {
  case Return =>
    projectReturn(s) match {
      case _ /* current */ :: returnTo :: rest =>
        (setProjectReturn(s, returnTo :: rest), returnTo)
      case _ => sys.error("Not currently in a plugin definition")
    }

  case Current =>
    val base = s.configuration.baseDirectory
    projectReturn(s) match {
      case Nil => (setProjectReturn(s, base :: Nil), base); case x :: _ => (s, x)
    }

  case Plugins =>
    val (newBase, oldStack) =
      if (Project.isProjectLoaded(s))
        (Project.extract(s).currentUnit.unit.plugins.base, projectReturn(s))
      else // support changing to the definition project if it fails to load
        (BuildPaths.projectStandard(s.baseDir), s.baseDir :: Nil)
    val newS = setProjectReturn(s, newBase :: oldStack)
    (newS, newBase)
}
/**
 * Evaluates `taskKey` with a configuration assembled from the current build's settings
 * (cancellation strategy, progress reporting, restrictions, GC policy).
 */
def runTask[T](
    taskKey: ScopedKey[Task[T]],
    state: State,
    checkCycles: Boolean = false
): Option[(State, Result[T])] = {
  val extracted = Project.extract(state)
  val cancelStrategy = EvaluateTask.cancelStrategy(extracted, extracted.structure, state)
  val progress = EvaluateTask.executeProgress(extracted, extracted.structure, state)
  val restrictions = EvaluateTask.restrictions(state)
  val forceGc = EvaluateTask.forcegc(extracted, extracted.structure)
  val minGcInterval = EvaluateTask.minForcegcInterval(extracted, extracted.structure)
  val config =
    EvaluateTaskConfig(restrictions, checkCycles, progress, cancelStrategy, forceGc, minGcInterval)
  runTask(taskKey, state, config)
}
/** Evaluates `taskKey` in the current project with an explicit task-evaluation configuration. */
def runTask[T](
    taskKey: ScopedKey[Task[T]],
    state: State,
    config: EvaluateTaskConfig
): Option[(State, Result[T])] = {
  val extracted = Project.extract(state)
  EvaluateTask(extracted.structure, taskKey, state, extracted.currentRef, config)
}
/** References a project by its id within the current build. */
def projectToRef(p: Project): ProjectReference = LocalProject(p.id)

// Implicit form of projectToRef, used where a LocalProject is expected.
implicit def projectToLocalProject(p: Project): LocalProject = LocalProject(p.id)
/** Enrichment over a task initialization adding session-variable persistence helpers. */
final class RichTaskSessionVar[S](i: Def.Initialize[Task[S]]) {
  import SessionVar.{ persistAndSet, resolveContext, set, transform => tx }

  /** Runs the underlying task and folds its result into the State via `f`. */
  def updateState(f: (State, S) => State): Def.Initialize[Task[S]] = i(t => tx(t, f))

  /** Stores the task's result under `key` in the session AND persists it (JSON) for later sbt sessions. */
  def storeAs(key: TaskKey[S])(implicit f: JsonFormat[S]): Def.Initialize[Task[S]] = {
    Keys.resolvedScoped.zipWith(i) { (scoped, task) =>
      tx(
        task,
        (state, value) => persistAndSet(resolveContext(key, scoped.scope, state), state, value)(f)
      )
    }
  }

  /** Stores the task's result under `key` for this session only (no persistence). */
  def keepAs(key: TaskKey[S]): Def.Initialize[Task[S]] = {
    i.zipWith(Keys.resolvedScoped) { (t, scoped) =>
      tx(t, (state, value) => set(resolveContext(key, scoped.scope, state), state, value))
    }
  }
}
/**
 * implicitly injected to tasks that return PromiseWrap.
 */
final class RichTaskPromise[A](i: Def.Initialize[Task[PromiseWrap[A]]]) {
  import scala.concurrent.Await
  import scala.concurrent.duration._

  /** Awaits the promise with no time limit. */
  def await: Def.Initialize[Task[A]] = await(Duration.Inf)

  /**
   * Task that blocks until the underlying promise completes. For an infinite timeout it polls in
   * 1-second slices; for a finite timeout it waits once and lets a TimeoutException propagate.
   * Tagged Sentinel — presumably so the scheduler does not count it against concurrency limits;
   * confirm against Tags documentation.
   */
  def await(atMost: Duration): Def.Initialize[Task[A]] =
    (Def
      .task {
        val p = i.value
        var result: Option[A] = None
        if (atMost == Duration.Inf) {
          while (result.isEmpty) {
            try {
              result = Some(Await.result(p.underlying.future, Duration("1s")))
              // fix: removed Thread.sleep(10) that ran *after* the result was already obtained,
              // adding pointless latency; the 1s Await timeout already throttles retries.
            } catch {
              case _: TimeoutException => ()
            }
          }
        } else {
          result = Some(Await.result(p.underlying.future, atMost))
        }
        result.get
      })
      .tag(Tags.Sentinel)
}
import scala.reflect.macros._

/**
 * Scala 2 macro backing `ProjectExtra.project`: reads the name of the val the call is assigned to
 * and expands to `Project(<valName>, new File(<valName>))`. Errors (via definingValName's message)
 * when not directly assigned to a val.
 */
def projectMacroImpl(c: blackbox.Context): c.Expr[Project] = {
  import c.universe._
  val enclosingValName = std.KeyMacro.definingValName(
    c,
    methodName =>
      s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`"""
  )
  val name = c.Expr[String](Literal(Constant(enclosingValName)))
  reify { Project(name.splice, new File(name.splice)) }
}
}
|
||||
|
||||
// Marker trait — presumably tags the auto-generated root project; confirm at its use sites.
private[sbt] trait GeneratedRootProject
|
||||
|
||||
/** Implicit enrichments and scoping helpers mixed into the sbt package object / DSL surface. */
trait ProjectExtra {
  // Enables the `proj % config` dependency-construction syntax for anything convertible to a ProjectReference.
  implicit def configDependencyConstructor[T](
      p: T
  )(implicit ev: T => ProjectReference): Constructor =
    new Constructor(p)

  // Lifts a project reference into a classpath dependency with no configuration mapping.
  implicit def classpathDependency[T](
      p: T
  )(implicit ev: T => ProjectReference): ClasspathDependency = ClasspathDependency(p, None)

  // These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project).
  // Moving Initialize and other settings types to Def and decoupling Project, Def, and Structure means these go here for now
  implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] =
    new Scoped.RichInitializeTask(init)

  implicit def richInitializeInputTask[T](
      init: Initialize[InputTask[T]]
  ): Scoped.RichInitializeInputTask[T] =
    new Scoped.RichInitializeInputTask(init)

  implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] =
    new Scoped.RichInitialize[T](i)

  // Session-variable helpers (storeAs / keepAs / updateState) for task initializations.
  implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] =
    new Project.RichTaskSessionVar(init)

  // `await` support for tasks returning PromiseWrap.
  implicit def sbtRichTaskPromise[A](
      i: Initialize[Task[PromiseWrap[A]]]
  ): Project.RichTaskPromise[A] =
    new Project.RichTaskPromise(i)

  /** Rescopes `ss` to the ThisBuild (build-level) axis. */
  def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
    inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)

  /** Rescopes `ss` to configuration `conf`, also pinning the `configuration` setting itself. */
  def inConfig(conf: Configuration)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
    inScope(ThisScope.copy(config = Select(conf)))((configuration :== conf) +: ss)

  /** Rescopes `ss` to task `t`'s axis. */
  def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
    inScope(ThisScope.copy(task = Select(t.key)))(ss)

  /** Replaces `This` axes in both keys and references of `ss` with those of `scope`. */
  def inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
    Project.transform(Scope.replaceThis(scope), ss)

  private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] =
    inScope(ThisScope.copy(project = Select(ThisBuild)), i)

  private[sbt] def inConfig[T](conf: Configuration, i: Initialize[T]): Initialize[T] =
    inScope(ThisScope.copy(config = Select(conf)), i)

  private[sbt] def inTask[T](t: Scoped, i: Initialize[T]): Initialize[T] =
    inScope(ThisScope.copy(task = Select(t.key)), i)

  // Initialize variant: rescopes only the referenced keys.
  private[sbt] def inScope[T](scope: Scope, i: Initialize[T]): Initialize[T] =
    i mapReferenced Project.mapScope(Scope.replaceThis(scope))

  /**
   * Creates a new Project. This is a macro that expects to be assigned directly to a val.
   * The name of the val is used as the project ID and the name of the base directory of the project.
   */
  def project: Project = macro Project.projectMacroImpl
}
|
|
@ -0,0 +1,764 @@
|
|||
/*
|
||||
* sbt
|
||||
* Copyright 2011 - 2018, Lightbend, Inc.
|
||||
* Copyright 2008 - 2010, Mark Harrah
|
||||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt
|
||||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.util.Locale
|
||||
// import Project._
|
||||
import Keys.{
|
||||
stateBuildStructure,
|
||||
bspEnabled,
|
||||
colorShellPrompt,
|
||||
commands,
|
||||
historyPath,
|
||||
projectCommand,
|
||||
sessionSettings,
|
||||
shellPrompt,
|
||||
templateResolverInfos,
|
||||
autoStartServer,
|
||||
serverHost,
|
||||
serverIdleTimeout,
|
||||
serverLog,
|
||||
serverPort,
|
||||
serverUseJni,
|
||||
serverAuthentication,
|
||||
serverConnectionType,
|
||||
fullServerHandlers,
|
||||
logLevel,
|
||||
windowsServerSecurityLevel,
|
||||
}
|
||||
import Project.LoadAction
|
||||
import Scope.{ Global, ThisScope }
|
||||
import sbt.SlashSyntax0._
|
||||
import Def.{ Flattened, Initialize, ScopedKey, Setting }
|
||||
import sbt.internal.{
|
||||
Load,
|
||||
BuildStructure,
|
||||
LoadedBuild,
|
||||
LoadedBuildUnit,
|
||||
SettingGraph,
|
||||
SettingCompletions,
|
||||
SessionSettings
|
||||
}
|
||||
import sbt.internal.util.{ AttributeKey, AttributeMap, Dag, Relation, Settings, ~> }
|
||||
import sbt.internal.util.Types.const // , idFun }
|
||||
import sbt.internal.util.complete.DefaultParsers
|
||||
import sbt.internal.server.ServerHandler
|
||||
import sbt.librarymanagement.Configuration
|
||||
import sbt.util.{ Show, Level }
|
||||
import sjsonnew.JsonFormat
|
||||
import scala.concurrent.{ Await, TimeoutException }
|
||||
import scala.concurrent.duration.*
|
||||
|
||||
/*
|
||||
sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeProject {
|
||||
def componentProjects: Seq[Project] = this :: Nil
|
||||
|
||||
private[sbt] def copy(
|
||||
id: String = id,
|
||||
base: File = base,
|
||||
aggregate: Seq[ProjectReference] = aggregate,
|
||||
dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
|
||||
settings: Seq[Setting[_]] = settings,
|
||||
configurations: Seq[Configuration] = configurations
|
||||
): Project =
|
||||
copy2(id, base, aggregate, dependencies, settings, configurations)
|
||||
|
||||
private[this] def copy2(
|
||||
id: String = id,
|
||||
base: File = base,
|
||||
aggregate: Seq[ProjectReference] = aggregate,
|
||||
dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
|
||||
settings: Seq[Setting[_]] = settings,
|
||||
configurations: Seq[Configuration] = configurations,
|
||||
plugins: Plugins = plugins,
|
||||
autoPlugins: Seq[AutoPlugin] = autoPlugins,
|
||||
projectOrigin: ProjectOrigin = projectOrigin,
|
||||
): Project =
|
||||
unresolved(
|
||||
id,
|
||||
base,
|
||||
aggregate = aggregate,
|
||||
dependencies = dependencies,
|
||||
settings = settings,
|
||||
configurations,
|
||||
plugins,
|
||||
autoPlugins,
|
||||
projectOrigin
|
||||
)
|
||||
*/
|
||||
|
||||
/*
|
||||
/** Adds new configurations directly to this project. To override an existing configuration, use `overrideConfigs`. */
|
||||
def configs(cs: Configuration*): Project = copy(configurations = configurations ++ cs)
|
||||
|
||||
/** Adds classpath dependencies on internal or external projects. */
|
||||
def dependsOn(deps: ClasspathDep[ProjectReference]*): Project =
|
||||
copy(dependencies = dependencies ++ deps)
|
||||
|
||||
/**
|
||||
* Adds projects to be aggregated. When a user requests a task to run on this project from the command line,
|
||||
* the task will also be run in aggregated projects.
|
||||
*/
|
||||
def aggregate(refs: ProjectReference*): Project =
|
||||
copy(aggregate = (aggregate: Seq[ProjectReference]) ++ refs)
|
||||
|
||||
/** Appends settings to the current settings sequence for this project. */
|
||||
def settings(ss: Def.SettingsDefinition*): Project =
|
||||
copy(settings = (settings: Seq[Def.Setting[_]]) ++ Def.settings(ss: _*))
|
||||
|
||||
/**
|
||||
* Sets the [[AutoPlugin]]s of this project.
|
||||
* A [[AutoPlugin]] is a common label that is used by plugins to determine what settings, if any, to enable on a project.
|
||||
*/
|
||||
def enablePlugins(ns: Plugins*): Project = setPlugins(ns.foldLeft(plugins)(Plugins.and))
|
||||
|
||||
/** Disable the given plugins on this project. */
|
||||
def disablePlugins(ps: AutoPlugin*): Project =
|
||||
setPlugins(Plugins.and(plugins, Plugins.And(ps.map(p => Plugins.Exclude(p)).toList)))
|
||||
|
||||
private[sbt] def setPlugins(ns: Plugins): Project = copy2(plugins = ns)
|
||||
|
||||
/** Definitively set the [[AutoPlugin]]s for this project. */
|
||||
private[sbt] def setAutoPlugins(autos: Seq[AutoPlugin]): Project = copy2(autoPlugins = autos)
|
||||
|
||||
/** Definitively set the [[ProjectOrigin]] for this project. */
|
||||
private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy2(projectOrigin = origin)
|
||||
}
|
||||
|
||||
sealed trait ResolvedProject extends ProjectDefinition[ProjectRef] {
|
||||
|
||||
/** The [[AutoPlugin]]s enabled for this project as computed from [[plugins]]. */
|
||||
def autoPlugins: Seq[AutoPlugin]
|
||||
|
||||
}
|
||||
*/
|
||||
|
||||
/** Companion holding the State keys used by the reload / extra-build machinery. */
object ProjectExtra extends ProjectExtra:
  // NOTE(review): same name/description as Project.ExtraBuilds — presumably the Scala 3 replacement;
  // confirm the old key is retired to avoid two keys with the label "extra-builds".
  val extraBuildsKey: AttributeKey[List[URI]] = AttributeKey[List[URI]](
    "extra-builds",
    "Extra build URIs to load in addition to the ones defined by the project."
  )
  // Stack of build base directories visited via reload (see description string).
  val projectReturnKey: AttributeKey[List[File]] =
    AttributeKey[List[File]]("project-return", "Maintains a stack of builds visited using reload.")
trait ProjectExtra:
|
||||
import ProjectExtra.projectReturnKey
|
||||
|
||||
/** Rescopes `ss` to configuration `conf`, also pinning the `configuration` setting itself. */
def inConfig(conf: Configuration)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
  val confScope = ThisScope.copy(config = Select(conf))
  Project.inScope(confScope)((Keys.configuration :== conf) +: ss)
extension (self: Project)
  /** Adds configurations to this project. Added configurations replace existing configurations with the same name. */
  def overrideConfigs(cs: Configuration*): Project =
    self.copy(
      configurations = Defaults.overrideConfigs(cs: _*)(self.configurations),
    )

  /**
   * Adds configuration at the *start* of the configuration list for this project. Previous configurations replace this prefix
   * list with the same name.
   */
  // Note the argument order is deliberately flipped relative to overrideConfigs: the existing
  // configurations win over the supplied prefix on name collisions.
  private[sbt] def prefixConfigs(cs: Configuration*): Project =
    self.copy(
      configurations = Defaults.overrideConfigs(self.configurations: _*)(cs),
    )
extension (m: Project.type)
|
||||
/*
|
||||
|
||||
*/
|
||||
|
||||
/*
|
||||
private abstract class ProjectDef[PR <: ProjectReference](
|
||||
val id: String,
|
||||
val base: File,
|
||||
val aggregate: Seq[PR],
|
||||
val dependencies: Seq[ClasspathDep[PR]],
|
||||
val settings: Seq[Def.Setting[_]],
|
||||
val configurations: Seq[Configuration],
|
||||
val plugins: Plugins,
|
||||
val autoPlugins: Seq[AutoPlugin],
|
||||
val projectOrigin: ProjectOrigin
|
||||
) extends ProjectDefinition[PR] {
|
||||
// checks for cyclic references here instead of having to do it in Scope.delegates
|
||||
Dag.topologicalSort(configurations)(_.extendsConfigs)
|
||||
}
|
||||
*/
|
||||
|
||||
/** Show instance for keys relative to the current project, without coloring. */
def showContextKey(state: State): Show[ScopedKey[_]] = showContextKey(state, None)

/** Show instance relative to the current session's project; falls back to fully-qualified keys when no build is loaded. */
def showContextKey(state: State, keyNameColor: Option[String]): Show[ScopedKey[_]] =
  if isProjectLoaded(state) then showContextKey2(session(state), keyNameColor)
  else Def.showFullKey
// @deprecated("Use showContextKey2 which doesn't take the unused structure param", "1.1.1")
|
||||
// def showContextKey(
|
||||
// session: SessionSettings,
|
||||
// structure: BuildStructure,
|
||||
// keyNameColor: Option[String] = None
|
||||
// ): Show[ScopedKey[_]] =
|
||||
// showContextKey2(session, keyNameColor)
|
||||
|
||||
/** Show instance rendering keys relative to the session's current project. */
def showContextKey2(
    session: SessionSettings,
    keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
  Def.showRelativeKey2(session.current, keyNameColor)
/** Show instance used during loading: renders keys relative to the root build's first root project. */
def showLoadingKey(
    loaded: LoadedBuild,
    keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
  val rootRef = ProjectRef(loaded.root, loaded.units(loaded.root).rootProjects.head)
  Def.showRelativeKey2(rootRef, keyNameColor)
/*
|
||||
|
||||
|
||||
|
||||
final class Constructor(p: ProjectReference) {
|
||||
def %(conf: Configuration): ClasspathDependency = %(conf.name)
|
||||
|
||||
def %(conf: String): ClasspathDependency = ClasspathDependency(p, Some(conf))
|
||||
}
|
||||
*/
|
||||
|
||||
/** Reads `key` from the State or fails with `msg`. */
def getOrError[T](state: State, key: AttributeKey[T], msg: String): T =
  state.get(key) match
    case Some(value) => value
    case None        => sys.error(msg)

/** The loaded build structure; fails when no build has been loaded. */
def structure(state: State): BuildStructure =
  Project.getOrError(state, Keys.stateBuildStructure, "No build loaded.")

/** The current session settings; fails when the session has not been initialized. */
def session(state: State): SessionSettings =
  Project.getOrError(state, Keys.sessionSettings, "Session not initialized.")

/** True when both the session and the build structure are present in the State. */
def isProjectLoaded(state: State): Boolean =
  state.has(Keys.sessionSettings) && state.has(Keys.stateBuildStructure)
/** Convenience Extracted view over the loaded build; fails when no build is loaded. */
def extract(state: State): Extracted =
  val currentSession = Project.session(state)
  Project.extract(currentSession, Project.structure(state))

// Builds the Extracted with a Show instance relative to the session's current project.
private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted =
  Extracted(st, se, se.current)(Project.showContextKey2(se))
/** Resolves `ref` to its project only when it is a ProjectRef; other reference kinds yield None. */
def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] =
  ref match
    case pr: ProjectRef => getProject(pr, structure)
    case _              => None

def getProject(ref: ProjectRef, structure: BuildStructure): Option[ResolvedProject] =
  getProject(ref, structure.units)

def getProject(ref: ProjectRef, structure: LoadedBuild): Option[ResolvedProject] =
  getProject(ref, structure.units)

/** Looks up the build unit by URI, then the project by id within it. */
def getProject(ref: ProjectRef, units: Map[URI, LoadedBuildUnit]): Option[ResolvedProject] =
  units.get(ref.build).flatMap(unit => unit.defined.get(ref.project))
/** Runs exit hooks, then the registered onUnload hook (identity when none is set). */
def runUnloadHooks(s: State): State =
  val onUnloadHook = orIdentity(s.get(Keys.onUnload.key))
  onUnloadHook(s.runExitHooks())
/** Installs a freshly loaded build into the State with no extra pre-onLoad step. */
def setProject(session: SessionSettings, structure: BuildStructure, s: State): State =
  setProject(session, structure, s, identity)

/**
 * Installs `structure`/`session` into the State: first unloads the previous build (exit hooks +
 * onUnload), then stores the new attributes, refreshes the current-project state, applies
 * `preOnLoad`, and finally runs the new build's onLoad hook — in that order.
 */
def setProject(
    session: SessionSettings,
    structure: BuildStructure,
    s: State,
    preOnLoad: State => State
): State = {
  val unloaded = Project.runUnloadHooks(s)
  val (onLoad, onUnload) = getHooks(structure.data)
  val newAttrs = unloaded.attributes
    .put(stateBuildStructure, structure)
    .put(sessionSettings, session)
    .put(Keys.onUnload.key, onUnload)
  val newState = unloaded.copy(attributes = newAttrs)
  // TODO: Fix this
  onLoad(
    preOnLoad(
      updateCurrent(newState)
    ) /*LogManager.setGlobalLogLevels(updateCurrent(newState), structure.data)*/
  )
}
/** Unwraps an optional endo-function, defaulting to identity. */
def orIdentity[A](opt: Option[A => A]): A => A =
  opt match
    case Some(f) => f
    case None    => identity

/** Reads a globally-scoped hook setting, defaulting to identity when unset. */
def getHook[A](key: SettingKey[A => A], data: Settings[Scope]): A => A =
  orIdentity((Global / key).get(data))

/** The (onLoad, onUnload) hook pair configured in `data`. */
def getHooks(data: Settings[Scope]): (State => State, State => State) =
  (getHook(Keys.onLoad, data), getHook(Keys.onUnload, data))

/** The session's current project reference. */
def current(state: State): ProjectRef = session(state).current
/**
 * Refreshes State attributes and commands after the current project changes: prints the
 * onLoadMessage, re-reads project/build/global commands, and copies server, prompt, history and
 * template settings from the settings data into State attributes. `setCond` entries are only
 * written when the setting is defined; unconditional `put`s always overwrite.
 */
def updateCurrent(s: State): State = {
  val structure = Project.structure(s)
  val ref = Project.current(s)
  // Validates that the current project still exists in the loaded units (errors otherwise).
  Load.getProject(structure.units, ref.build, ref.project)
  val msg = (ref / Keys.onLoadMessage) get structure.data getOrElse ""
  if (!msg.isEmpty) s.log.info(msg)
  def get[T](k: SettingKey[T]): Option[T] = (ref / k) get structure.data
  def commandsIn(axis: ResolvedReference) = (axis / commands) get structure.data toList

  // Commands come from the project, its build, and the global scope.
  val allCommands = commandsIn(ref) ++ commandsIn(
    BuildRef(ref.build)
  ) ++ ((Global / commands) get structure.data toList)
  val history = get(historyPath).flatMap(identity)
  val prompt = get(shellPrompt)
  val newPrompt = get(colorShellPrompt)
  val trs = ((Global / templateResolverInfos) get structure.data).toList.flatten
  val startSvr: Option[Boolean] = get(autoStartServer)
  val host: Option[String] = get(serverHost)
  val port: Option[Int] = get(serverPort)
  val enabledBsp: Option[Boolean] = get(bspEnabled)
  val timeout: Option[Option[FiniteDuration]] = get(serverIdleTimeout)
  val authentication: Option[Set[ServerAuthentication]] = get(serverAuthentication)
  val connectionType: Option[ConnectionType] = get(serverConnectionType)
  val srvLogLevel: Option[Level.Value] = (ref / serverLog / logLevel).get(structure.data)
  val hs: Option[Seq[ServerHandler]] = get(ThisBuild / fullServerHandlers)
  // Project commands are tagged so they can be removed wholesale on the next project switch.
  val commandDefs = allCommands.distinct.flatten[Command].map(_ tag (projectCommand, true))
  val newDefinedCommands = commandDefs ++ BasicCommands.removeTagged(
    s.definedCommands,
    projectCommand
  )
  val winSecurityLevel = get(windowsServerSecurityLevel).getOrElse(2)
  val useJni = get(serverUseJni).getOrElse(false)
  val newAttrs =
    s.attributes
      .put(historyPath.key, history)
      .put(windowsServerSecurityLevel.key, winSecurityLevel)
      .put(serverUseJni.key, useJni)
      .setCond(bspEnabled.key, enabledBsp)
      .setCond(autoStartServer.key, startSvr)
      .setCond(serverPort.key, port)
      .setCond(serverHost.key, host)
      .setCond(serverAuthentication.key, authentication)
      .setCond(serverConnectionType.key, connectionType)
      .setCond(serverIdleTimeout.key, timeout)
      // fix: dropped a second, duplicate `.put(historyPath.key, history)` that appeared here.
      .put(templateResolverInfos.key, trs)
      .setCond(shellPrompt.key, prompt)
      .setCond(colorShellPrompt.key, newPrompt)
      .setCond(BasicKeys.serverLogLevel, srvLogLevel)
      .setCond(fullServerHandlers.key, hs)
  s.copy(
    attributes = newAttrs,
    definedCommands = newDefinedCommands
  )
}
/** Sets `key` in `attributes` only when `vopt` is defined; delegates to AttributeMap.setCond. */
def setCond[T](key: AttributeKey[T], vopt: Option[T], attributes: AttributeMap): AttributeMap =
  attributes.setCond(key, vopt)
/**
 * Validates that no two projects share the same `target` directory.
 * Returns Some(message) listing the offending directories, or None when all targets are distinct.
 */
private[sbt] def checkTargets(data: Settings[Scope]): Option[String] =
  val duplicated = overlappingTargets(allTargets(data))
  if duplicated.isEmpty then None
  else
    val rendered = duplicated.map { case (dir, scopes) =>
      s"${dir.getAbsolutePath}:\n\t${scopes.mkString("\n\t")}"
    }
    Some(s"Overlapping output directories:${rendered.mkString}")
/**
 * Groups target directories by file and keeps only those claimed by more than one project.
 * Uses groupMap instead of groupBy + the deprecated strict Map.mapValues, saving a pass.
 */
private[this] def overlappingTargets(
    targets: Seq[(ProjectRef, File)]
): Map[File, Seq[ProjectRef]] =
  targets.groupMap(_._2)(_._1).filter(_._2.sizeIs > 1)
/** Collects every project's (ProjectRef, target directory) pair across the whole build. */
private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
  import ScopeFilter._
  val everyProject = ScopeFilter(Make.inAnyProject)
  val refAndTarget = Def.setting { (Keys.thisProjectRef.value, Keys.target.value) }
  // `optional` yields None for projects where the setting is absent; flatMap drops those.
  new SettingKeyAll(Def.optional(refAndTarget)(identity))
    .all(everyProject)
    .evaluate(data)
    .flatMap(pair => pair)
}
private[sbt] def equalKeys(a: ScopedKey[_], b: ScopedKey[_], mask: ScopeMask): Boolean =
|
||||
a.key == b.key && Scope.equal(a.scope, b.scope, mask)
|
||||
|
||||
def delegates(
|
||||
structure: BuildStructure,
|
||||
scope: Scope,
|
||||
key: AttributeKey[_]
|
||||
): Seq[ScopedKey[_]] =
|
||||
structure.delegates(scope).map(d => ScopedKey(d, key))
|
||||
|
||||
private[sbt] def scopedKeyData(
|
||||
structure: BuildStructure,
|
||||
scope: Scope,
|
||||
key: AttributeKey[_]
|
||||
): Option[ScopedKeyData[_]] =
|
||||
structure.data.get(scope, key) map { v =>
|
||||
ScopedKeyData(ScopedKey(scope, key), v)
|
||||
}
|
||||
|
||||
def details(structure: BuildStructure, actual: Boolean, scope: Scope, key: AttributeKey[_])(
|
||||
using display: Show[ScopedKey[_]]
|
||||
): String = {
|
||||
val scoped = ScopedKey(scope, key)
|
||||
|
||||
val data = scopedKeyData(structure, scope, key) map { _.description } getOrElse {
|
||||
"No entry for key."
|
||||
}
|
||||
val description = key.description match {
|
||||
case Some(desc) => "Description:\n\t" + desc + "\n"; case None => ""
|
||||
}
|
||||
|
||||
val definingScope = structure.data.definingScope(scope, key)
|
||||
val providedBy = definingScope match {
|
||||
case Some(sc) => "Provided by:\n\t" + Scope.display(sc, key.label) + "\n"
|
||||
case None => ""
|
||||
}
|
||||
val definingScoped = definingScope match {
|
||||
case Some(sc) => ScopedKey(sc, key)
|
||||
case None => scoped
|
||||
}
|
||||
val comp =
|
||||
Def.compiled(structure.settings, actual)(using
|
||||
structure.delegates,
|
||||
structure.scopeLocal,
|
||||
display
|
||||
)
|
||||
val definedAt = comp get definingScoped map { c =>
|
||||
Def.definedAtString(c.settings).capitalize
|
||||
} getOrElse ""
|
||||
|
||||
val cMap = Def.flattenLocals(comp)
|
||||
val related = cMap.keys.filter(k => k.key == key && k.scope != scope)
|
||||
def derivedDependencies(c: ScopedKey[_]): List[ScopedKey[_]] =
|
||||
comp
|
||||
.get(c)
|
||||
.map(_.settings.flatMap(s => if (s.isDerived) s.dependencies else Nil))
|
||||
.toList
|
||||
.flatten
|
||||
|
||||
val depends = cMap.get(scoped) match {
|
||||
case Some(c) => c.dependencies.toSet; case None => Set.empty
|
||||
}
|
||||
val derivedDepends: Set[ScopedKey[_]] = derivedDependencies(definingScoped).toSet
|
||||
|
||||
val reverse = Project.reverseDependencies(cMap, scoped)
|
||||
val derivedReverse =
|
||||
reverse.filter(r => derivedDependencies(r).contains(definingScoped)).toSet
|
||||
|
||||
def printDepScopes(
|
||||
baseLabel: String,
|
||||
derivedLabel: String,
|
||||
scopes: Iterable[ScopedKey[_]],
|
||||
derived: Set[ScopedKey[_]]
|
||||
): String = {
|
||||
val label = s"$baseLabel${if (derived.isEmpty) "" else s" (D=$derivedLabel)"}"
|
||||
val prefix: ScopedKey[_] => String =
|
||||
if (derived.isEmpty) const("") else sk => if (derived(sk)) "D " else " "
|
||||
printScopes(label, scopes, prefix = prefix)
|
||||
}
|
||||
|
||||
def printScopes(
|
||||
label: String,
|
||||
scopes: Iterable[ScopedKey[_]],
|
||||
max: Int = Int.MaxValue,
|
||||
prefix: ScopedKey[_] => String = const("")
|
||||
) =
|
||||
if (scopes.isEmpty) ""
|
||||
else {
|
||||
val (limited, more) =
|
||||
if (scopes.size <= max) (scopes, "\n") else (scopes.take(max), "\n...\n")
|
||||
limited.map(sk => prefix(sk) + display.show(sk)).mkString(label + ":\n\t", "\n\t", more)
|
||||
}
|
||||
|
||||
data + "\n" +
|
||||
description +
|
||||
providedBy +
|
||||
definedAt +
|
||||
printDepScopes("Dependencies", "derived from", depends, derivedDepends) +
|
||||
printDepScopes("Reverse dependencies", "derives", reverse, derivedReverse) +
|
||||
printScopes("Delegates", delegates(structure, scope, key)) +
|
||||
printScopes("Related", related, 10)
|
||||
}
|
||||
|
||||
def settingGraph(structure: BuildStructure, basedir: File, scoped: ScopedKey[_])(using
|
||||
display: Show[ScopedKey[_]]
|
||||
): SettingGraph =
|
||||
SettingGraph(structure, basedir, scoped, 0)
|
||||
|
||||
/*
|
||||
def graphSettings(structure: BuildStructure, basedir: File)(implicit
|
||||
display: Show[ScopedKey[_]]
|
||||
): Unit = {
|
||||
def graph(actual: Boolean, name: String) =
|
||||
graphSettings(structure, actual, name, new File(basedir, name + ".dot"))
|
||||
graph(true, "actual_dependencies")
|
||||
graph(false, "declared_dependencies")
|
||||
}
|
||||
def graphSettings(structure: BuildStructure, actual: Boolean, graphName: String, file: File)(
|
||||
implicit display: Show[ScopedKey[_]]
|
||||
): Unit = {
|
||||
val rel = relation(structure, actual)
|
||||
val keyToString = display.show _
|
||||
DotGraph.generateGraph(file, graphName, rel, keyToString, keyToString)
|
||||
}
|
||||
*/
|
||||
|
||||
def relation(structure: BuildStructure, actual: Boolean)(using
|
||||
display: Show[ScopedKey[_]]
|
||||
): Relation[ScopedKey[_], ScopedKey[_]] =
|
||||
relation(structure.settings, actual)(using
|
||||
structure.delegates,
|
||||
structure.scopeLocal,
|
||||
display,
|
||||
)
|
||||
|
||||
private[sbt] def relation(settings: Seq[Def.Setting[_]], actual: Boolean)(using
|
||||
delegates: Scope => Seq[Scope],
|
||||
scopeLocal: Def.ScopeLocal,
|
||||
display: Show[ScopedKey[_]]
|
||||
): Relation[ScopedKey[_], ScopedKey[_]] =
|
||||
val cMap = Def.flattenLocals(Def.compiled(settings, actual))
|
||||
val emptyRelation = Relation.empty[ScopedKey[_], ScopedKey[_]]
|
||||
cMap.foldLeft(emptyRelation) { case (r, (key, value)) =>
|
||||
r + (key, value.dependencies)
|
||||
}
|
||||
|
||||
private[sbt] def showDefinitions(key: AttributeKey[_], defs: Seq[Scope])(using
|
||||
display: Show[ScopedKey[_]]
|
||||
): String =
|
||||
showKeys(defs.map(scope => ScopedKey(scope, key)))
|
||||
|
||||
private[sbt] def showUses(defs: Seq[ScopedKey[_]])(using display: Show[ScopedKey[_]]): String =
|
||||
showKeys(defs)
|
||||
|
||||
private[this] def showKeys(s: Seq[ScopedKey[_]])(using display: Show[ScopedKey[_]]): String =
|
||||
s.map(display.show).sorted.mkString("\n\t", "\n\t", "\n\n")
|
||||
|
||||
private[sbt] def definitions(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(
|
||||
using display: Show[ScopedKey[_]]
|
||||
): Seq[Scope] =
|
||||
relation(structure, actual)(using display)._1s.toSeq flatMap { sk =>
|
||||
if (sk.key == key) sk.scope :: Nil else Nil
|
||||
}
|
||||
|
||||
private[sbt] def usedBy(structure: BuildStructure, actual: Boolean, key: AttributeKey[_])(using
|
||||
display: Show[ScopedKey[_]]
|
||||
): Seq[ScopedKey[_]] =
|
||||
relation(structure, actual)(using display).all.toSeq flatMap { case (a, b) =>
|
||||
if (b.key == key) List[ScopedKey[_]](a) else Nil
|
||||
}
|
||||
|
||||
def reverseDependencies(
|
||||
cMap: Map[ScopedKey[_], Flattened],
|
||||
scoped: ScopedKey[_]
|
||||
): Iterable[ScopedKey[_]] =
|
||||
for {
|
||||
(key, compiled) <- cMap
|
||||
dep <- compiled.dependencies if dep == scoped
|
||||
} yield key
|
||||
|
||||
/*
|
||||
def setAll(extracted: Extracted, settings: Seq[Def.Setting[_]]): SessionSettings =
|
||||
SettingCompletions.setAll(extracted, settings).session
|
||||
*/
|
||||
|
||||
def extraBuilds(s: State): List[URI] =
|
||||
getOrNil(s, ProjectExtra.extraBuildsKey)
|
||||
def getOrNil[A](s: State, key: AttributeKey[List[A]]): List[A] =
|
||||
s.get(key).getOrElse(Nil)
|
||||
def setExtraBuilds(s: State, extra: List[URI]): State =
|
||||
s.put(ProjectExtra.extraBuildsKey, extra)
|
||||
def addExtraBuilds(s: State, extra: List[URI]): State =
|
||||
setExtraBuilds(s, extra ::: extraBuilds(s))
|
||||
def removeExtraBuilds(s: State, remove: List[URI]): State =
|
||||
updateExtraBuilds(s, _.filterNot(remove.toSet))
|
||||
def updateExtraBuilds(s: State, f: List[URI] => List[URI]): State =
|
||||
setExtraBuilds(s, f(extraBuilds(s)))
|
||||
|
||||
// used by Coursier integration
|
||||
private[sbt] def transitiveInterDependencies(
|
||||
state: State,
|
||||
projectRef: ProjectRef
|
||||
): Seq[ProjectRef] = {
|
||||
def dependencies(map: Map[ProjectRef, Seq[ProjectRef]], id: ProjectRef): Set[ProjectRef] = {
|
||||
def helper(map: Map[ProjectRef, Seq[ProjectRef]], acc: Set[ProjectRef]): Set[ProjectRef] =
|
||||
if (acc.exists(map.contains)) {
|
||||
val (kept, rem) = map.partition { case (k, _) => acc(k) }
|
||||
helper(rem, acc ++ kept.valuesIterator.flatten)
|
||||
} else acc
|
||||
helper(map - id, map.getOrElse(id, Nil).toSet)
|
||||
}
|
||||
val allProjectsDeps: Map[ProjectRef, Seq[ProjectRef]] =
|
||||
(for {
|
||||
(p, ref) <- Project.structure(state).allProjectPairs
|
||||
} yield ref -> p.dependencies.map(_.project)).toMap
|
||||
val deps = dependencies(allProjectsDeps.toMap, projectRef)
|
||||
Project.structure(state).allProjectRefs.filter(p => deps(p))
|
||||
}
|
||||
|
||||
def projectReturn(s: State): List[File] = getOrNil(s, projectReturnKey)
|
||||
def inPluginProject(s: State): Boolean = projectReturn(s).length > 1
|
||||
def setProjectReturn(s: State, pr: List[File]): State =
|
||||
s.copy(attributes = s.attributes.put(projectReturnKey, pr))
|
||||
|
||||
def loadAction(s: State, action: LoadAction): (State, File) =
|
||||
action match
|
||||
case LoadAction.Return =>
|
||||
projectReturn(s) match
|
||||
case _ /* current */ :: returnTo :: rest =>
|
||||
(setProjectReturn(s, returnTo :: rest), returnTo)
|
||||
case _ => sys.error("Not currently in a plugin definition")
|
||||
|
||||
case LoadAction.Current =>
|
||||
val base = s.configuration.baseDirectory
|
||||
projectReturn(s) match
|
||||
case Nil => (setProjectReturn(s, base :: Nil), base); case x :: _ => (s, x)
|
||||
|
||||
case LoadAction.Plugins =>
|
||||
val (newBase, oldStack) =
|
||||
if Project.isProjectLoaded(s) then
|
||||
(Project.extract(s).currentUnit.unit.plugins.base, projectReturn(s))
|
||||
else // support changing to the definition project if it fails to load
|
||||
(BuildPaths.projectStandard(s.baseDir), s.baseDir :: Nil)
|
||||
val newS = setProjectReturn(s, newBase :: oldStack)
|
||||
(newS, newBase)
|
||||
|
||||
/*
|
||||
def runTask[T](
|
||||
taskKey: ScopedKey[Task[T]],
|
||||
state: State,
|
||||
checkCycles: Boolean = false
|
||||
): Option[(State, Result[T])] = {
|
||||
val extracted = Project.extract(state)
|
||||
val ch = EvaluateTask.cancelStrategy(extracted, extracted.structure, state)
|
||||
val p = EvaluateTask.executeProgress(extracted, extracted.structure, state)
|
||||
val r = EvaluateTask.restrictions(state)
|
||||
val fgc = EvaluateTask.forcegc(extracted, extracted.structure)
|
||||
val mfi = EvaluateTask.minForcegcInterval(extracted, extracted.structure)
|
||||
runTask(taskKey, state, EvaluateTaskConfig(r, checkCycles, p, ch, fgc, mfi))
|
||||
}
|
||||
|
||||
def runTask[T](
|
||||
taskKey: ScopedKey[Task[T]],
|
||||
state: State,
|
||||
config: EvaluateTaskConfig
|
||||
): Option[(State, Result[T])] = {
|
||||
val extracted = Project.extract(state)
|
||||
EvaluateTask(extracted.structure, taskKey, state, extracted.currentRef, config)
|
||||
}
|
||||
|
||||
def projectToRef(p: Project): ProjectReference = LocalProject(p.id)
|
||||
|
||||
implicit def projectToLocalProject(p: Project): LocalProject = LocalProject(p.id)
|
||||
|
||||
*/
|
||||
|
||||
extension [A1](in: Def.Initialize[Task[A1]])
|
||||
def updateState(f: (State, A1) => State): Def.Initialize[Task[A1]] =
|
||||
in(t => SessionVar.transform(t, f))
|
||||
|
||||
def storeAs(key: TaskKey[A1])(using f: JsonFormat[A1]): Def.Initialize[Task[A1]] =
|
||||
Keys.resolvedScoped.zipWith(in) { (scoped, task) =>
|
||||
SessionVar.transform(
|
||||
task,
|
||||
(state, value) =>
|
||||
SessionVar.persistAndSet(
|
||||
SessionVar.resolveContext(key, scoped.scope, state),
|
||||
state,
|
||||
value
|
||||
)(f)
|
||||
)
|
||||
}
|
||||
|
||||
def keepAs(key: TaskKey[A1]): Def.Initialize[Task[A1]] =
|
||||
in.zipWith(Keys.resolvedScoped) { (t, scoped) =>
|
||||
SessionVar.transform(
|
||||
t,
|
||||
(state, value) =>
|
||||
SessionVar.set(SessionVar.resolveContext(key, scoped.scope, state), state, value)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* implicitly injected to tasks that return PromiseWrap.
|
||||
*/
|
||||
extension [A1](in: Initialize[Task[PromiseWrap[A1]]])
|
||||
def await: Def.Initialize[Task[A1]] = await(Duration.Inf)
|
||||
def await(atMost: Duration): Def.Initialize[Task[A1]] =
|
||||
(Def
|
||||
.task {
|
||||
val p = in.value
|
||||
var result: Option[A1] = None
|
||||
if atMost == Duration.Inf then
|
||||
while result.isEmpty do
|
||||
try {
|
||||
result = Some(Await.result(p.underlying.future, Duration("1s")))
|
||||
Thread.sleep(10)
|
||||
} catch {
|
||||
case _: TimeoutException => ()
|
||||
}
|
||||
else result = Some(Await.result(p.underlying.future, atMost))
|
||||
result.get
|
||||
})
|
||||
.tag(Tags.Sentinel)
|
||||
|
||||
/*
|
||||
import scala.reflect.macros._
|
||||
|
||||
def projectMacroImpl(c: blackbox.Context): c.Expr[Project] = {
|
||||
import c.universe._
|
||||
val enclosingValName = std.KeyMacro.definingValName(
|
||||
c,
|
||||
methodName =>
|
||||
s"""$methodName must be directly assigned to a val, such as `val x = $methodName`. Alternatively, you can use `sbt.Project.apply`"""
|
||||
)
|
||||
val name = c.Expr[String](Literal(Constant(enclosingValName)))
|
||||
reify { Project(name.splice, new File(name.splice)) }
|
||||
}
|
||||
|
||||
implicit def configDependencyConstructor[T](
|
||||
p: T
|
||||
)(implicit ev: T => ProjectReference): Constructor =
|
||||
new Constructor(p)
|
||||
|
||||
implicit def classpathDependency[T](
|
||||
p: T
|
||||
)(implicit ev: T => ProjectReference): ClasspathDependency = ClasspathDependency(p, None)
|
||||
*/
|
||||
|
||||
// Duplicated with Structure
|
||||
|
||||
// These used to be in Project so that they didn't need to get imported (due to Initialize being nested in Project).
|
||||
// Moving Initialize and other settings types to Def and decoupling Project, Def, and Structure means these go here for now
|
||||
implicit def richInitializeTask[T](init: Initialize[Task[T]]): Scoped.RichInitializeTask[T] =
|
||||
new Scoped.RichInitializeTask(init)
|
||||
|
||||
/*
|
||||
implicit def richInitializeInputTask[T](
|
||||
init: Initialize[InputTask[T]]
|
||||
): Scoped.RichInitializeInputTask[T] =
|
||||
new Scoped.RichInitializeInputTask(init)
|
||||
|
||||
implicit def richInitialize[T](i: Initialize[T]): Scoped.RichInitialize[T] =
|
||||
new Scoped.RichInitialize[T](i)
|
||||
|
||||
implicit def richTaskSessionVar[T](init: Initialize[Task[T]]): Project.RichTaskSessionVar[T] =
|
||||
new Project.RichTaskSessionVar(init)
|
||||
|
||||
implicit def sbtRichTaskPromise[A](
|
||||
i: Initialize[Task[PromiseWrap[A]]]
|
||||
): Project.RichTaskPromise[A] =
|
||||
new Project.RichTaskPromise(i)
|
||||
*/
|
||||
end ProjectExtra
|
||||
|
|
@ -17,7 +17,9 @@ import org.apache.ivy.core.resolve.DownloadOptions
|
|||
import org.apache.ivy.plugins.resolver.DependencyResolver
|
||||
import sbt.Defaults.prefix
|
||||
import sbt.Keys._
|
||||
import sbt.Project._
|
||||
import sbt.Project.*
|
||||
import sbt.ProjectExtra.inConfig
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sbt.ScopeFilter.Make._
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.coursierint.LMCoursier
|
||||
|
|
@ -159,7 +161,9 @@ object RemoteCache {
|
|||
.withResolvers(rs)
|
||||
}
|
||||
)
|
||||
) ++ inConfig(Compile)(configCacheSettings(compileArtifact(Compile, cachedCompileClassifier)))
|
||||
) ++ inConfig(Compile)(
|
||||
configCacheSettings(compileArtifact(Compile, cachedCompileClassifier))
|
||||
)
|
||||
++ inConfig(Test)(configCacheSettings(testArtifact(Test, cachedTestClassifier))))
|
||||
|
||||
def getResourceFilePaths() = Def.task {
|
||||
|
|
@ -183,7 +187,7 @@ object RemoteCache {
|
|||
if (af.exists && artp.length() > 0) {
|
||||
JarUtils.includeInJar(artp, Vector(af -> s"META-INF/inc_compile.zip"))
|
||||
}
|
||||
val rf = getResourceFilePaths.value
|
||||
val rf = getResourceFilePaths().value
|
||||
if (rf.exists) {
|
||||
JarUtils.includeInJar(artp, Vector(rf -> s"META-INF/copy-resources.txt"))
|
||||
}
|
||||
|
|
@ -271,12 +275,12 @@ object RemoteCache {
|
|||
val smi = scalaModuleInfo.value
|
||||
val artifacts = (pushRemoteCacheConfiguration / remoteCacheArtifacts).value
|
||||
val nonPom = artifacts.filterNot(isPomArtifact).toVector
|
||||
val copyResources = getResourceFilePaths.value
|
||||
val copyResources = getResourceFilePaths().value
|
||||
m.withModule(log) { case (ivy, md, _) =>
|
||||
val resolver = ivy.getSettings.getResolver(r.name)
|
||||
if (resolver eq null) sys.error(s"undefined resolver '${r.name}'")
|
||||
val cross = CrossVersion(p, smi)
|
||||
val crossf: String => String = cross.getOrElse(identity _)
|
||||
val crossf: String => String = cross.getOrElse(identity[String](_))
|
||||
var found = false
|
||||
ids foreach { (id: String) =>
|
||||
val v = toVersion(id)
|
||||
|
|
@ -12,6 +12,7 @@ import sbt.internal.util.{ AttributeKey, Dag, Types }
|
|||
import sbt.librarymanagement.{ ConfigRef, Configuration }
|
||||
import Types.const
|
||||
import Def.Initialize
|
||||
import sbt.Project.inScope
|
||||
import java.net.URI
|
||||
|
||||
object ScopeFilter {
|
||||
|
|
@ -64,27 +65,36 @@ object ScopeFilter {
|
|||
}
|
||||
}
|
||||
|
||||
final class SettingKeyAll[T] private[sbt] (i: Initialize[T]) {
|
||||
final class SettingKeyAll[A] private[sbt] (i: Initialize[A]):
|
||||
|
||||
/**
|
||||
* Evaluates the initialization in all scopes selected by the filter. These are dynamic dependencies, so
|
||||
* static inspections will not show them.
|
||||
*/
|
||||
def all(sfilter: => ScopeFilter): Initialize[Seq[T]] = Def.bind(getData) { data =>
|
||||
data.allScopes.toSeq.filter(sfilter(data)).map(s => Project.inScope(s, i)).join
|
||||
}
|
||||
}
|
||||
final class TaskKeyAll[T] private[sbt] (i: Initialize[Task[T]]) {
|
||||
def all(sfilter: => ScopeFilter): Initialize[Seq[A]] =
|
||||
Def.flatMap(getData) { data =>
|
||||
data.allScopes.toSeq
|
||||
.filter(sfilter(data))
|
||||
.map(s => Project.inScope(s, i))
|
||||
.join
|
||||
}
|
||||
end SettingKeyAll
|
||||
|
||||
final class TaskKeyAll[A] private[sbt] (i: Initialize[Task[A]]):
|
||||
|
||||
/**
|
||||
* Evaluates the task in all scopes selected by the filter. These are dynamic dependencies, so
|
||||
* static inspections will not show them.
|
||||
*/
|
||||
def all(sfilter: => ScopeFilter): Initialize[Task[Seq[T]]] = Def.bind(getData) { data =>
|
||||
import std.TaskExtra._
|
||||
data.allScopes.toSeq.filter(sfilter(data)).map(s => Project.inScope(s, i)).join(_.join)
|
||||
}
|
||||
}
|
||||
def all(sfilter: => ScopeFilter): Initialize[Task[Seq[A]]] =
|
||||
Def.flatMap(getData) { data =>
|
||||
import std.TaskExtra._
|
||||
data.allScopes.toSeq
|
||||
.filter(sfilter(data))
|
||||
.map(s => Project.inScope(s, i))
|
||||
.join(_.join)
|
||||
}
|
||||
end TaskKeyAll
|
||||
|
||||
private[sbt] val Make = new Make {}
|
||||
trait Make {
|
||||
|
|
@ -219,6 +229,7 @@ object ScopeFilter {
|
|||
aggregate: Boolean
|
||||
): ProjectRef => Seq[ProjectRef] =
|
||||
ref =>
|
||||
import sbt.ProjectExtra.getProject
|
||||
Project.getProject(ref, structure).toList flatMap { p =>
|
||||
(if (classpath) p.dependencies.map(_.project) else Nil) ++
|
||||
(if (aggregate) p.aggregate else Nil)
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ import java.io.File
|
|||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.nio.Keys._
|
||||
import sbt.Project._
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sbt.ScopeFilter.Make._
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.inc.ModuleUtilities
|
||||
|
|
@ -62,7 +62,9 @@ object ScriptedPlugin extends AutoPlugin {
|
|||
override lazy val projectSettings: Seq[Setting[_]] = Seq(
|
||||
ivyConfigurations ++= Seq(ScriptedConf, ScriptedLaunchConf),
|
||||
scriptedSbt := (pluginCrossBuild / sbtVersion).value,
|
||||
sbtLauncher := getJars(ScriptedLaunchConf).map(_.get().head).value,
|
||||
sbtLauncher := getJars(ScriptedLaunchConf)
|
||||
.map(_.get().head)
|
||||
.value,
|
||||
sbtTestDirectory := sourceDirectory.value / "sbt-test",
|
||||
libraryDependencies ++= (CrossVersion.partialVersion(scriptedSbt.value) match {
|
||||
case Some((0, 13)) =>
|
||||
|
|
@ -174,20 +176,21 @@ object ScriptedPlugin extends AutoPlugin {
|
|||
(token(Space) ~> (PagedIds | testIdAsGroup)).* map (_.flatten)
|
||||
}
|
||||
|
||||
private[sbt] def scriptedTask: Initialize[InputTask[Unit]] = Def.inputTask {
|
||||
val args = scriptedParser(sbtTestDirectory.value).parsed
|
||||
Def.unit(scriptedDependencies.value)
|
||||
scriptedRun.value.run(
|
||||
sbtTestDirectory.value,
|
||||
scriptedBufferLog.value,
|
||||
args,
|
||||
sbtLauncher.value,
|
||||
Fork.javaCommand((scripted / javaHome).value, "java").getAbsolutePath,
|
||||
scriptedLaunchOpts.value,
|
||||
new java.util.ArrayList[File](),
|
||||
scriptedParallelInstances.value
|
||||
)
|
||||
}
|
||||
private[sbt] def scriptedTask: Initialize[InputTask[Unit]] =
|
||||
Def.inputTask {
|
||||
val args = scriptedParser(sbtTestDirectory.value).parsed
|
||||
Def.unit(scriptedDependencies.value)
|
||||
scriptedRun.value.run(
|
||||
sbtTestDirectory.value,
|
||||
scriptedBufferLog.value,
|
||||
args,
|
||||
sbtLauncher.value,
|
||||
Fork.javaCommand((scripted / javaHome).value, "java").getAbsolutePath,
|
||||
scriptedLaunchOpts.value,
|
||||
new java.util.ArrayList[File](),
|
||||
scriptedParallelInstances.value
|
||||
)
|
||||
}
|
||||
|
||||
private[this] def getJars(config: Configuration): Initialize[Task[PathFinder]] = Def.task {
|
||||
PathFinder(Classpaths.managedJars(config, classpathTypes.value, Keys.update.value).map(_.data))
|
||||
|
|
@ -15,6 +15,7 @@ import Def.ScopedKey
|
|||
import Types.Id
|
||||
import Keys.sessionVars
|
||||
import sjsonnew.JsonFormat
|
||||
import sbt.ProjectExtra.*
|
||||
|
||||
object SessionVar {
|
||||
val DefaultDataID = "data"
|
||||
|
|
|
|||
|
|
@ -25,25 +25,28 @@ package sbt
|
|||
* Prior to a call to `setFoo`, `getFoo` will return `None`. After a call to `setFoo`, `getFoo` will
|
||||
* return `Some("foo")`.
|
||||
*/
|
||||
final class StateTransform private (val transform: State => State, stateProxy: () => State) {
|
||||
@deprecated("Exists only for binary compatibility with 1.3.x.", "1.4.0")
|
||||
private[sbt] def state: State = stateProxy()
|
||||
@deprecated("1.4.0", "Use the constructor that takes a transform function.")
|
||||
private[sbt] def this(state: State) = this((_: State) => state, () => state)
|
||||
final class StateTransform private (
|
||||
val transform: State => State,
|
||||
stateProxy: () => State,
|
||||
) {
|
||||
// @deprecated("Exists only for binary compatibility with 1.3.x.", "1.4.0")
|
||||
// private[sbt] def state: State = stateProxy()
|
||||
// @deprecated("1.4.0", "Use the constructor that takes a transform function.")
|
||||
// private[sbt] def this(state: State) = this((_: State) => state, () => state)
|
||||
}
|
||||
|
||||
object StateTransform {
|
||||
@deprecated("Exists only for binary compatibility with 1.3.x", "1.4.0")
|
||||
def apply(state: State): State = state
|
||||
object StateTransform:
|
||||
// @deprecated("Exists only for binary compatibility with 1.3.x", "1.4.0")
|
||||
// def apply(state: State): State = state
|
||||
|
||||
/**
|
||||
* Create an instance of [[StateTransform]].
|
||||
* @param transform the transformation to apply after task evaluation has completed
|
||||
* @return the [[StateTransform]].
|
||||
*/
|
||||
def apply(transform: State => State) =
|
||||
def apply(transform: State => State): StateTransform =
|
||||
new StateTransform(
|
||||
transform,
|
||||
() => throw new IllegalStateException("No state was added to the StateTransform.")
|
||||
)
|
||||
}
|
||||
end StateTransform
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import sbt.librarymanagement._
|
|||
import sbt.librarymanagement.ivy.{ IvyConfiguration, IvyDependencyResolution }
|
||||
import sbt.internal.inc.classpath.ClasspathUtil
|
||||
import BasicCommandStrings._, BasicKeys._
|
||||
import sbt.ProjectExtra.*
|
||||
|
||||
private[sbt] object TemplateCommandUtil {
|
||||
def templateCommand: Command =
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ package sbt
|
|||
import sjsonnew.JsonFormat
|
||||
import Def.Setting
|
||||
import sbt.internal.{ BuildStructure, LoadedBuildUnit, SessionSettings }
|
||||
import sbt.ProjectExtra.*
|
||||
|
||||
/**
|
||||
* Extends State with setting-level knowledge.
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ import lmcoursier.definitions.{
|
|||
}
|
||||
import sbt.librarymanagement._
|
||||
import sbt.Keys._
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.SlashSyntax0._
|
||||
|
||||
object CoursierArtifactsTasks {
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ import sbt.librarymanagement.ivy.{
|
|||
Credentials,
|
||||
DirectCredentials => IvyDirectCredentials
|
||||
}
|
||||
import sbt.ProjectExtra.transitiveInterDependencies
|
||||
import sbt.ScopeFilter.Make._
|
||||
import scala.collection.JavaConverters._
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ package coursierint
|
|||
|
||||
import sbt.librarymanagement._
|
||||
import sbt.Keys._
|
||||
import sbt.ProjectExtra.transitiveInterDependencies
|
||||
import sbt.ScopeFilter.Make._
|
||||
import sbt.SlashSyntax0._
|
||||
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ object LMCoursier {
|
|||
.orElse(sys.props.get("coursier.cache").map(absoluteFile)) match {
|
||||
case Some(dir) => dir
|
||||
case _ =>
|
||||
if (Util.isWindows) windowsCacheDirectory
|
||||
if Util.isWindows then windowsCacheDirectory
|
||||
else CoursierDependencyResolution.defaultCacheLocation
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -68,9 +68,9 @@ private[sbt] abstract class AbstractTaskExecuteProgress extends ExecuteProgress[
|
|||
}
|
||||
|
||||
override def afterRegistered(
|
||||
task: Task[_],
|
||||
allDeps: Iterable[Task[_]],
|
||||
pendingDeps: Iterable[Task[_]]
|
||||
task: Task[Any],
|
||||
allDeps: Iterable[Task[Any]],
|
||||
pendingDeps: Iterable[Task[Any]]
|
||||
): Unit = {
|
||||
// we need this to infer anonymous task names
|
||||
pendingDeps foreach { t =>
|
||||
|
|
@ -80,7 +80,7 @@ private[sbt] abstract class AbstractTaskExecuteProgress extends ExecuteProgress[
|
|||
}
|
||||
}
|
||||
|
||||
override def beforeWork(task: Task[_]): Unit = {
|
||||
override def beforeWork(task: Task[Any]): Unit = {
|
||||
timings.put(task, new Timer)
|
||||
()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,19 +14,20 @@ import sbt.internal.util.complete.{ DefaultParsers, Parser }
|
|||
import Aggregation.{ KeyValue, Values }
|
||||
import DefaultParsers._
|
||||
import sbt.internal.util.Types.idFun
|
||||
import sbt.ProjectExtra.*
|
||||
import java.net.URI
|
||||
import sbt.internal.CommandStrings.{ MultiTaskCommand, ShowCommand, PrintCommand }
|
||||
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, IMap, Settings, Util }
|
||||
import sbt.util.Show
|
||||
import scala.collection.mutable
|
||||
|
||||
final class ParsedKey[+A](val key: ScopedKey[A], val mask: ScopeMask, val separaters: Seq[String]):
|
||||
def this(key: ScopedKey[A], mask: ScopeMask) = this(key, mask, Nil)
|
||||
final class ParsedKey(val key: ScopedKey[_], val mask: ScopeMask, val separaters: Seq[String]):
|
||||
def this(key: ScopedKey[_], mask: ScopeMask) = this(key, mask, Nil)
|
||||
|
||||
override def equals(o: Any): Boolean =
|
||||
this.eq(o.asInstanceOf[AnyRef]) || (o match {
|
||||
case x: ParsedKey[_] => (this.key == x.key) && (this.mask == x.mask)
|
||||
case _ => false
|
||||
case x: ParsedKey => (this.key == x.key) && (this.mask == x.mask)
|
||||
case _ => false
|
||||
})
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (17 + "sbt.internal.ParsedKey".##) + this.key.##)) + this.mask.##
|
||||
|
|
@ -55,7 +56,8 @@ object Act {
|
|||
keyMap: Map[String, AttributeKey[_]],
|
||||
data: Settings[Scope]
|
||||
): Parser[ScopedKey[Any]] =
|
||||
scopedKeySelected(index, current, defaultConfigs, keyMap, data).map(_.key)
|
||||
scopedKeySelected(index, current, defaultConfigs, keyMap, data)
|
||||
.map(_.key.asInstanceOf[ScopedKey[Any]])
|
||||
|
||||
// the index should be an aggregated index for proper tab completion
|
||||
def scopedKeyAggregated(
|
||||
|
|
@ -72,7 +74,11 @@ object Act {
|
|||
structure.data
|
||||
)
|
||||
)
|
||||
yield Aggregation.aggregate(selected.key, selected.mask, structure.extra)
|
||||
yield Aggregation.aggregate(
|
||||
selected.key.asInstanceOf[ScopedKey[Any]],
|
||||
selected.mask,
|
||||
structure.extra
|
||||
)
|
||||
|
||||
def scopedKeyAggregatedSep(
|
||||
current: ProjectRef,
|
||||
|
|
@ -88,7 +94,7 @@ object Act {
|
|||
)
|
||||
yield Aggregation
|
||||
.aggregate(selected.key, selected.mask, structure.extra)
|
||||
.map(k => k -> selected.separaters)
|
||||
.map(k => k.asInstanceOf[ScopedKey[Any]] -> selected.separaters)
|
||||
|
||||
def scopedKeySelected(
|
||||
index: KeyIndex,
|
||||
|
|
@ -96,7 +102,7 @@ object Act {
|
|||
defaultConfigs: Option[ResolvedReference] => Seq[String],
|
||||
keyMap: Map[String, AttributeKey[_]],
|
||||
data: Settings[Scope]
|
||||
): Parser[ParsedKey[Any]] =
|
||||
): Parser[ParsedKey] =
|
||||
scopedKeyFull(index, current, defaultConfigs, keyMap) flatMap { choices =>
|
||||
select(choices, data)(showRelativeKey2(current))
|
||||
}
|
||||
|
|
@ -106,7 +112,7 @@ object Act {
|
|||
current: ProjectRef,
|
||||
defaultConfigs: Option[ResolvedReference] => Seq[String],
|
||||
keyMap: Map[String, AttributeKey[_]]
|
||||
): Parser[Seq[Parser[ParsedKey[Any]]]] = {
|
||||
): Parser[Seq[Parser[ParsedKey]]] = {
|
||||
val confParserCache
|
||||
: mutable.Map[Option[sbt.ResolvedReference], Parser[(ParsedAxis[String], Seq[String])]] =
|
||||
mutable.Map.empty
|
||||
|
|
@ -151,7 +157,7 @@ object Act {
|
|||
confAmb: ParsedAxis[String],
|
||||
baseMask: ScopeMask,
|
||||
baseSeps: Seq[String]
|
||||
): Seq[Parser[ParsedKey[Any]]] =
|
||||
): Seq[Parser[ParsedKey]] =
|
||||
for {
|
||||
conf <- configs(confAmb, defaultConfigs, proj, index)
|
||||
} yield for {
|
||||
|
|
@ -178,9 +184,9 @@ object Act {
|
|||
key
|
||||
)
|
||||
|
||||
def select(allKeys: Seq[Parser[ParsedKey[_]]], data: Settings[Scope])(implicit
|
||||
def select(allKeys: Seq[Parser[ParsedKey]], data: Settings[Scope])(implicit
|
||||
show: Show[ScopedKey[_]]
|
||||
): Parser[ParsedKey[Any]] =
|
||||
): Parser[ParsedKey] =
|
||||
seq(allKeys) flatMap { ss =>
|
||||
val default = ss.headOption match {
|
||||
case None => noValidKeys
|
||||
|
|
@ -188,16 +194,16 @@ object Act {
|
|||
}
|
||||
selectFromValid(ss filter isValid(data), default)
|
||||
}
|
||||
def selectFromValid(ss: Seq[ParsedKey[_]], default: Parser[ParsedKey[_]])(implicit
|
||||
def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(implicit
|
||||
show: Show[ScopedKey[_]]
|
||||
): Parser[ParsedKey[Any]] =
|
||||
): Parser[ParsedKey] =
|
||||
selectByTask(selectByConfig(ss)) match {
|
||||
case Seq() => default
|
||||
case Seq(single) => success(single)
|
||||
case multi => failure("Ambiguous keys: " + showAmbiguous(keys(multi)))
|
||||
}
|
||||
private[this] def keys(ss: Seq[ParsedKey[_]]): Seq[ScopedKey[_]] = ss.map(_.key)
|
||||
def selectByConfig(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] =
|
||||
private[this] def keys(ss: Seq[ParsedKey]): Seq[ScopedKey[_]] = ss.map(_.key)
|
||||
def selectByConfig(ss: Seq[ParsedKey]): Seq[ParsedKey] =
|
||||
ss match {
|
||||
case Seq() => Nil
|
||||
case Seq(x, tail @ _*) => // select the first configuration containing a valid key
|
||||
|
|
@ -206,7 +212,7 @@ object Act {
|
|||
case xs => x +: xs
|
||||
}
|
||||
}
|
||||
def selectByTask(ss: Seq[ParsedKey[_]]): Seq[ParsedKey[Any]] = {
|
||||
def selectByTask(ss: Seq[ParsedKey]): Seq[ParsedKey] = {
|
||||
val (selects, zeros) = ss.partition(_.key.scope.task.isSelect)
|
||||
if (zeros.nonEmpty) zeros else selects
|
||||
}
|
||||
|
|
@ -216,7 +222,7 @@ object Act {
|
|||
def showAmbiguous(keys: Seq[ScopedKey[_]])(implicit show: Show[ScopedKey[_]]): String =
|
||||
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
|
||||
|
||||
def isValid(data: Settings[Scope])(parsed: ParsedKey[_]): Boolean = {
|
||||
def isValid(data: Settings[Scope])(parsed: ParsedKey): Boolean = {
|
||||
val key = parsed.key
|
||||
data.definingScope(key.scope, key.key) == Some(key.scope)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ package internal
|
|||
|
||||
import sbt.internal.util.Types.const
|
||||
import java.io.File
|
||||
import xsbti.VirtualFile
|
||||
|
||||
/**
|
||||
* Represents how settings from various sources are automatically merged into a Project's settings.
|
||||
|
|
@ -23,10 +24,10 @@ object AddSettings {
|
|||
}
|
||||
private[sbt] final object User extends AddSettings
|
||||
private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings
|
||||
private[sbt] final class DefaultSbtFiles(val include: File => Boolean) extends AddSettings
|
||||
private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings {
|
||||
override def toString: String = s"SbtFiles($files)"
|
||||
}
|
||||
private[sbt] final class DefaultSbtFiles(val include: VirtualFile => Boolean) extends AddSettings
|
||||
// private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings {
|
||||
// override def toString: String = s"SbtFiles($files)"
|
||||
// }
|
||||
private[sbt] final object BuildScalaFiles extends AddSettings
|
||||
|
||||
/** Adds all settings from autoplugins. */
|
||||
|
|
@ -51,7 +52,7 @@ object AddSettings {
|
|||
val defaultSbtFiles: AddSettings = new DefaultSbtFiles(const(true))
|
||||
|
||||
/** Includes the settings from the .sbt files given by `files`. */
|
||||
def sbtFiles(files: File*): AddSettings = new SbtFiles(files)
|
||||
// def sbtFiles(files: File*): AddSettings = new SbtFiles(files)
|
||||
|
||||
/** Includes settings automatically */
|
||||
def seq(autos: AddSettings*): AddSettings = new Sequence(autos)
|
||||
|
|
@ -69,8 +70,9 @@ object AddSettings {
|
|||
|
||||
def clearSbtFiles(a: AddSettings): AddSettings =
|
||||
tx(a) {
|
||||
case _: DefaultSbtFiles | _: SbtFiles => None
|
||||
case x => Some(x)
|
||||
// case _: SbtFiles => None
|
||||
case _: DefaultSbtFiles => None
|
||||
case x => Some(x)
|
||||
} getOrElse seq()
|
||||
|
||||
private[sbt] def tx(a: AddSettings)(f: AddSettings => Option[AddSettings]): Option[AddSettings] =
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ import java.text.DateFormat
|
|||
import sbt.Def.ScopedKey
|
||||
import sbt.Keys.{ showSuccess, showTiming, timingFormat }
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.util.complete.Parser.{ failure, seq, success }
|
||||
import sbt.internal.util._
|
||||
|
|
@ -57,7 +58,7 @@ object Aggregation {
|
|||
}
|
||||
|
||||
type Values[T] = Seq[KeyValue[T]]
|
||||
type AnyKeys = Values[_]
|
||||
type AnyKeys = Values[Any]
|
||||
|
||||
def seqParser[T](ps: Values[Parser[T]]): Parser[Seq[KeyValue[T]]] =
|
||||
seq(ps.map { case KeyValue(k, p) => p.map(v => KeyValue(k, v)) })
|
||||
|
|
@ -219,7 +220,12 @@ object Aggregation {
|
|||
val inputStrings = inputTasks.map(_.key).mkString("Input task(s):\n\t", "\n\t", "\n")
|
||||
val otherStrings = other.map(_.key).mkString("Task(s)/setting(s):\n\t", "\n\t", "\n")
|
||||
failure(s"Cannot mix input tasks with plain tasks/settings. $inputStrings $otherStrings")
|
||||
} else applyDynamicTasks(s, maps(inputTasks)(castToAny), show)
|
||||
} else
|
||||
applyDynamicTasks(
|
||||
s,
|
||||
inputTasks.map { case KeyValue(k, v: InputTask[a]) => KeyValue(k, castToAny(v)) },
|
||||
show
|
||||
)
|
||||
} else {
|
||||
val base =
|
||||
if (tasks.isEmpty) success(() => s)
|
||||
|
|
@ -233,8 +239,10 @@ object Aggregation {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
// this is a hack to avoid duplicating method implementations
|
||||
private[this] def castToAny[T[_]](t: T[_]): T[Any] = t.asInstanceOf[T[Any]]
|
||||
private[this] def castToAny[F[_]]: [a] => F[a] => F[Any] = [a] =>
|
||||
(fa: F[a]) => fa.asInstanceOf[F[Any]]
|
||||
|
||||
private[this] def maps[T, S](vs: Values[T])(f: T => S): Values[S] =
|
||||
vs map { case KeyValue(k, v) => KeyValue(k, f(v)) }
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ package internal
|
|||
import java.io.File
|
||||
import Keys.{ organization, thisProject, autoGeneratedProject }
|
||||
import Def.Setting
|
||||
// import sbt.ProjectExtra.apply
|
||||
import sbt.io.Hash
|
||||
import sbt.internal.util.Attributed
|
||||
import sbt.internal.inc.ReflectUtilities
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ package sbt
|
|||
package internal
|
||||
|
||||
import java.io.File
|
||||
import java.nio.file.Path
|
||||
import java.net.URI
|
||||
|
||||
import Def.{ ScopeLocal, ScopedKey, Setting, displayFull }
|
||||
|
|
@ -17,6 +18,7 @@ import Scope.GlobalScope
|
|||
import BuildStreams.Streams
|
||||
import sbt.LocalRootProject
|
||||
import sbt.io.syntax._
|
||||
import sbt.internal.inc.MappedFileConverter
|
||||
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed, Settings }
|
||||
import sbt.internal.util.Attributed.data
|
||||
import sbt.util.Logger
|
||||
|
|
@ -32,19 +34,8 @@ final class BuildStructure(
|
|||
val delegates: Scope => Seq[Scope],
|
||||
val scopeLocal: ScopeLocal,
|
||||
private[sbt] val compiledMap: Map[ScopedKey[_], Def.Compiled[_]],
|
||||
private[sbt] val converter: MappedFileConverter,
|
||||
) {
|
||||
@deprecated("Used the variant that takes a compiledMap", "1.4.0")
|
||||
def this(
|
||||
units: Map[URI, LoadedBuildUnit],
|
||||
root: URI,
|
||||
settings: Seq[Setting[_]],
|
||||
data: Settings[Scope],
|
||||
index: StructureIndex,
|
||||
streams: State => Streams,
|
||||
delegates: Scope => Seq[Scope],
|
||||
scopeLocal: ScopeLocal,
|
||||
) = this(units, root, settings, data, index, streams, delegates, scopeLocal, Map.empty)
|
||||
|
||||
val extra: BuildUtil[ResolvedProject] = BuildUtil(root, units, index.keyIndex, data)
|
||||
|
||||
/** The root project for the specified build. Throws if no build or empty build. */
|
||||
|
|
@ -117,8 +108,10 @@ final class LoadedBuildUnit(
|
|||
* The classpath to use when compiling against this build unit's publicly visible code.
|
||||
* It includes build definition and plugin classes and classes for .sbt file statements and expressions.
|
||||
*/
|
||||
def classpath: Seq[File] =
|
||||
unit.definitions.target ++ unit.plugins.classpath ++ unit.definitions.dslDefinitions.classpath
|
||||
def classpath: Seq[Path] =
|
||||
unit.definitions.target.map(
|
||||
_.toPath()
|
||||
) ++ unit.plugins.classpath.map(_.toPath()) ++ unit.definitions.dslDefinitions.classpath
|
||||
|
||||
/**
|
||||
* The class loader to use for this build unit's publicly visible code.
|
||||
|
|
@ -281,7 +274,11 @@ final class LoadedBuild(val root: URI, val units: Map[URI, LoadedBuildUnit]) {
|
|||
private[sbt] def autos = GroupedAutoPlugins(units)
|
||||
}
|
||||
|
||||
final class PartBuild(val root: URI, val units: Map[URI, PartBuildUnit])
|
||||
final class PartBuild(
|
||||
val root: URI,
|
||||
val units: Map[URI, PartBuildUnit],
|
||||
val converter: MappedFileConverter,
|
||||
)
|
||||
|
||||
sealed trait BuildUnitBase { def rootProjects: Seq[String]; def buildSettings: Seq[Setting[_]] }
|
||||
|
||||
|
|
|
|||
|
|
@ -178,7 +178,7 @@ private[sbt] object ClasspathImpl {
|
|||
track: TrackLevel,
|
||||
log: Logger
|
||||
): Initialize[Task[Classpath]] =
|
||||
Def.value {
|
||||
Def.value[Task[Classpath]] {
|
||||
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
|
||||
exportedProductsNoTracking,
|
||||
exportedProductsIfMissing,
|
||||
|
|
@ -196,7 +196,7 @@ private[sbt] object ClasspathImpl {
|
|||
track: TrackLevel,
|
||||
log: Logger
|
||||
): Initialize[Task[VirtualClasspath]] =
|
||||
Def.value {
|
||||
Def.value[Task[VirtualClasspath]] {
|
||||
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
|
||||
exportedPickles,
|
||||
exportedPickles,
|
||||
|
|
@ -242,7 +242,7 @@ private[sbt] object ClasspathImpl {
|
|||
track: TrackLevel,
|
||||
log: Logger
|
||||
): Initialize[Task[Classpath]] =
|
||||
Def.value {
|
||||
Def.value[Task[Classpath]] {
|
||||
interDependencies(projectRef, deps, conf, self, data, track, false, log)(
|
||||
exportedProductJarsNoTracking,
|
||||
exportedProductJarsIfMissing,
|
||||
|
|
@ -270,7 +270,7 @@ private[sbt] object ClasspathImpl {
|
|||
deps: BuildDependencies,
|
||||
log: Logger
|
||||
): Initialize[Task[Classpath]] =
|
||||
Def.value {
|
||||
Def.value[Task[Classpath]] {
|
||||
interDependencies(
|
||||
projectRef,
|
||||
deps,
|
||||
|
|
@ -346,7 +346,7 @@ private[sbt] object ClasspathImpl {
|
|||
val masterConfs = names(getConfigurations(projectRef, data).toVector)
|
||||
|
||||
for {
|
||||
ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
|
||||
ClasspathDep.ResolvedClasspathDependency(dep, confMapping) <- deps.classpath(p)
|
||||
} {
|
||||
val configurations = getConfigurations(dep, data)
|
||||
val mapping =
|
||||
|
|
|
|||
|
|
@ -13,12 +13,14 @@ import java.nio.file.{ DirectoryNotEmptyException, Files, Path }
|
|||
|
||||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.Project.richInitializeTask
|
||||
// import sbt.Project.richInitializeTask
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.io.syntax._
|
||||
import sbt.nio.Keys._
|
||||
import sbt.nio.file._
|
||||
import sbt.nio.file.syntax._
|
||||
import sbt.nio.file.syntax.pathToPathOps
|
||||
import sbt.nio.file.Glob.{ GlobOps }
|
||||
import sbt.util.Level
|
||||
import sjsonnew.JsonFormat
|
||||
import scala.annotation.nowarn
|
||||
|
|
@ -56,7 +58,7 @@ private[sbt] object Clean {
|
|||
val excludes = (scope / cleanKeepFiles).value.map {
|
||||
// This mimics the legacy behavior of cleanFilesTask
|
||||
case f if f.isDirectory => Glob(f, AnyPath)
|
||||
case f => f.toGlob
|
||||
case f => f.toPath.toGlob
|
||||
} ++ (scope / cleanKeepGlobs).value
|
||||
(p: Path) => excludes.exists(_.matches(p))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@
|
|||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.classpath.AlternativeZincUtil
|
||||
import sbt.internal.inc.{ ScalaInstance, ZincLmUtil }
|
||||
|
|
@ -66,7 +67,7 @@ object ConsoleProject {
|
|||
val terminal = Terminal.get
|
||||
// TODO - Hook up dsl classpath correctly...
|
||||
(new Console(compiler))(
|
||||
unit.classpath,
|
||||
unit.classpath.map(_.toFile),
|
||||
options,
|
||||
initCommands,
|
||||
cleanupCommands,
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger }
|
|||
import sbt.BasicCommandStrings._
|
||||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.Continuous.{ ContinuousState, FileStampRepository }
|
||||
import sbt.internal.LabeledFunctions._
|
||||
|
|
@ -972,17 +973,17 @@ private[sbt] object Continuous extends DeprecatedContinuous {
|
|||
* @param inputs the transitive task inputs (see [[SettingsGraph]])
|
||||
* @param watchSettings the [[WatchSettings]] instance for the task
|
||||
*/
|
||||
private final class Config private[internal] (
|
||||
private final class Config(
|
||||
val command: String,
|
||||
val dynamicInputs: mutable.Set[DynamicInput],
|
||||
val watchSettings: WatchSettings
|
||||
) {
|
||||
val watchSettings: WatchSettings,
|
||||
):
|
||||
def inputs() = dynamicInputs.toSeq.sorted
|
||||
private[sbt] def watchState(count: Int): DeprecatedWatchState =
|
||||
WatchState.empty(inputs().map(_.glob)).withCount(count)
|
||||
|
||||
def arguments(logger: Logger): Arguments = new Arguments(logger, inputs())
|
||||
}
|
||||
end Config
|
||||
|
||||
private def getStartMessage(key: ScopedKey[_])(implicit e: Extracted): StartMessage = Some {
|
||||
lazy val default = key.get(watchStartMessage).getOrElse(Watch.defaultStartWatch)
|
||||
|
|
@ -16,6 +16,7 @@ import sbt.io.{ IO, Path }
|
|||
import sbt.io.syntax._
|
||||
import sbt.Cross._
|
||||
import sbt.Def.{ ScopedKey, Setting }
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.util.complete.DefaultParsers._
|
||||
import sbt.internal.util.AttributeKey
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import java.util.concurrent.{ ConcurrentHashMap, TimeUnit }
|
|||
import java.util.concurrent.atomic.{ AtomicLong, AtomicReference }
|
||||
|
||||
import sbt.Def.{ Classpath, ScopedKey, Setting }
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.Scope.GlobalScope
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.inc.classpath.ClasspathFilter
|
||||
|
|
|
|||
|
|
@ -14,90 +14,91 @@ import sbt.nio.Keys._
|
|||
import sbt.nio.{ FileChanges, FileStamp }
|
||||
|
||||
import scala.annotation.compileTimeOnly
|
||||
import scala.language.experimental.macros
|
||||
import scala.reflect.macros.blackbox
|
||||
import scala.quoted.*
|
||||
|
||||
/**
|
||||
* Provides extension methods to `TaskKey[T]` that can be use to fetch the input and output file
|
||||
* dependency changes for a task. Nothing in this object is intended to be called directly but,
|
||||
* because there are macro definitions, some of the definitions must be public.
|
||||
*/
|
||||
object FileChangesMacro {
|
||||
private[sbt] sealed abstract class TaskOps[T](val taskKey: TaskKey[T]) {
|
||||
object FileChangesMacro:
|
||||
|
||||
extension [A](in: TaskKey[A])
|
||||
@compileTimeOnly(
|
||||
"`inputFileChanges` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
|
||||
)
|
||||
def inputFileChanges: FileChanges = macro changedInputFilesImpl[T]
|
||||
inline def inputFileChanges: FileChanges =
|
||||
${ FileChangesMacro.changedInputFilesImpl[A]('in) }
|
||||
|
||||
@compileTimeOnly(
|
||||
"`outputFileChanges` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
|
||||
)
|
||||
def outputFileChanges: FileChanges = macro changedOutputFilesImpl[T]
|
||||
inline def outputFileChanges: FileChanges =
|
||||
${ FileChangesMacro.changedOutputFilesImpl[A]('in) }
|
||||
|
||||
@compileTimeOnly(
|
||||
"`inputFiles` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
|
||||
)
|
||||
def inputFiles: Seq[NioPath] = macro inputFilesImpl[T]
|
||||
inline def inputFiles: Seq[NioPath] =
|
||||
${ FileChangesMacro.inputFilesImpl[A]('in) }
|
||||
|
||||
@compileTimeOnly(
|
||||
"`outputFiles` can only be called on a task within a task definition macro, such as :=, +=, ++=, or Def.task."
|
||||
)
|
||||
def outputFiles: Seq[NioPath] = macro outputFilesImpl[T]
|
||||
}
|
||||
def changedInputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[FileChanges] = {
|
||||
impl[T](c)(
|
||||
c.universe.reify(allInputFiles),
|
||||
c.universe.reify(changedInputFiles),
|
||||
c.universe.reify(inputFileStamps)
|
||||
inline def outputFiles: Seq[NioPath] =
|
||||
${ FileChangesMacro.outputFilesImpl[A]('in) }
|
||||
|
||||
def changedInputFilesImpl[A: Type](in: Expr[TaskKey[A]])(using qctx: Quotes): Expr[FileChanges] =
|
||||
impl[A](
|
||||
in = in,
|
||||
currentKey = '{ allInputFiles },
|
||||
changeKey = '{ changedInputFiles },
|
||||
mapKey = '{ inputFileStamps },
|
||||
)
|
||||
}
|
||||
def changedOutputFilesImpl[T: c.WeakTypeTag](
|
||||
c: blackbox.Context
|
||||
): c.Expr[FileChanges] = {
|
||||
impl[T](c)(
|
||||
c.universe.reify(allOutputFiles),
|
||||
c.universe.reify(changedOutputFiles),
|
||||
c.universe.reify(outputFileStamps)
|
||||
|
||||
def changedOutputFilesImpl[A: Type](in: Expr[TaskKey[A]])(using qctx: Quotes): Expr[FileChanges] =
|
||||
impl[A](
|
||||
in = in,
|
||||
currentKey = '{ allOutputFiles },
|
||||
changeKey = '{ changedOutputFiles },
|
||||
mapKey = '{ outputFileStamps },
|
||||
)
|
||||
}
|
||||
def rescope[T](left: TaskKey[_], right: TaskKey[T]): TaskKey[T] =
|
||||
|
||||
def rescope[A](left: TaskKey[_], right: TaskKey[A]): TaskKey[A] =
|
||||
Scoped.scopedTask(left.scope.copy(task = Select(left.key)), right.key)
|
||||
def rescope[T](left: Scope, right: TaskKey[T]): TaskKey[T] =
|
||||
|
||||
def rescope[A](left: Scope, right: TaskKey[A]): TaskKey[A] =
|
||||
Scoped.scopedTask(left, right.key)
|
||||
private def impl[T: c.WeakTypeTag](
|
||||
c: blackbox.Context
|
||||
)(
|
||||
currentKey: c.Expr[TaskKey[Seq[NioPath]]],
|
||||
changeKey: c.Expr[TaskKey[Seq[(NioPath, FileStamp)] => FileChanges]],
|
||||
mapKey: c.Expr[TaskKey[Seq[(NioPath, FileStamp)]]]
|
||||
): c.Expr[FileChanges] = {
|
||||
import c.universe._
|
||||
val taskScope = getTaskScope(c)
|
||||
reify {
|
||||
val changes = rescope(taskScope.splice, changeKey.splice).value
|
||||
val current = rescope(taskScope.splice, currentKey.splice).value
|
||||
import sbt.nio.FileStamp.Formats._
|
||||
val previous = Previous.runtimeInEnclosingTask(rescope(taskScope.splice, mapKey.splice)).value
|
||||
|
||||
private def impl[A: Type](
|
||||
in: Expr[TaskKey[A]],
|
||||
currentKey: Expr[TaskKey[Seq[NioPath]]],
|
||||
changeKey: Expr[TaskKey[Seq[(NioPath, FileStamp)] => FileChanges]],
|
||||
mapKey: Expr[TaskKey[Seq[(NioPath, FileStamp)]]],
|
||||
)(using qctx: Quotes): Expr[FileChanges] =
|
||||
import qctx.reflect.*
|
||||
val taskScope = getTaskScope[A](in)
|
||||
'{
|
||||
val ts: Scope = $taskScope
|
||||
val changes = rescope[Seq[(NioPath, FileStamp)] => FileChanges](ts, $changeKey).value
|
||||
val current = rescope[Seq[NioPath]](ts, $currentKey).value
|
||||
import sbt.nio.FileStamp.Formats.*
|
||||
val previous =
|
||||
Previous.runtimeInEnclosingTask(rescope[Seq[(NioPath, FileStamp)]](ts, $mapKey)).value
|
||||
previous.map(changes).getOrElse(FileChanges.noPrevious(current))
|
||||
}
|
||||
}
|
||||
def inputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Seq[NioPath]] = {
|
||||
val taskKey = getTaskScope(c)
|
||||
c.universe.reify(rescope(taskKey.splice, allInputFiles).value)
|
||||
}
|
||||
def outputFilesImpl[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[Seq[NioPath]] = {
|
||||
val taskKey = getTaskScope(c)
|
||||
c.universe.reify(rescope(taskKey.splice, allOutputFiles).value)
|
||||
}
|
||||
private def getTaskScope[T: c.WeakTypeTag](c: blackbox.Context): c.Expr[sbt.Scope] = {
|
||||
import c.universe._
|
||||
val taskTpe = c.weakTypeOf[TaskKey[T]]
|
||||
lazy val err = "Couldn't expand file change macro."
|
||||
c.macroApplication match {
|
||||
case Select(Apply(_, k :: Nil), _) if k.tpe <:< taskTpe =>
|
||||
val expr = c.Expr[TaskKey[T]](k)
|
||||
c.universe.reify {
|
||||
if (expr.splice.scope.task.toOption.isDefined) expr.splice.scope
|
||||
else expr.splice.scope.copy(task = sbt.Select(expr.splice.key))
|
||||
}
|
||||
case _ => c.abort(c.enclosingPosition, err)
|
||||
|
||||
def inputFilesImpl[A: Type](in: Expr[TaskKey[A]])(using qctx: Quotes): Expr[Seq[NioPath]] =
|
||||
val ts = getTaskScope[A](in)
|
||||
'{ rescope[Seq[NioPath]]($ts, allInputFiles).value }
|
||||
|
||||
def outputFilesImpl[A: Type](in: Expr[TaskKey[A]])(using qctx: Quotes): Expr[Seq[NioPath]] =
|
||||
val ts = getTaskScope[A](in)
|
||||
'{ rescope[Seq[NioPath]]($ts, allOutputFiles).value }
|
||||
|
||||
private def getTaskScope[A: Type](in: Expr[TaskKey[A]])(using qctx: Quotes): Expr[sbt.Scope] =
|
||||
'{
|
||||
if $in.scope.task.toOption.isDefined then $in.scope
|
||||
else $in.scope.copy(task = sbt.Select($in.key))
|
||||
}
|
||||
}
|
||||
}
|
||||
end FileChangesMacro
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ import sbt.internal.util.Attributed
|
|||
import Def.{ ScopedKey, Setting }
|
||||
import Keys._
|
||||
import Configurations.{ Compile, Runtime }
|
||||
import sbt.ProjectExtra.{ extract, runUnloadHooks, setProject }
|
||||
import sbt.SlashSyntax0._
|
||||
import java.io.File
|
||||
import org.apache.ivy.core.module.{ descriptor, id }
|
||||
|
|
@ -80,8 +81,6 @@ object GlobalPlugin {
|
|||
val intcp = (Runtime / internalDependencyClasspath).value
|
||||
val prods = (Runtime / exportedProducts).value
|
||||
val depMap = projectDescriptors.value + ivyModule.value.dependencyMapping(state.log)
|
||||
// If we reference it directly (if it's an executionRoot) then it forces an update, which is not what we want.
|
||||
val updateReport = Def.taskDyn { Def.task { update.value } }.value
|
||||
|
||||
GlobalPluginData(
|
||||
projectID.value,
|
||||
|
|
@ -90,7 +89,7 @@ object GlobalPlugin {
|
|||
resolvers.value.toVector,
|
||||
(Runtime / fullClasspath).value,
|
||||
(prods ++ intcp).distinct
|
||||
)(updateReport)
|
||||
)(updateReport.value)
|
||||
}
|
||||
val resolvedTaskInit = taskInit mapReferenced Project.mapScope(Scope replaceThis p)
|
||||
val task = resolvedTaskInit evaluate data
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import Def.ScopedKey
|
|||
import Types.idFun
|
||||
import java.io.File
|
||||
import Scope.Global
|
||||
import sbt.ProjectExtra.*
|
||||
|
||||
object Inspect {
|
||||
sealed trait Mode
|
||||
|
|
@ -87,14 +88,16 @@ object Inspect {
|
|||
import extracted._
|
||||
option match {
|
||||
case Details(actual) =>
|
||||
Project.details(structure, actual, sk.scope, sk.key)
|
||||
Project.details(extracted.structure, actual, sk.scope, sk.key)
|
||||
case DependencyTreeMode =>
|
||||
val basedir = new File(Project.session(s).current.build)
|
||||
Project.settingGraph(structure, basedir, sk).dependsAscii(get(sbt.Keys.asciiGraphWidth))
|
||||
Project
|
||||
.settingGraph(extracted.structure, basedir, sk)
|
||||
.dependsAscii(get(sbt.Keys.asciiGraphWidth))
|
||||
case UsesMode =>
|
||||
Project.showUses(Project.usedBy(structure, true, sk.key))
|
||||
Project.showUses(Project.usedBy(extracted.structure, true, sk.key))
|
||||
case DefinitionsMode =>
|
||||
Project.showDefinitions(sk.key, Project.definitions(structure, true, sk.key))
|
||||
Project.showDefinitions(sk.key, Project.definitions(extracted.structure, true, sk.key))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ private[sbt] object InternalDependencies {
|
|||
val projectDependencies = buildDependencies.value.classpath.get(ref).toSeq.flatten
|
||||
val applicableConfigs = allConfigs + "*"
|
||||
((ref -> allConfigs) +:
|
||||
projectDependencies.flatMap { case ResolvedClasspathDependency(p, rawConfigs) =>
|
||||
projectDependencies.flatMap { case ClasspathDep.ResolvedClasspathDependency(p, rawConfigs) =>
|
||||
val configs = rawConfigs.getOrElse("*->compile").split(";").flatMap { config =>
|
||||
config.split("->") match {
|
||||
case Array(n, c) if applicableConfigs.contains(n) => Some(c)
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ import Configurations.Compile
|
|||
import Def.Setting
|
||||
import Keys._
|
||||
import Scope.Global
|
||||
import sbt.ProjectExtra.{ extract, setProject }
|
||||
import sbt.SlashSyntax0._
|
||||
|
||||
import sbt.io.IO
|
||||
|
|
|
|||
|
|
@ -276,7 +276,7 @@ private[sbt] final class KeyIndex0(val data: BuildIndex) extends ExtendableKeyIn
|
|||
case _ => (None, None)
|
||||
}
|
||||
private[this] def optConfigs(project: Option[ResolvedReference]): Seq[Option[String]] =
|
||||
None +: (configs(project).toSeq map some.fn)
|
||||
None +: (configs(project).toSeq.map(some[String]))
|
||||
|
||||
def addAggregated(scoped: ScopedKey[_], extra: BuildUtil[_]): ExtendableKeyIndex =
|
||||
if (validID(scoped.key.label)) {
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ import sbt.librarymanagement.syntax._
|
|||
import sbt.util.{ CacheStore, CacheStoreFactory, Level, Logger, Tracked }
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
import sbt.Project.richInitializeTask
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sjsonnew.JsonFormat
|
||||
import scala.compat.Platform.EOL
|
||||
import scala.concurrent.duration.FiniteDuration
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import Keys._
|
|||
import Def.{ Setting, ScopedKey }
|
||||
import sbt.internal.util.{ FilePosition, NoPosition, SourcePosition }
|
||||
import java.io.File
|
||||
import ProjectExtra.{ extract, scopedKeyData }
|
||||
import Scope.Global
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.Def._
|
||||
|
|
|
|||
|
|
@ -12,12 +12,13 @@ import sbt.BuildPaths._
|
|||
import sbt.Def.{ ScopeLocal, ScopedKey, Setting, isDummy }
|
||||
import sbt.Keys._
|
||||
import sbt.Project.inScope
|
||||
import sbt.ProjectExtra.{ checkTargets, prefixConfigs, setProject, showLoadingKey, structure }
|
||||
import sbt.Scope.GlobalScope
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.compiler.{ Eval, EvalReporter }
|
||||
import sbt.internal.{ Eval, EvalReporter }
|
||||
import sbt.internal.BuildStreams._
|
||||
import sbt.internal.inc.classpath.ClasspathUtil
|
||||
import sbt.internal.inc.{ ScalaInstance, ZincLmUtil, ZincUtil }
|
||||
import sbt.internal.inc.{ MappedFileConverter, ScalaInstance, ZincLmUtil, ZincUtil }
|
||||
import sbt.internal.server.BuildServerEvalReporter
|
||||
import sbt.internal.util.Attributed.data
|
||||
import sbt.internal.util.Types.const
|
||||
|
|
@ -27,13 +28,14 @@ import sbt.librarymanagement.ivy.{ InlineIvyConfiguration, IvyDependencyResoluti
|
|||
import sbt.librarymanagement.{ Configuration, Configurations, Resolver }
|
||||
import sbt.nio.Settings
|
||||
import sbt.util.{ Logger, Show }
|
||||
import xsbti.VirtualFile
|
||||
import xsbti.compile.{ ClasspathOptionsUtil, Compilers }
|
||||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import java.nio.file.{ Path, Paths }
|
||||
import scala.annotation.{ nowarn, tailrec }
|
||||
import scala.collection.mutable
|
||||
import scala.tools.nsc.reporters.ConsoleReporter
|
||||
// import scala.tools.nsc.reporters.ConsoleReporter
|
||||
|
||||
private[sbt] object Load {
|
||||
// note that there is State passed in but not pulled out
|
||||
|
|
@ -68,6 +70,13 @@ private[sbt] object Load {
|
|||
val scalaProvider = app.provider.scalaProvider
|
||||
val launcher = scalaProvider.launcher
|
||||
val stagingDirectory = getStagingDirectory(state, globalBase).getCanonicalFile
|
||||
val javaHome = Paths.get(sys.props("java.home"))
|
||||
val rootPaths = Map(
|
||||
"BASE" -> baseDirectory.toPath,
|
||||
"SBT_BOOT" -> launcher.bootDirectory.toPath,
|
||||
"IVY_HOME" -> launcher.ivyHome.toPath,
|
||||
"JAVA_HOME" -> javaHome,
|
||||
)
|
||||
val loader = getClass.getClassLoader
|
||||
val classpath = Attributed.blankSeq(provider.mainClasspath ++ scalaProvider.jars)
|
||||
val ivyConfiguration =
|
||||
|
|
@ -115,6 +124,7 @@ private[sbt] object Load {
|
|||
inject,
|
||||
None,
|
||||
Nil,
|
||||
converter = MappedFileConverter(rootPaths, false),
|
||||
log
|
||||
)
|
||||
}
|
||||
|
|
@ -139,28 +149,33 @@ private[sbt] object Load {
|
|||
): LoadBuildConfiguration = {
|
||||
val globalPluginsDir = getGlobalPluginsDirectory(state, globalBase)
|
||||
val withGlobal = loadGlobal(state, base, globalPluginsDir, rawConfig)
|
||||
val globalSettings = configurationSources(getGlobalSettingsDirectory(state, globalBase))
|
||||
val globalSettings: Seq[VirtualFile] =
|
||||
configurationSources(getGlobalSettingsDirectory(state, globalBase))
|
||||
.map(x => rawConfig.converter.toVirtualFile(x.toPath))
|
||||
loadGlobalSettings(base, globalBase, globalSettings, withGlobal)
|
||||
}
|
||||
|
||||
def loadGlobalSettings(
|
||||
base: File,
|
||||
globalBase: File,
|
||||
files: Seq[File],
|
||||
files: Seq[VirtualFile],
|
||||
config: LoadBuildConfiguration
|
||||
): LoadBuildConfiguration = {
|
||||
): LoadBuildConfiguration =
|
||||
val compiled: ClassLoader => Seq[Setting[_]] =
|
||||
if (files.isEmpty || base == globalBase) const(Nil)
|
||||
else buildGlobalSettings(globalBase, files, config)
|
||||
config.copy(injectSettings = config.injectSettings.copy(projectLoaded = compiled))
|
||||
}
|
||||
|
||||
def buildGlobalSettings(
|
||||
base: File,
|
||||
files: Seq[File],
|
||||
files: Seq[VirtualFile],
|
||||
config: LoadBuildConfiguration
|
||||
): ClassLoader => Seq[Setting[_]] = {
|
||||
val eval = mkEval(data(config.globalPluginClasspath), base, defaultEvalOptions)
|
||||
val eval = mkEval(
|
||||
classpath = data(config.globalPluginClasspath).map(_.toPath()),
|
||||
base = base,
|
||||
options = defaultEvalOptions,
|
||||
)
|
||||
|
||||
val imports =
|
||||
BuildUtil.baseImports ++ config.detectedGlobalPlugins.imports
|
||||
|
|
@ -254,7 +269,7 @@ private[sbt] object Load {
|
|||
if (settings.size > 10000) {
|
||||
log.info(s"resolving key references (${settings.size} settings) ...")
|
||||
}
|
||||
Def.makeWithCompiledMap(settings)(
|
||||
Def.makeWithCompiledMap(settings)(using
|
||||
delegates,
|
||||
config.scopeLocal,
|
||||
Project.showLoadingKey(loaded)
|
||||
|
|
@ -274,7 +289,8 @@ private[sbt] object Load {
|
|||
streams,
|
||||
delegates,
|
||||
config.scopeLocal,
|
||||
cMap
|
||||
cMap,
|
||||
config.converter,
|
||||
)
|
||||
(rootEval, bs)
|
||||
}
|
||||
|
|
@ -285,11 +301,12 @@ private[sbt] object Load {
|
|||
// 3. resolvedScoped is replaced with the defining key as a value
|
||||
// Note: this must be idempotent.
|
||||
def finalTransforms(ss: Seq[Setting[_]]): Seq[Setting[_]] = {
|
||||
def mapSpecial(to: ScopedKey[_]) = λ[ScopedKey ~> ScopedKey]((key: ScopedKey[_]) =>
|
||||
if (key.key == streams.key) {
|
||||
ScopedKey(Scope.fillTaskAxis(Scope.replaceThis(to.scope)(key.scope), to.key), key.key)
|
||||
} else key
|
||||
)
|
||||
def mapSpecial(to: ScopedKey[_]): [a] => ScopedKey[a] => ScopedKey[a] =
|
||||
[a] =>
|
||||
(key: ScopedKey[a]) =>
|
||||
if key.key == streams.key then
|
||||
ScopedKey(Scope.fillTaskAxis(Scope.replaceThis(to.scope)(key.scope), to.key), key.key)
|
||||
else key
|
||||
def setDefining[T] =
|
||||
(key: ScopedKey[T], value: T) =>
|
||||
value match {
|
||||
|
|
@ -297,12 +314,12 @@ private[sbt] object Load {
|
|||
case ik: InputTask[t] => ik.mapTask(tk => setDefinitionKey(tk, key)).asInstanceOf[T]
|
||||
case _ => value
|
||||
}
|
||||
def setResolved(defining: ScopedKey[_]) = λ[ScopedKey ~> Option]((key: ScopedKey[_]) =>
|
||||
key.key match {
|
||||
case resolvedScoped.key => Some(defining.asInstanceOf[A1$])
|
||||
case _ => None
|
||||
}
|
||||
)
|
||||
def setResolved(defining: ScopedKey[_]): [a] => ScopedKey[a] => Option[a] =
|
||||
[a] =>
|
||||
(key: ScopedKey[a]) =>
|
||||
key.key match
|
||||
case resolvedScoped.key => Some(defining.asInstanceOf[a])
|
||||
case _ => None
|
||||
ss.map(s =>
|
||||
s mapConstant setResolved(s.key) mapReferenced mapSpecial(s.key) mapInit setDefining
|
||||
)
|
||||
|
|
@ -340,7 +357,7 @@ private[sbt] object Load {
|
|||
): BuildStructure = {
|
||||
val transformed = finalTransforms(newSettings)
|
||||
val (cMap, newData) =
|
||||
Def.makeWithCompiledMap(transformed)(structure.delegates, structure.scopeLocal, display)
|
||||
Def.makeWithCompiledMap(transformed)(using structure.delegates, structure.scopeLocal, display)
|
||||
def extra(index: KeyIndex) = BuildUtil(structure.root, structure.units, index, newData)
|
||||
val newIndex = structureIndex(newData, transformed, extra, structure.units)
|
||||
val newStreams = mkStreams(structure.units, structure.root, newData)
|
||||
|
|
@ -354,6 +371,7 @@ private[sbt] object Load {
|
|||
delegates = structure.delegates,
|
||||
scopeLocal = structure.scopeLocal,
|
||||
compiledMap = cMap,
|
||||
converter = structure.converter,
|
||||
)
|
||||
}
|
||||
|
||||
|
|
@ -421,39 +439,51 @@ private[sbt] object Load {
|
|||
|
||||
def mkEval(unit: BuildUnit): Eval = {
|
||||
val defs = unit.definitions
|
||||
mkEval(defs.target ++ unit.plugins.classpath, defs.base, unit.plugins.pluginData.scalacOptions)
|
||||
mkEval(
|
||||
(defs.target ++ unit.plugins.classpath).map(_.toPath()),
|
||||
defs.base,
|
||||
unit.plugins.pluginData.scalacOptions,
|
||||
)
|
||||
}
|
||||
|
||||
def mkEval(classpath: Seq[File], base: File, options: Seq[String]): Eval =
|
||||
mkEval(classpath, base, options, EvalReporter.console)
|
||||
def mkEval(classpath: Seq[Path], base: File, options: Seq[String]): Eval =
|
||||
mkEval(classpath, base, options, () => EvalReporter.console)
|
||||
|
||||
def mkEval(
|
||||
classpath: Seq[File],
|
||||
classpath: Seq[Path],
|
||||
base: File,
|
||||
options: Seq[String],
|
||||
mkReporter: scala.tools.nsc.Settings => EvalReporter
|
||||
mkReporter: () => EvalReporter,
|
||||
): Eval =
|
||||
new Eval(options, classpath, mkReporter, Some(evalOutputDirectory(base)))
|
||||
new Eval(
|
||||
nonCpOptions = options,
|
||||
classpath = classpath,
|
||||
backingDir = Option(evalOutputDirectory(base).toPath()),
|
||||
mkReporter = Option(() => (mkReporter(): dotty.tools.dotc.reporting.Reporter)),
|
||||
)
|
||||
|
||||
/**
|
||||
* This will clean up left-over files in the config-classes directory if they are no longer used.
|
||||
*
|
||||
* @param base The base directory for the build, should match the one passed into `mkEval` method.
|
||||
*/
|
||||
def cleanEvalClasses(base: File, keep: Seq[File]): Unit = {
|
||||
def cleanEvalClasses(base: File, keep: Seq[Path]): Unit = {
|
||||
val baseTarget = evalOutputDirectory(base)
|
||||
val keepSet = keep.map(_.getCanonicalPath).toSet
|
||||
val keepSet = keep.map(_.toAbsolutePath().normalize()).toSet
|
||||
// If there are no keeper files, this may be because cache was up-to-date and
|
||||
// the files aren't properly returned, even though they should be.
|
||||
// TODO - figure out where the caching of whether or not to generate classfiles occurs, and
|
||||
// put cleanups there, perhaps.
|
||||
if (keepSet.nonEmpty) {
|
||||
def keepFile(f: File) = keepSet(f.getCanonicalPath)
|
||||
def keepFile(f: Path) = keepSet(f.toAbsolutePath().normalize())
|
||||
import sbt.io.syntax._
|
||||
val existing = (baseTarget.allPaths.get).filterNot(_.isDirectory)
|
||||
val existing = (baseTarget.allPaths
|
||||
.get())
|
||||
.filterNot(_.isDirectory)
|
||||
.map(_.toPath())
|
||||
val toDelete = existing.filterNot(keepFile)
|
||||
if (toDelete.nonEmpty) {
|
||||
IO.delete(toDelete)
|
||||
IO.delete(toDelete.map(_.toFile()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -472,7 +502,7 @@ private[sbt] object Load {
|
|||
val newConfig: LoadBuildConfiguration =
|
||||
config.copy(pluginManagement = manager, extraBuilds = Nil)
|
||||
val loader = builtinLoader(s, newConfig)
|
||||
loadURI(IO.directoryURI(root), loader, config.extraBuilds.toList)
|
||||
loadURI(IO.directoryURI(root), loader, config.extraBuilds.toList, newConfig.converter)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -492,12 +522,17 @@ private[sbt] object Load {
|
|||
BuildLoader(components, fail, s, config)
|
||||
}
|
||||
|
||||
private def loadURI(uri: URI, loader: BuildLoader, extra: List[URI]): PartBuild = {
|
||||
private def loadURI(
|
||||
uri: URI,
|
||||
loader: BuildLoader,
|
||||
extra: List[URI],
|
||||
converter: MappedFileConverter,
|
||||
): PartBuild = {
|
||||
IO.assertAbsolute(uri)
|
||||
val (referenced, map, newLoaders) = loadAll(uri +: extra, Map.empty, loader, Map.empty)
|
||||
checkAll(referenced, map)
|
||||
val build = new PartBuild(uri, map)
|
||||
newLoaders transformAll build
|
||||
val build = PartBuild(uri, map, converter)
|
||||
newLoaders.transformAll(build)
|
||||
}
|
||||
|
||||
def addOverrides(unit: BuildUnit, loaders: BuildLoader): BuildLoader =
|
||||
|
|
@ -703,20 +738,27 @@ private[sbt] object Load {
|
|||
|
||||
// NOTE - because we create an eval here, we need a clean-eval later for this URI.
|
||||
lazy val eval = timed("Load.loadUnit: mkEval", log) {
|
||||
def mkReporter(settings: scala.tools.nsc.Settings): EvalReporter =
|
||||
plugs.pluginData.buildTarget match {
|
||||
case None => EvalReporter.console(settings)
|
||||
case Some(buildTarget) =>
|
||||
new BuildServerEvalReporter(buildTarget, new ConsoleReporter(settings))
|
||||
}
|
||||
mkEval(plugs.classpath, defDir, plugs.pluginData.scalacOptions, mkReporter)
|
||||
def mkReporter() = EvalReporter.console
|
||||
// todo:
|
||||
// def mkReporter(settings: scala.tools.nsc.Settings): EvalReporter =
|
||||
// plugs.pluginData.buildTarget match {
|
||||
// case None => EvalReporter.console // (settings)
|
||||
// case Some(buildTarget) =>
|
||||
// new BuildServerEvalReporter(buildTarget, new ConsoleReporter(settings))
|
||||
// }
|
||||
mkEval(
|
||||
classpath = plugs.classpath.map(_.toPath()),
|
||||
defDir,
|
||||
plugs.pluginData.scalacOptions,
|
||||
mkReporter,
|
||||
)
|
||||
}
|
||||
val initialProjects =
|
||||
defsScala.flatMap(b => projectsFromBuild(b, normBase)) ++ buildLevelExtraProjects
|
||||
|
||||
val hasRootAlreadyDefined = defsScala.exists(_.rootProject.isDefined)
|
||||
|
||||
val memoSettings = new mutable.HashMap[File, LoadedSbtFile]
|
||||
val memoSettings = new mutable.HashMap[VirtualFile, LoadedSbtFile]
|
||||
def loadProjects(ps: Seq[Project], createRoot: Boolean) =
|
||||
loadTransitive(
|
||||
ps,
|
||||
|
|
@ -731,7 +773,8 @@ private[sbt] object Load {
|
|||
uri,
|
||||
config.pluginManagement.context,
|
||||
Nil,
|
||||
s.get(BasicKeys.extraMetaSbtFiles).getOrElse(Nil)
|
||||
s.get(BasicKeys.extraMetaSbtFiles).getOrElse(Nil),
|
||||
converter = config.converter,
|
||||
)
|
||||
val loadedProjectsRaw = timed("Load.loadUnit: loadedProjectsRaw", log) {
|
||||
loadProjects(initialProjects, !hasRootAlreadyDefined)
|
||||
|
|
@ -817,7 +860,7 @@ private[sbt] object Load {
|
|||
// Lame hackery to keep track of our state.
|
||||
private[this] case class LoadedProjects(
|
||||
projects: Seq[Project],
|
||||
generatedConfigClassFiles: Seq[File]
|
||||
generatedConfigClassFiles: Seq[Path],
|
||||
)
|
||||
|
||||
/**
|
||||
|
|
@ -857,17 +900,18 @@ private[sbt] object Load {
|
|||
eval: () => Eval,
|
||||
injectSettings: InjectSettings,
|
||||
acc: Seq[Project],
|
||||
memoSettings: mutable.Map[File, LoadedSbtFile],
|
||||
memoSettings: mutable.Map[VirtualFile, LoadedSbtFile],
|
||||
log: Logger,
|
||||
makeOrDiscoverRoot: Boolean,
|
||||
buildUri: URI,
|
||||
context: PluginManagement.Context,
|
||||
generatedConfigClassFiles: Seq[File],
|
||||
extraSbtFiles: Seq[File]
|
||||
generatedConfigClassFiles: Seq[Path],
|
||||
extraSbtFiles: Seq[VirtualFile],
|
||||
converter: MappedFileConverter,
|
||||
): LoadedProjects =
|
||||
/*timed(s"Load.loadTransitive(${ newProjects.map(_.id) })", log)*/ {
|
||||
|
||||
def load(newProjects: Seq[Project], acc: Seq[Project], generated: Seq[File]) = {
|
||||
def load(newProjects: Seq[Project], acc: Seq[Project], generated: Seq[Path]) = {
|
||||
loadTransitive(
|
||||
newProjects,
|
||||
buildBase,
|
||||
|
|
@ -881,7 +925,8 @@ private[sbt] object Load {
|
|||
buildUri,
|
||||
context,
|
||||
generated,
|
||||
Nil
|
||||
Nil,
|
||||
converter,
|
||||
)
|
||||
}
|
||||
|
||||
|
|
@ -894,7 +939,7 @@ private[sbt] object Load {
|
|||
val extraFiles =
|
||||
if (base == buildBase && isMetaBuildContext(context)) extraSbtFiles
|
||||
else Nil
|
||||
discoverProjects(auto, base, extraFiles, plugins, eval, memoSettings)
|
||||
discoverProjects(auto, base, extraFiles, plugins, eval, memoSettings, converter)
|
||||
}
|
||||
|
||||
// Step two:
|
||||
|
|
@ -903,8 +948,8 @@ private[sbt] object Load {
|
|||
// c. Finalize a project with all its settings/configuration.
|
||||
def finalizeProject(
|
||||
p: Project,
|
||||
files: Seq[File],
|
||||
extraFiles: Seq[File],
|
||||
files: Seq[VirtualFile],
|
||||
extraFiles: Seq[VirtualFile],
|
||||
expand: Boolean
|
||||
): (Project, Seq[Project]) = {
|
||||
val configFiles = files.flatMap(f => memoSettings.get(f))
|
||||
|
|
@ -913,7 +958,16 @@ private[sbt] object Load {
|
|||
try plugins.detected.deducePluginsFromProject(p1, log)
|
||||
catch { case e: AutoPluginException => throw translateAutoPluginException(e, p) }
|
||||
val p2 =
|
||||
resolveProject(p1, autoPlugins, plugins, injectSettings, memoSettings, extraFiles, log)
|
||||
resolveProject(
|
||||
p1,
|
||||
autoPlugins,
|
||||
plugins,
|
||||
injectSettings,
|
||||
memoSettings,
|
||||
extraFiles,
|
||||
converter,
|
||||
log
|
||||
)
|
||||
val projectLevelExtra =
|
||||
if (expand) {
|
||||
autoPlugins.flatMap(
|
||||
|
|
@ -1004,9 +1058,9 @@ private[sbt] object Load {
|
|||
private[this] case class DiscoveredProjects(
|
||||
root: Option[Project],
|
||||
nonRoot: Seq[Project],
|
||||
sbtFiles: Seq[File],
|
||||
extraSbtFiles: Seq[File],
|
||||
generatedFiles: Seq[File]
|
||||
sbtFiles: Seq[VirtualFile],
|
||||
extraSbtFiles: Seq[VirtualFile],
|
||||
generatedFiles: Seq[Path]
|
||||
)
|
||||
|
||||
/**
|
||||
|
|
@ -1028,8 +1082,9 @@ private[sbt] object Load {
|
|||
projectPlugins: Seq[AutoPlugin],
|
||||
loadedPlugins: LoadedPlugins,
|
||||
globalUserSettings: InjectSettings,
|
||||
memoSettings: mutable.Map[File, LoadedSbtFile],
|
||||
extraSbtFiles: Seq[File],
|
||||
memoSettings: mutable.Map[VirtualFile, LoadedSbtFile],
|
||||
extraSbtFiles: Seq[VirtualFile],
|
||||
converter: MappedFileConverter,
|
||||
log: Logger
|
||||
): Project =
|
||||
timed(s"Load.resolveProject(${p.id})", log) {
|
||||
|
|
@ -1040,35 +1095,37 @@ private[sbt] object Load {
|
|||
val allSettings = {
|
||||
// TODO - This mechanism of applying settings could be off... It's in two places now...
|
||||
lazy val defaultSbtFiles = configurationSources(p.base)
|
||||
lazy val sbtFiles = defaultSbtFiles ++ extraSbtFiles
|
||||
.map(_.toPath())
|
||||
.map(converter.toVirtualFile)
|
||||
lazy val sbtFiles: Seq[VirtualFile] = defaultSbtFiles ++ extraSbtFiles
|
||||
// Filter the AutoPlugin settings we included based on which ones are
|
||||
// intended in the AddSettings.AutoPlugins filter.
|
||||
def autoPluginSettings(f: AutoPlugins) =
|
||||
projectPlugins.filter(f.include).flatMap(_.projectSettings)
|
||||
// Grab all the settings we already loaded from sbt files
|
||||
def settings(files: Seq[File]): Seq[Setting[_]] = {
|
||||
def settings(files: Seq[VirtualFile]): Seq[Setting[_]] = {
|
||||
if (files.nonEmpty)
|
||||
log.info(
|
||||
s"${files.map(_.getName).mkString(s"loading settings for project ${p.id} from ", ",", " ...")}"
|
||||
s"${files.map(_.name()).mkString(s"loading settings for project ${p.id} from ", ",", " ...")}"
|
||||
)
|
||||
for {
|
||||
file <- files
|
||||
config <- (memoSettings get file).toSeq
|
||||
config <- memoSettings.get(file).toSeq
|
||||
setting <- config.settings
|
||||
} yield setting
|
||||
}
|
||||
// Expand the AddSettings instance into a real Seq[Setting[_]] we'll use on the project
|
||||
def expandSettings(auto: AddSettings): Seq[Setting[_]] = auto match {
|
||||
case BuildScalaFiles => p.settings
|
||||
case User => globalUserSettings.cachedProjectLoaded(loadedPlugins.loader)
|
||||
case sf: SbtFiles => settings(sf.files.map(f => IO.resolve(p.base, f)))
|
||||
case sf: DefaultSbtFiles => settings(sbtFiles.filter(sf.include))
|
||||
case p: AutoPlugins => autoPluginSettings(p)
|
||||
case q: Sequence =>
|
||||
q.sequence.foldLeft(Seq.empty[Setting[_]]) { (b, add) =>
|
||||
b ++ expandSettings(add)
|
||||
}
|
||||
}
|
||||
def expandSettings(auto: AddSettings): Seq[Setting[_]] =
|
||||
auto match
|
||||
case BuildScalaFiles => p.settings
|
||||
case User => globalUserSettings.cachedProjectLoaded(loadedPlugins.loader)
|
||||
// case sf: SbtFiles => settings(sf.files.map(f => IO.resolve(p.base, f)))
|
||||
case sf: DefaultSbtFiles => settings(sbtFiles.filter(sf.include))
|
||||
case p: AutoPlugins => autoPluginSettings(p)
|
||||
case q: Sequence =>
|
||||
q.sequence.foldLeft(Seq.empty[Setting[_]]) { (b, add) =>
|
||||
b ++ expandSettings(add)
|
||||
}
|
||||
val auto = AddSettings.allDefaults
|
||||
expandSettings(auto)
|
||||
}
|
||||
|
|
@ -1089,14 +1146,17 @@ private[sbt] object Load {
|
|||
private[this] def discoverProjects(
|
||||
auto: AddSettings,
|
||||
projectBase: File,
|
||||
extraSbtFiles: Seq[File],
|
||||
extraSbtFiles: Seq[VirtualFile],
|
||||
loadedPlugins: LoadedPlugins,
|
||||
eval: () => Eval,
|
||||
memoSettings: mutable.Map[File, LoadedSbtFile]
|
||||
memoSettings: mutable.Map[VirtualFile, LoadedSbtFile],
|
||||
converter: MappedFileConverter,
|
||||
): DiscoveredProjects = {
|
||||
|
||||
// Default sbt files to read, if needed
|
||||
lazy val defaultSbtFiles = configurationSources(projectBase)
|
||||
.map(_.toPath)
|
||||
.map(converter.toVirtualFile)
|
||||
lazy val sbtFiles = defaultSbtFiles ++ extraSbtFiles
|
||||
|
||||
// Classloader of the build
|
||||
|
|
@ -1105,11 +1165,11 @@ private[sbt] object Load {
|
|||
// How to load an individual file for use later.
|
||||
// TODO - We should import vals defined in other sbt files here, if we wish to
|
||||
// share. For now, build.sbt files have their own unique namespace.
|
||||
def loadSettingsFile(src: File): LoadedSbtFile =
|
||||
def loadSettingsFile(src: VirtualFile): LoadedSbtFile =
|
||||
EvaluateConfigurations.evaluateSbtFile(
|
||||
eval(),
|
||||
src,
|
||||
IO.readLines(src),
|
||||
IO.readStream(src.input()).linesIterator.toList,
|
||||
loadedPlugins.detected.imports,
|
||||
0
|
||||
)(loader)
|
||||
|
|
@ -1119,7 +1179,7 @@ private[sbt] object Load {
|
|||
}
|
||||
// Loads a given file, or pulls from the cache.
|
||||
|
||||
def memoLoadSettingsFile(src: File): LoadedSbtFile =
|
||||
def memoLoadSettingsFile(src: VirtualFile): LoadedSbtFile =
|
||||
memoSettings.getOrElse(
|
||||
src, {
|
||||
val lf = loadSettingsFile(src)
|
||||
|
|
@ -1129,20 +1189,31 @@ private[sbt] object Load {
|
|||
)
|
||||
|
||||
// Loads a set of sbt files, sorted by their lexical name (current behavior of sbt).
|
||||
def loadFiles(fs: Seq[File]): LoadedSbtFile =
|
||||
merge(fs.sortBy(_.getName).map(memoLoadSettingsFile))
|
||||
def loadFiles(fs: Seq[VirtualFile]): LoadedSbtFile =
|
||||
merge(
|
||||
fs.sortBy(_.name())
|
||||
.map(memoLoadSettingsFile)
|
||||
)
|
||||
|
||||
// Finds all the build files associated with this project
|
||||
import AddSettings.{ DefaultSbtFiles, SbtFiles, Sequence }
|
||||
def associatedFiles(auto: AddSettings): Seq[File] = auto match {
|
||||
case sf: SbtFiles => sf.files.map(f => IO.resolve(projectBase, f)).filterNot(_.isHidden)
|
||||
case sf: DefaultSbtFiles => sbtFiles.filter(sf.include).filterNot(_.isHidden)
|
||||
case q: Sequence =>
|
||||
q.sequence.foldLeft(Seq.empty[File]) { (b, add) =>
|
||||
b ++ associatedFiles(add)
|
||||
}
|
||||
case _ => Seq.empty
|
||||
}
|
||||
import AddSettings.{ DefaultSbtFiles, Sequence }
|
||||
def associatedFiles(auto: AddSettings): Seq[VirtualFile] =
|
||||
auto match
|
||||
// case sf: SbtFiles =>
|
||||
// sf.files
|
||||
// .map(f => IO.resolve(projectBase, f))
|
||||
// .filterNot(_.isHidden)
|
||||
// .map(_.toPath)
|
||||
case sf: DefaultSbtFiles =>
|
||||
sbtFiles
|
||||
// .filter(sf.include)
|
||||
// .filterNot(_.isHidden)
|
||||
// .map(_.toPath)
|
||||
case q: Sequence =>
|
||||
q.sequence.foldLeft(Seq.empty[VirtualFile]) { (b, add) =>
|
||||
b ++ associatedFiles(add)
|
||||
}
|
||||
case _ => Seq.empty
|
||||
val rawFiles = associatedFiles(auto)
|
||||
val loadedFiles = loadFiles(rawFiles)
|
||||
val rawProjects = loadedFiles.projects
|
||||
|
|
@ -1223,7 +1294,7 @@ private[sbt] object Load {
|
|||
|
||||
def plugins(dir: File, s: State, config: LoadBuildConfiguration): LoadedPlugins = {
|
||||
val context = config.pluginManagement.context
|
||||
val extraSbtFiles: Seq[File] =
|
||||
val extraSbtFiles: Seq[VirtualFile] =
|
||||
if (isMetaBuildContext(context)) s.get(BasicKeys.extraMetaSbtFiles).getOrElse(Nil)
|
||||
else Nil
|
||||
if (hasDefinition(dir) || extraSbtFiles.nonEmpty)
|
||||
|
|
@ -1237,7 +1308,7 @@ private[sbt] object Load {
|
|||
|
||||
def hasDefinition(dir: File): Boolean = {
|
||||
import sbt.io.syntax._
|
||||
(dir * -GlobFilter(DefaultTargetName)).get.nonEmpty
|
||||
(dir * -GlobFilter(DefaultTargetName)).get().nonEmpty
|
||||
}
|
||||
|
||||
def noPlugins(dir: File, config: LoadBuildConfiguration): LoadedPlugins =
|
||||
|
|
@ -1310,7 +1381,7 @@ private[sbt] object Load {
|
|||
else {
|
||||
// Load only the dependency classpath for the common plugin classloader
|
||||
val loader = manager.loader
|
||||
loader.add(Path.toURLs(data(dependencyClasspath)))
|
||||
loader.add(sbt.io.Path.toURLs(data(dependencyClasspath)))
|
||||
loader
|
||||
}
|
||||
}
|
||||
|
|
@ -1425,6 +1496,7 @@ final case class LoadBuildConfiguration(
|
|||
injectSettings: Load.InjectSettings,
|
||||
globalPlugin: Option[GlobalPlugin],
|
||||
extraBuilds: Seq[URI],
|
||||
converter: MappedFileConverter,
|
||||
log: Logger
|
||||
) {
|
||||
lazy val globalPluginClasspath: Def.Classpath =
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ package internal
|
|||
|
||||
import sbt.Def.ScopedKey
|
||||
import sbt.Keys._
|
||||
import sbt.ProjectExtra.showContextKey
|
||||
import sbt.Scope.Global
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.util.MainAppender._
|
||||
|
|
|
|||
|
|
@ -25,23 +25,23 @@ object Output {
|
|||
final val DefaultTail = "> "
|
||||
|
||||
def last(
|
||||
keys: Values[_],
|
||||
keys: Values[Any],
|
||||
streams: Streams,
|
||||
printLines: Seq[String] => Unit,
|
||||
sid: Option[String]
|
||||
)(implicit display: Show[ScopedKey[_]]): Unit =
|
||||
)(using display: Show[ScopedKey[_]]): Unit =
|
||||
printLines(flatLines(lastLines(keys, streams, sid))(idFun))
|
||||
|
||||
def last(file: File, printLines: Seq[String] => Unit, tailDelim: String = DefaultTail): Unit =
|
||||
printLines(tailLines(file, tailDelim))
|
||||
|
||||
def lastGrep(
|
||||
keys: Values[_],
|
||||
keys: Values[Any],
|
||||
streams: Streams,
|
||||
patternString: String,
|
||||
printLines: Seq[String] => Unit
|
||||
)(implicit display: Show[ScopedKey[_]]): Unit = {
|
||||
val pattern = Pattern compile patternString
|
||||
)(using display: Show[ScopedKey[_]]): Unit = {
|
||||
val pattern = Pattern.compile(patternString)
|
||||
val lines = flatLines(lastLines(keys, streams))(_ flatMap showMatches(pattern))
|
||||
printLines(lines)
|
||||
}
|
||||
|
|
@ -68,7 +68,7 @@ object Output {
|
|||
}
|
||||
|
||||
def lastLines(
|
||||
keys: Values[_],
|
||||
keys: Values[Any],
|
||||
streams: Streams,
|
||||
sid: Option[String] = None
|
||||
): Values[Seq[String]] = {
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ object PluginDiscovery {
|
|||
|
||||
/** Discovers and loads the sbt-plugin-related top-level modules from the classpath and source analysis in `data` and using the provided class `loader`. */
|
||||
def discoverAll(data: PluginData, loader: ClassLoader): DetectedPlugins = {
|
||||
def discover[T](resource: String)(implicit classTag: reflect.ClassTag[T]) =
|
||||
def discover[T](resource: String)(implicit manifest: Manifest[T]) =
|
||||
binarySourceModules[T](data, loader, resource)
|
||||
import Paths._
|
||||
// TODO - Fix this once we can autodetect AutoPlugins defined by sbt itself.
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ package internal
|
|||
import sbt.internal.util.{ AttributeKey, Dag, Relation, Util }
|
||||
import sbt.util.Logger
|
||||
|
||||
import sbt.ProjectExtra.*
|
||||
import Def.Setting
|
||||
import sbt.SlashSyntax0._
|
||||
import Plugins._
|
||||
|
|
|
|||
|
|
@ -10,9 +10,9 @@ package internal
|
|||
|
||||
import java.net.URI
|
||||
import sbt.internal.util.complete, complete.{ DefaultParsers, Parser }, DefaultParsers._
|
||||
import sbt.compiler.Eval
|
||||
import sbt.internal.Eval
|
||||
import Keys.sessionSettings
|
||||
import Project.updateCurrent
|
||||
import sbt.ProjectExtra.{ extract, updateCurrent }
|
||||
|
||||
object ProjectNavigation {
|
||||
def command(s: State): Parser[() => State] =
|
||||
|
|
@ -21,12 +21,12 @@ object ProjectNavigation {
|
|||
}
|
||||
|
||||
final class ProjectNavigation(s: State) {
|
||||
val extracted: Extracted = Project extract s
|
||||
val extracted: Extracted = Project.extract(s)
|
||||
import extracted.{ currentRef, structure, session }
|
||||
|
||||
def setProject(nuri: URI, nid: String): State = {
|
||||
val neval = if (currentRef.build == nuri) session.currentEval else mkEval(nuri)
|
||||
updateCurrent(s.put(sessionSettings, session.setCurrent(nuri, nid, neval)))
|
||||
Project.updateCurrent(s.put(sessionSettings, session.setCurrent(nuri, nid, neval)))
|
||||
}
|
||||
|
||||
def mkEval(nuri: URI): () => Eval = Load.lazyEval(structure.units(nuri).unit)
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ import Keys._
|
|||
import EvaluateConfigurations.{ evaluateConfiguration => evaluate }
|
||||
import Configurations.Compile
|
||||
import Scope.Global
|
||||
import sbt.ProjectExtra.{ extract, setProject }
|
||||
import sbt.SlashSyntax0._
|
||||
|
||||
import sbt.io.{ Hash, IO }
|
||||
|
|
@ -46,10 +47,11 @@ object Script {
|
|||
val (eval, structure) = Load.defaultLoad(state, base, state.log)
|
||||
val session = Load.initialSession(structure, eval)
|
||||
val extracted = Project.extract(session, structure)
|
||||
import extracted._
|
||||
val vf = structure.converter.toVirtualFile(script.toPath())
|
||||
import extracted.*
|
||||
|
||||
val embeddedSettings = blocks(script).flatMap { block =>
|
||||
evaluate(eval(), script, block.lines, currentUnit.imports, block.offset + 1)(currentLoader)
|
||||
evaluate(eval(), vf, block.lines, currentUnit.imports, block.offset + 1)(currentLoader)
|
||||
}
|
||||
val scriptAsSource = (Compile / sources) := script :: Nil
|
||||
val asScript = scalacOptions ++= Seq("-Xscript", script.getName.stripSuffix(".scala"))
|
||||
|
|
|
|||
|
|
@ -12,13 +12,11 @@ import sbt.internal.util.{ complete, LineRange, RangePosition, Types }
|
|||
|
||||
import java.io.File
|
||||
import java.net.URI
|
||||
import sbt.ProjectExtra.extract
|
||||
import Def.{ ScopedKey, Setting }
|
||||
import Types.Endo
|
||||
import compiler.Eval
|
||||
|
||||
import SessionSettings._
|
||||
import sbt.ProjectExtra.{ extract, getProject, session, structure }
|
||||
import sbt.internal.parser.SbtRefactorings
|
||||
|
||||
import sbt.io.IO
|
||||
|
||||
/**
|
||||
|
|
@ -93,19 +91,24 @@ final case class SessionSettings(
|
|||
private[this] def merge(map: SessionMap): Seq[Setting[_]] =
|
||||
map.values.toSeq.flatten[SessionSetting].map(_._1)
|
||||
|
||||
private[this] def modify(map: SessionMap, onSeq: Endo[Seq[SessionSetting]]): SessionMap = {
|
||||
private[this] def modify(
|
||||
map: SessionMap,
|
||||
onSeq: Seq[SessionSetting] => Seq[SessionSetting],
|
||||
): SessionMap = {
|
||||
val cur = current
|
||||
map.updated(cur, onSeq(map.getOrElse(cur, Nil)))
|
||||
}
|
||||
}
|
||||
|
||||
object SessionSettings {
|
||||
object SessionSettings:
|
||||
|
||||
/** A session setting is simply a tuple of a Setting[_] and the strings which define it. */
|
||||
type SessionSetting = (Setting[_], Seq[String])
|
||||
type SessionSetting = sbt.internal.parser.SbtRefactorings.SessionSetting
|
||||
// (Setting[_], Seq[String])
|
||||
|
||||
type SessionMap = Map[ProjectRef, Seq[SessionSetting]]
|
||||
type SbtConfigFile = (File, Seq[String])
|
||||
type SbtConfigFile = sbt.internal.parser.SbtRefactorings.SbtConfigFile
|
||||
// (File, Seq[String])
|
||||
|
||||
/**
|
||||
* This will re-evaluate all Setting[_]'s on this session against the current build state and
|
||||
|
|
@ -133,14 +136,12 @@ object SessionSettings {
|
|||
* @param f A function which takes the current SessionSettings and returns the new build state.
|
||||
* @return The new build state
|
||||
*/
|
||||
def withSettings(s: State)(f: SessionSettings => State): State = {
|
||||
val extracted = Project extract s
|
||||
import extracted._
|
||||
if (session.append.isEmpty) {
|
||||
def withSettings(s: State)(f: SessionSettings => State): State =
|
||||
val extracted = Project.extract(s)
|
||||
if (extracted.session.append.isEmpty) {
|
||||
s.log.info("No session settings defined.")
|
||||
s
|
||||
} else f(session)
|
||||
}
|
||||
} else f(extracted.session)
|
||||
|
||||
/** Adds `s` to a strings when needed. Maybe one day we'll care about non-english languages. */
|
||||
def pluralize(size: Int, of: String) = size.toString + (if (size == 1) of else (of + "s"))
|
||||
|
|
@ -356,4 +357,4 @@ save, save-all
|
|||
case c: Clear => if (c.all) clearAllSettings(s) else clearSettings(s)
|
||||
case r: Remove => removeSettings(s, r.ranges)
|
||||
}
|
||||
}
|
||||
end SessionSettings
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ import sbt.internal.util.{ AttributeKey, complete, Relation, Settings, Types, Ut
|
|||
import sbt.util.Show
|
||||
import sbt.librarymanagement.Configuration
|
||||
|
||||
import Project._
|
||||
import ProjectExtra.{ relation }
|
||||
import Def.{ ScopedKey, Setting }
|
||||
import Scope.Global
|
||||
import Types.idFun
|
||||
|
|
@ -41,13 +41,13 @@ private[sbt] object SettingCompletions {
|
|||
*/
|
||||
def setAll(extracted: Extracted, settings: Seq[Setting[_]]): SetResult = {
|
||||
import extracted._
|
||||
val r = relation(extracted.structure, true)
|
||||
val r = Project.relation(extracted.structure, true)
|
||||
val allDefs = Def
|
||||
.flattenLocals(
|
||||
Def.compiled(extracted.structure.settings, true)(
|
||||
Def.compiled(extracted.structure.settings, true)(using
|
||||
structure.delegates,
|
||||
structure.scopeLocal,
|
||||
implicitly[Show[ScopedKey[_]]]
|
||||
implicitly[Show[ScopedKey[_]]],
|
||||
)
|
||||
)
|
||||
.keys
|
||||
|
|
@ -81,10 +81,10 @@ private[sbt] object SettingCompletions {
|
|||
val append =
|
||||
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
|
||||
val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))
|
||||
val r = relation(newSession.mergeSettings, true)(
|
||||
val r = Project.relation(newSession.mergeSettings, true)(using
|
||||
structure.delegates,
|
||||
structure.scopeLocal,
|
||||
implicitly
|
||||
summon[Show[ScopedKey[_]]],
|
||||
)
|
||||
setResult(newSession, r, append)
|
||||
}
|
||||
|
|
@ -12,16 +12,16 @@ import sbt.util.Show
|
|||
import java.io.File
|
||||
|
||||
import Def.{ ScopedKey, compiled, flattenLocals }
|
||||
|
||||
import Predef.{ any2stringadd => _, _ }
|
||||
import sbt.ProjectExtra.scopedKeyData
|
||||
import sbt.io.IO
|
||||
|
||||
object SettingGraph {
|
||||
def apply(structure: BuildStructure, basedir: File, scoped: ScopedKey[_], generation: Int)(
|
||||
implicit display: Show[ScopedKey[_]]
|
||||
def apply(structure: BuildStructure, basedir: File, scoped: ScopedKey[_], generation: Int)(using
|
||||
display: Show[ScopedKey[_]]
|
||||
): SettingGraph = {
|
||||
val cMap = flattenLocals(
|
||||
compiled(structure.settings, false)(structure.delegates, structure.scopeLocal, display)
|
||||
compiled(structure.settings, false)(using structure.delegates, structure.scopeLocal, display)
|
||||
)
|
||||
def loop(scoped: ScopedKey[_], generation: Int): SettingGraph = {
|
||||
val key = scoped.key
|
||||
|
|
|
|||
|
|
@ -90,7 +90,7 @@ private[sbt] class TaskProgress(
|
|||
}
|
||||
Util.ignoreResult(pending.add(executor.submit(runnable)))
|
||||
}
|
||||
override def beforeWork(task: Task[_]): Unit =
|
||||
override def beforeWork(task: Task[Any]): Unit =
|
||||
if (!closed.get) {
|
||||
super.beforeWork(task)
|
||||
reportLoop.get match {
|
||||
|
|
@ -108,7 +108,7 @@ private[sbt] class TaskProgress(
|
|||
logger.debug(s"called beforeWork for ${taskName(task)} after task progress was closed")
|
||||
}
|
||||
|
||||
override def afterReady(task: Task[_]): Unit =
|
||||
override def afterReady(task: Task[Any]): Unit =
|
||||
if (!closed.get) {
|
||||
try {
|
||||
Util.ignoreResult(executor.submit((() => {
|
||||
|
|
|
|||
|
|
@ -50,7 +50,7 @@ private[sbt] final class TaskTimings(reportOnShutdown: Boolean, logger: Logger)
|
|||
start = System.nanoTime
|
||||
}
|
||||
|
||||
override def afterReady(task: Task[_]): Unit = ()
|
||||
override def afterReady(task: Task[Any]): Unit = ()
|
||||
override def afterCompleted[T](task: Task[T], result: Result[T]): Unit = ()
|
||||
override def afterAllCompleted(results: RMap[Task, Result]): Unit =
|
||||
if (!reportOnShutdown) {
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ private[sbt] final class TaskTraceEvent
|
|||
private[this] val console = ConsoleOut.systemOut
|
||||
|
||||
override def initial(): Unit = ()
|
||||
override def afterReady(task: Task[_]): Unit = ()
|
||||
override def afterReady(task: Task[Any]): Unit = ()
|
||||
override def afterCompleted[T](task: Task[T], result: Result[T]): Unit = ()
|
||||
override def afterAllCompleted(results: RMap[Task, Result]): Unit = ()
|
||||
override def stop(): Unit = ()
|
||||
|
|
|
|||
|
|
@ -5,12 +5,13 @@
|
|||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
package sbt.internal
|
||||
package sbt
|
||||
package internal
|
||||
|
||||
import sbt.Def._
|
||||
import sbt.Keys._
|
||||
import sbt.Project.richInitializeTask
|
||||
import sbt._
|
||||
// import sbt.Project.richInitializeTask
|
||||
import sbt.ProjectExtra.{ delegates, extract, richInitializeTask }
|
||||
import sbt.internal.io.Source
|
||||
import sbt.internal.nio.Globs
|
||||
import sbt.internal.util.AttributeMap
|
||||
|
|
@ -59,22 +60,26 @@ private[sbt] object WatchTransitiveDependencies {
|
|||
scopedKey: ScopedKey[_],
|
||||
extracted: Extracted,
|
||||
compiledMap: CompiledMap
|
||||
): Def.Initialize[Task[Arguments]] = Def.task {
|
||||
val log = (streamsManager map { mgr =>
|
||||
val stream = mgr(scopedKey)
|
||||
stream.open()
|
||||
stream
|
||||
}).value.log
|
||||
val configs = (internalDependencyConfigurations in scopedKey.scope).value
|
||||
new Arguments(
|
||||
scopedKey,
|
||||
extracted,
|
||||
compiledMap,
|
||||
log,
|
||||
configs,
|
||||
state.value
|
||||
)
|
||||
}
|
||||
): Def.Initialize[Task[Arguments]] =
|
||||
import sbt.TupleSyntax.*
|
||||
(
|
||||
(streamsManager map { mgr =>
|
||||
val stream = mgr(scopedKey)
|
||||
stream.open()
|
||||
stream
|
||||
}).toTaskable,
|
||||
(internalDependencyConfigurations in scopedKey.scope).toTaskable,
|
||||
state,
|
||||
).mapN { case (log, configs, st) =>
|
||||
new Arguments(
|
||||
scopedKey,
|
||||
extracted,
|
||||
compiledMap,
|
||||
log.log,
|
||||
configs,
|
||||
st
|
||||
)
|
||||
}
|
||||
private val ShowTransitive = "(?:show)?(?:[ ]*)(.*)/(?:[ ]*)transitive(?:Inputs|Globs|Triggers)".r
|
||||
private def arguments: Def.Initialize[Task[Arguments]] =
|
||||
Def
|
||||
|
|
@ -149,8 +154,8 @@ private[sbt] object WatchTransitiveDependencies {
|
|||
case Some(k) =>
|
||||
k.work match {
|
||||
// Avoid extracted.runTask if possible.
|
||||
case Pure(w, _) => Some(Right(w().map(_.toGlob)))
|
||||
case _ => Some(Left(s))
|
||||
case Action.Pure(w, _) => Some(Right(w().map(_.toGlob)))
|
||||
case _ => Some(Left(s))
|
||||
}
|
||||
case _ => None
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,21 +13,26 @@ import java.io.File
|
|||
import sjsonnew._
|
||||
import scala.collection.mutable.{ HashMap, MultiMap, Set }
|
||||
|
||||
private[sbt] case class GraphModuleId(organization: String, name: String, version: String) {
|
||||
private[sbt] case class GraphModuleId(
|
||||
organization: String,
|
||||
name: String,
|
||||
version: String,
|
||||
) {
|
||||
def idString: String = organization + ":" + name + ":" + version
|
||||
}
|
||||
|
||||
private[sbt] object GraphModuleId {
|
||||
private[sbt] object GraphModuleId:
|
||||
import sjsonnew.BasicJsonProtocol.StringJsonFormat
|
||||
implicit val graphModuleIdIso = LList.iso[GraphModuleId, String :*: String :*: String :*: LNil](
|
||||
{ (m: GraphModuleId) =>
|
||||
("organization", m.organization) :*: ("name", m.name) :*: ("version", m.version) :*: LNil
|
||||
},
|
||||
{ case (_, organization) :*: (_, name) :*: (_, version) :*: LNil =>
|
||||
GraphModuleId(organization, name, version)
|
||||
}
|
||||
)
|
||||
}
|
||||
given graphModuleIdIso: IsoLList.Aux[GraphModuleId, String :*: String :*: String :*: LNil] =
|
||||
LList.iso[GraphModuleId, String :*: String :*: String :*: LNil](
|
||||
{ (m: GraphModuleId) =>
|
||||
("organization", m.organization) :*: ("name", m.name) :*: ("version", m.version) :*: LNil
|
||||
},
|
||||
{ case (_, organization) :*: (_, name) :*: (_, version) :*: LNil =>
|
||||
GraphModuleId(organization, name, version)
|
||||
}
|
||||
)
|
||||
end GraphModuleId
|
||||
|
||||
private[sbt] case class Module(
|
||||
id: GraphModuleId,
|
||||
|
|
@ -36,15 +41,21 @@ private[sbt] case class Module(
|
|||
evictedByVersion: Option[String] = None,
|
||||
jarFile: Option[File] = None,
|
||||
error: Option[String] = None
|
||||
) {
|
||||
):
|
||||
def hadError: Boolean = error.isDefined
|
||||
def isUsed: Boolean = !isEvicted
|
||||
def isEvicted: Boolean = evictedByVersion.isDefined
|
||||
}
|
||||
end Module
|
||||
|
||||
private[sbt] object Module {
|
||||
import sjsonnew.BasicJsonProtocol._
|
||||
implicit val moduleIso = LList.iso[
|
||||
private[sbt] object Module:
|
||||
import sjsonnew.BasicJsonProtocol.*
|
||||
given moduleIso: IsoLList.Aux[
|
||||
Module,
|
||||
GraphModuleId :*: Option[String] :*: String :*:
|
||||
Option[
|
||||
String
|
||||
] :*: Option[File] :*: Option[String] :*: LNil
|
||||
] = LList.iso[
|
||||
Module,
|
||||
GraphModuleId :*: Option[String] :*: String :*:
|
||||
Option[
|
||||
|
|
@ -52,24 +63,24 @@ private[sbt] object Module {
|
|||
] :*: Option[File] :*: Option[String] :*: LNil
|
||||
](
|
||||
{ (m: Module) =>
|
||||
("id", m.id) :*: ("license", m.license) :*: ("extraInfo", m.extraInfo) :*:
|
||||
("evictedByVersion", m.evictedByVersion) :*: (
|
||||
"jarFile",
|
||||
m.jarFile
|
||||
) :*: ("error", m.error) :*: LNil
|
||||
("id", m.id) :*:
|
||||
("license", m.license) :*:
|
||||
("extraInfo", m.extraInfo) :*:
|
||||
("evictedByVersion", m.evictedByVersion) :*:
|
||||
("jarFile", m.jarFile) :*:
|
||||
("error", m.error) :*: LNil
|
||||
},
|
||||
{
|
||||
case (_, id) :*: (_, license) :*: (_, extraInfo) :*: (_, evictedByVersion) :*: (
|
||||
_,
|
||||
jarFile
|
||||
) :*: (
|
||||
_,
|
||||
error
|
||||
) :*: LNil =>
|
||||
case (_, id) :*:
|
||||
(_, license) :*:
|
||||
(_, extraInfo) :*:
|
||||
(_, evictedByVersion) :*:
|
||||
(_, jarFile) :*:
|
||||
(_, error) :*: LNil =>
|
||||
Module(id, license, extraInfo, evictedByVersion, jarFile, error)
|
||||
}
|
||||
)
|
||||
}
|
||||
end Module
|
||||
|
||||
private[sbt] case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
|
||||
lazy val modules: Map[GraphModuleId, Module] =
|
||||
|
|
@ -98,16 +109,17 @@ private[sbt] case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
|
|||
nodes.filter(n => !edges.exists(_._2 == n.id)).sortBy(_.id.idString)
|
||||
}
|
||||
|
||||
private[sbt] object ModuleGraph {
|
||||
private[sbt] object ModuleGraph:
|
||||
val empty = ModuleGraph(Seq.empty, Seq.empty)
|
||||
|
||||
import BasicJsonProtocol._
|
||||
implicit val moduleGraphIso = LList.iso[ModuleGraph, Vector[Module] :*: Vector[Edge] :*: LNil](
|
||||
{ (g: ModuleGraph) =>
|
||||
("nodes", g.nodes.toVector) :*: ("edges", g.edges.toVector) :*: LNil
|
||||
},
|
||||
{ case (_, nodes: Vector[Module]) :*: (_, edges: Vector[Edge]) :*: LNil =>
|
||||
ModuleGraph(nodes, edges)
|
||||
}
|
||||
)
|
||||
}
|
||||
given moduleGraphIso: IsoLList.Aux[ModuleGraph, Vector[Module] :*: Vector[Edge] :*: LNil] =
|
||||
LList.iso[ModuleGraph, Vector[Module] :*: Vector[Edge] :*: LNil](
|
||||
{ (g: ModuleGraph) =>
|
||||
("nodes", g.nodes.toVector) :*: ("edges", g.edges.toVector) :*: LNil
|
||||
},
|
||||
{ case (_, nodes: Vector[Module]) :*: (_, edges: Vector[Edge]) :*: LNil =>
|
||||
ModuleGraph(nodes, edges)
|
||||
}
|
||||
)
|
||||
end ModuleGraph
|
||||
|
|
@ -22,6 +22,7 @@ import sbt.Keys.{
|
|||
publishConfiguration,
|
||||
useCoursier
|
||||
}
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sbt.librarymanagement.PublishConfiguration
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.xml.{ Node, PrefixedAttribute }
|
||||
|
|
|
|||
|
|
@ -7,8 +7,11 @@
|
|||
|
||||
package sbt.internal.server
|
||||
|
||||
import dotty.tools.dotc.core.Contexts.Context
|
||||
import dotty.tools.dotc.reporting.{ Diagnostic => ScalaDiagnostic }
|
||||
import dotty.tools.dotc.reporting.Reporter
|
||||
import sbt.StandardMain.exchange
|
||||
import sbt.compiler.ForwardingReporter
|
||||
import sbt.internal.ForwardingReporter
|
||||
import sbt.internal.bsp
|
||||
import sbt.internal.bsp.{
|
||||
BuildTargetIdentifier,
|
||||
|
|
@ -21,16 +24,14 @@ import sbt.internal.bsp.{
|
|||
|
||||
import java.nio.file.{ Files, Path, Paths }
|
||||
import scala.collection.mutable
|
||||
import scala.reflect.internal.Reporter
|
||||
import scala.reflect.internal.util.{ DefinedPosition, Position }
|
||||
import scala.tools.nsc.reporters.FilteringReporter
|
||||
import sbt.internal.bsp.codec.JsonProtocol._
|
||||
|
||||
class BuildServerEvalReporter(buildTarget: BuildTargetIdentifier, delegate: FilteringReporter)
|
||||
extends ForwardingReporter(delegate) {
|
||||
class BuildServerEvalReporter(buildTarget: BuildTargetIdentifier, delegate: Reporter)
|
||||
extends ForwardingReporter(delegate):
|
||||
private val problemsByFile = mutable.Map[Path, Vector[Diagnostic]]()
|
||||
|
||||
override def doReport(pos: Position, msg: String, severity: Severity): Unit = {
|
||||
override def doReport(dia: ScalaDiagnostic)(using Context): Unit = {
|
||||
/*
|
||||
for {
|
||||
filePath <- if (pos.source.file.exists) Some(Paths.get(pos.source.file.path)) else None
|
||||
range <- convertToRange(pos)
|
||||
|
|
@ -47,10 +48,12 @@ class BuildServerEvalReporter(buildTarget: BuildTargetIdentifier, delegate: Filt
|
|||
)
|
||||
exchange.notifyEvent("build/publishDiagnostics", params)
|
||||
}
|
||||
super.doReport(pos, msg, severity)
|
||||
*/
|
||||
super.doReport(dia)
|
||||
}
|
||||
|
||||
override def finalReport(sourceName: String): Unit = {
|
||||
/*
|
||||
def finalReport(sourceName: String): Unit = {
|
||||
val filePath = Paths.get(sourceName)
|
||||
if (Files.exists(filePath)) {
|
||||
val diagnostics = problemsByFile.getOrElse(filePath, Vector())
|
||||
|
|
@ -90,4 +93,5 @@ class BuildServerEvalReporter(buildTarget: BuildTargetIdentifier, delegate: Filt
|
|||
case _ => None
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
end BuildServerEvalReporter
|
||||
|
|
@ -13,8 +13,10 @@ import java.net.URI
|
|||
import sbt.BuildPaths.{ configurationSources, projectStandard }
|
||||
import sbt.BuildSyntax._
|
||||
import sbt.Def._
|
||||
import sbt.Def.{ parsed }
|
||||
import sbt.Keys._
|
||||
import sbt.Project._
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sbt.ScopeFilter.Make._
|
||||
import sbt.Scoped.richTaskSeq
|
||||
import sbt.SlashSyntax0._
|
||||
|
|
@ -61,6 +63,15 @@ object BuildServerProtocol {
|
|||
|
||||
private val bspReload = "bspReload"
|
||||
|
||||
private lazy val targetIdentifierParser: Parser[Seq[BuildTargetIdentifier]] =
|
||||
Def
|
||||
.spaceDelimited()
|
||||
.map { xs =>
|
||||
xs.map { uri =>
|
||||
BuildTargetIdentifier(URI.create(uri))
|
||||
}
|
||||
}
|
||||
|
||||
lazy val commands: Seq[Command] = Seq(
|
||||
Command.single(bspReload) { (state, reqId) =>
|
||||
try {
|
||||
|
|
@ -114,137 +125,177 @@ object BuildServerProtocol {
|
|||
})
|
||||
.value,
|
||||
// https://github.com/build-server-protocol/build-server-protocol/blob/master/docs/specification.md#build-target-sources-request
|
||||
bspBuildTargetSources := bspInputTask { (state, _, workspace, filter) =>
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
val items = bspBuildTargetSourcesItem.result.all(filter).value
|
||||
val buildItems = workspace.builds.map { case (id, loadedBuildUnit) =>
|
||||
val base = loadedBuildUnit.localBase
|
||||
val sbtFiles = configurationSources(base)
|
||||
val pluginData = loadedBuildUnit.unit.plugins.pluginData
|
||||
val dirs = pluginData.unmanagedSourceDirectories
|
||||
val sourceFiles = getStandaloneSourceFiles(pluginData.unmanagedSources, dirs)
|
||||
val managedDirs = pluginData.managedSourceDirectories
|
||||
val managedSourceFiles =
|
||||
getStandaloneSourceFiles(pluginData.managedSources, managedDirs)
|
||||
val items =
|
||||
dirs.map(toSourceItem(SourceItemKind.Directory, generated = false)) ++
|
||||
sourceFiles.map(toSourceItem(SourceItemKind.File, generated = false)) ++
|
||||
managedDirs.map(toSourceItem(SourceItemKind.Directory, generated = true)) ++
|
||||
managedSourceFiles.map(toSourceItem(SourceItemKind.File, generated = true)) ++
|
||||
sbtFiles.map(toSourceItem(SourceItemKind.File, generated = false))
|
||||
Value(SourcesItem(id, items.toVector))
|
||||
bspBuildTargetSources := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
// val targets = spaceDelimited().parsed.map(uri => BuildTargetIdentifier(URI.create(uri)))
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
val items = bspBuildTargetSourcesItem.result.all(filter).value
|
||||
val buildItems = workspace.builds.map { case (id, loadedBuildUnit) =>
|
||||
val base = loadedBuildUnit.localBase
|
||||
val sbtFiles = configurationSources(base)
|
||||
val pluginData = loadedBuildUnit.unit.plugins.pluginData
|
||||
val dirs = pluginData.unmanagedSourceDirectories
|
||||
val sourceFiles = getStandaloneSourceFiles(pluginData.unmanagedSources, dirs)
|
||||
val managedDirs = pluginData.managedSourceDirectories
|
||||
val managedSourceFiles =
|
||||
getStandaloneSourceFiles(pluginData.managedSources, managedDirs)
|
||||
val items =
|
||||
dirs.map(toSourceItem(SourceItemKind.Directory, generated = false)) ++
|
||||
sourceFiles.map(toSourceItem(SourceItemKind.File, generated = false)) ++
|
||||
managedDirs.map(toSourceItem(SourceItemKind.Directory, generated = true)) ++
|
||||
managedSourceFiles.map(toSourceItem(SourceItemKind.File, generated = true)) ++
|
||||
sbtFiles.map(toSourceItem(SourceItemKind.File, generated = false))
|
||||
Result.Value(SourcesItem(id, items.toVector))
|
||||
}
|
||||
val successfulItems = anyOrThrow(items ++ buildItems)
|
||||
val result = SourcesResult(successfulItems.toVector)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
val successfulItems = anyOrThrow(items ++ buildItems)
|
||||
val result = SourcesResult(successfulItems.toVector)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetSources / aggregate := false,
|
||||
bspBuildTargetResources := bspInputTask { (state, _, workspace, filter) =>
|
||||
workspace.warnIfBuildsNonEmpty(Method.Resources, state.log)
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
val items = bspBuildTargetResourcesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = ResourcesResult(successfulItems.toVector)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
bspBuildTargetResources / aggregate := false,
|
||||
bspBuildTargetDependencySources := bspInputTask { (state, _, workspace, filter) =>
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
import sbt.internal.bsp.codec.JsonProtocol._
|
||||
val items = bspBuildTargetDependencySourcesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = DependencySourcesResult(successfulItems.toVector)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
bspBuildTargetDependencySources / aggregate := false,
|
||||
bspBuildTargetOutputPaths := bspInputTask { (state, _, workspace, filter) =>
|
||||
Def.task {
|
||||
import sbt.internal.bsp.codec.JsonProtocol._
|
||||
val items = bspBuildTargetOutputPathsItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = OutputPathsResult(successfulItems.toVector)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
bspBuildTargetOutputPaths / aggregate := false,
|
||||
bspBuildTargetCompile := bspInputTask { (state, _, workspace, filter) =>
|
||||
workspace.warnIfBuildsNonEmpty(Method.Compile, state.log)
|
||||
Def.task {
|
||||
val statusCodes = Keys.bspBuildTargetCompileItem.result.all(filter).value
|
||||
val aggregatedStatusCode = allOrThrow(statusCodes) match {
|
||||
case Seq() => StatusCode.Success
|
||||
case codes => codes.max
|
||||
bspBuildTargetResources := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
workspace.warnIfBuildsNonEmpty(Method.Resources, s.log)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
val items = bspBuildTargetResourcesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = ResourcesResult(successfulItems.toVector)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
state.respondEvent(BspCompileResult(None, aggregatedStatusCode))
|
||||
}
|
||||
}.evaluated,
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetResources / aggregate := false,
|
||||
bspBuildTargetDependencySources := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
// run the worker task concurrently
|
||||
Def.task {
|
||||
import sbt.internal.bsp.codec.JsonProtocol._
|
||||
val items = bspBuildTargetDependencySourcesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = DependencySourcesResult(successfulItems.toVector)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetDependencySources / aggregate := false,
|
||||
bspBuildTargetCompile := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s: State = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
workspace.warnIfBuildsNonEmpty(Method.Compile, s.log)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
Def.task {
|
||||
val statusCodes = Keys.bspBuildTargetCompileItem.result.all(filter).value
|
||||
val aggregatedStatusCode = allOrThrow(statusCodes) match {
|
||||
case Seq() => StatusCode.Success
|
||||
case codes => codes.max
|
||||
}
|
||||
s.respondEvent(BspCompileResult(None, aggregatedStatusCode))
|
||||
}
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetCompile / aggregate := false,
|
||||
bspBuildTargetTest := bspTestTask.evaluated,
|
||||
bspBuildTargetTest / aggregate := false,
|
||||
bspBuildTargetCleanCache := bspInputTask { (state, targets, workspace, filter) =>
|
||||
workspace.warnIfBuildsNonEmpty(Method.CleanCache, state.log)
|
||||
Def.task {
|
||||
val results = Keys.clean.result.all(filter).value
|
||||
val successes = anyOrThrow(results).size
|
||||
bspBuildTargetCleanCache := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s: State = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
workspace.warnIfBuildsNonEmpty(Method.CleanCache, s.log)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
Def.task {
|
||||
val results = Keys.clean.result.all(filter).value
|
||||
val successes = anyOrThrow(results).size
|
||||
|
||||
// When asking to Rebuild Project, IntelliJ sends the root build as an additional target, however it is
|
||||
// not returned as part of the results. In this case, there's 1 build entry in the workspace, and we're
|
||||
// checking that the executed results plus this entry is equal to the total number of targets.
|
||||
// When rebuilding a single module, the root build isn't sent, just the requested targets.
|
||||
val cleaned = successes + workspace.builds.size == targets.size
|
||||
state.respondEvent(CleanCacheResult(None, cleaned))
|
||||
}
|
||||
}.evaluated,
|
||||
bspBuildTargetCleanCache / aggregate := false,
|
||||
bspBuildTargetScalacOptions := bspInputTask { (state, _, workspace, filter) =>
|
||||
val builds = workspace.builds
|
||||
Def.task {
|
||||
val items = bspBuildTargetScalacOptionsItem.result.all(filter).value
|
||||
val appProvider = appConfiguration.value.provider()
|
||||
val sbtJars = appProvider.mainClasspath()
|
||||
val buildItems = builds.map { build =>
|
||||
val plugins: LoadedPlugins = build._2.unit.plugins
|
||||
val scalacOptions = plugins.pluginData.scalacOptions
|
||||
val pluginClassPath = plugins.classpath
|
||||
val classpath = (pluginClassPath ++ sbtJars).map(_.toURI).toVector
|
||||
val item = ScalacOptionsItem(
|
||||
build._1,
|
||||
scalacOptions.toVector,
|
||||
classpath,
|
||||
new File(build._2.localBase, "project/target").toURI
|
||||
)
|
||||
Value(item)
|
||||
// When asking to Rebuild Project, IntelliJ sends the root build as an additional target, however it is
|
||||
// not returned as part of the results. In this case, there's 1 build entry in the workspace, and we're
|
||||
// checking that the executed results plus this entry is equal to the total number of targets.
|
||||
// When rebuilding a single module, the root build isn't sent, just the requested targets.
|
||||
val cleaned = successes + workspace.builds.size == targets.size
|
||||
s.respondEvent(CleanCacheResult(None, cleaned))
|
||||
}
|
||||
val successfulItems = anyOrThrow(items ++ buildItems)
|
||||
val result = ScalacOptionsResult(successfulItems.toVector)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetCleanCache / aggregate := false,
|
||||
bspBuildTargetScalacOptions := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
val builds = workspace.builds
|
||||
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
Def.task {
|
||||
val items = bspBuildTargetScalacOptionsItem.result.all(filter).value
|
||||
val appProvider = appConfiguration.value.provider()
|
||||
val sbtJars = appProvider.mainClasspath()
|
||||
val buildItems = builds.map { build =>
|
||||
val plugins: LoadedPlugins = build._2.unit.plugins
|
||||
val scalacOptions = plugins.pluginData.scalacOptions
|
||||
val pluginClassPath = plugins.classpath
|
||||
val classpath = (pluginClassPath ++ sbtJars).map(_.toURI).toVector
|
||||
val item = ScalacOptionsItem(
|
||||
build._1,
|
||||
scalacOptions.toVector,
|
||||
classpath,
|
||||
new File(build._2.localBase, "project/target").toURI
|
||||
)
|
||||
Result.Value(item)
|
||||
}
|
||||
val successfulItems = anyOrThrow(items ++ buildItems)
|
||||
val result = ScalacOptionsResult(successfulItems.toVector)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
})
|
||||
.value,
|
||||
bspBuildTargetScalacOptions / aggregate := false,
|
||||
bspScalaTestClasses := bspInputTask { (state, _, workspace, filter) =>
|
||||
workspace.warnIfBuildsNonEmpty(Method.ScalaTestClasses, state.log)
|
||||
Def.task {
|
||||
val items = bspScalaTestClassesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items).flatten.toVector
|
||||
val result = ScalaTestClassesResult(successfulItems.toVector, None)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
bspScalaMainClasses := bspInputTask { (state, _, workspace, filter) =>
|
||||
workspace.warnIfBuildsNonEmpty(Method.ScalaMainClasses, state.log)
|
||||
Def.task {
|
||||
val items = bspScalaMainClassesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = ScalaMainClassesResult(successfulItems.toVector, None)
|
||||
state.respondEvent(result)
|
||||
}
|
||||
}.evaluated,
|
||||
bspScalaTestClasses := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
workspace.warnIfBuildsNonEmpty(Method.ScalaTestClasses, s.log)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
Def.task {
|
||||
val items = bspScalaTestClassesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = ScalaTestClassesResult(successfulItems.toVector, None)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
})
|
||||
.value,
|
||||
bspScalaMainClasses := (Def
|
||||
.input((s: State) => targetIdentifierParser)
|
||||
.flatMapTask { targets =>
|
||||
val s = state.value
|
||||
val workspace = bspFullWorkspace.value.filter(targets)
|
||||
workspace.warnIfBuildsNonEmpty(Method.ScalaMainClasses, s.log)
|
||||
val filter = ScopeFilter.in(workspace.scopes.values.toList)
|
||||
Def.task {
|
||||
val items = bspScalaMainClassesItem.result.all(filter).value
|
||||
val successfulItems = anyOrThrow(items)
|
||||
val result = ScalaMainClassesResult(successfulItems.toVector, None)
|
||||
s.respondEvent(result)
|
||||
}
|
||||
})
|
||||
.value,
|
||||
bspScalaMainClasses / aggregate := false
|
||||
)
|
||||
|
||||
|
|
@ -555,15 +606,13 @@ object BuildServerProtocol {
|
|||
if setting.key.key.label == Keys.bspTargetIdentifier.key.label
|
||||
} yield Scope.replaceThis(Scope.Global.in(ref))(setting.key.scope)
|
||||
|
||||
Def.setting {
|
||||
val targetIds = scopes
|
||||
.map(_ / Keys.bspTargetIdentifier)
|
||||
.join
|
||||
.value
|
||||
val bspEnabled = scopes
|
||||
.map(_ / Keys.bspEnabled)
|
||||
.join
|
||||
.value
|
||||
import sbt.TupleSyntax.*
|
||||
t2ToApp2(
|
||||
(
|
||||
scopes.map(_ / Keys.bspTargetIdentifier).join,
|
||||
scopes.map(_ / Keys.bspEnabled).join,
|
||||
)
|
||||
) { case ((targetIds: Seq[BuildTargetIdentifier], bspEnabled: Seq[Boolean])) =>
|
||||
val buildsMap =
|
||||
mutable.HashMap[BuildTargetIdentifier, mutable.ListBuffer[BuildTargetIdentifier]]()
|
||||
|
||||
|
|
@ -879,8 +928,8 @@ object BuildServerProtocol {
|
|||
Def.task {
|
||||
val state = Keys.state.value
|
||||
val statusCode = resultTask.value match {
|
||||
case Value(_) => StatusCode.Success
|
||||
case Inc(_) => StatusCode.Error
|
||||
case Result.Value(_) => StatusCode.Success
|
||||
case Result.Inc(_) => StatusCode.Error
|
||||
}
|
||||
val _ = state.respondEvent(TestResult(testParams.originId, statusCode))
|
||||
}
|
||||
|
|
@ -1013,15 +1062,15 @@ object BuildServerProtocol {
|
|||
}
|
||||
|
||||
private def anyOrThrow[T](results: Seq[Result[T]]): Seq[T] = {
|
||||
val successes = results.collect { case Value(v) => v }
|
||||
val errors = results.collect { case Inc(cause) => cause }
|
||||
val successes = results.collect { case Result.Value(v) => v }
|
||||
val errors = results.collect { case Result.Inc(cause) => cause }
|
||||
if (successes.nonEmpty || errors.isEmpty) successes
|
||||
else throw Incomplete(None, causes = errors)
|
||||
}
|
||||
|
||||
private def allOrThrow[T](results: Seq[Result[T]]): Seq[T] = {
|
||||
val successes = results.collect { case Value(v) => v }
|
||||
val errors = results.collect { case Inc(cause) => cause }
|
||||
val successes = results.collect { case Result.Value(v) => v }
|
||||
val errors = results.collect { case Result.Inc(cause) => cause }
|
||||
if (errors.isEmpty) successes
|
||||
else throw Incomplete(None, causes = errors)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,7 +16,6 @@ import scala.annotation.{ nowarn, tailrec }
|
|||
import scala.collection.JavaConverters._
|
||||
import scala.concurrent.{ ExecutionContext, Future }
|
||||
import scala.reflect.NameTransformer
|
||||
import scala.tools.reflect.{ ToolBox, ToolBoxError }
|
||||
import scala.util.matching.Regex
|
||||
|
||||
import sjsonnew.JsonFormat
|
||||
|
|
@ -25,6 +24,7 @@ import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter }
|
|||
|
||||
import sbt.internal.inc.{ Analysis, MixedAnalyzingCompiler }
|
||||
import sbt.internal.inc.JavaInterfaceUtil._
|
||||
import sbt.internal.parser.SbtParser
|
||||
import sbt.internal.protocol.JsonRpcResponseError
|
||||
import sbt.internal.protocol.codec.JsonRPCProtocol
|
||||
import sbt.internal.langserver
|
||||
|
|
@ -48,21 +48,7 @@ private[sbt] object Definition {
|
|||
}
|
||||
|
||||
object textProcessor {
|
||||
private val isIdentifier = {
|
||||
lazy val tb =
|
||||
scala.reflect.runtime.universe
|
||||
.runtimeMirror(this.getClass.getClassLoader)
|
||||
.mkToolBox()
|
||||
import tb._
|
||||
lazy val check = parse _ andThen compile _
|
||||
(identifier: String) =>
|
||||
try {
|
||||
check(s"val $identifier = 0; val ${identifier}${identifier} = $identifier")
|
||||
true
|
||||
} catch {
|
||||
case _: ToolBoxError => false
|
||||
}
|
||||
}
|
||||
private val isIdentifier: String => Boolean = SbtParser.isIdentifier
|
||||
|
||||
private def findInBackticks(line: String, point: Int): Option[String] = {
|
||||
val (even, odd) = line.zipWithIndex
|
||||
|
|
@ -295,7 +281,7 @@ private[sbt] object Definition {
|
|||
analysis.relations.definesClass(className) ++
|
||||
analysis.relations.libraryDefinesClass(className)
|
||||
}
|
||||
.flatMap { classFile: VirtualFileRef =>
|
||||
.flatMap { (classFile: VirtualFileRef) =>
|
||||
val x = converter.toPath(classFile)
|
||||
textProcessor.markPosition(x, sym).collect { case (uri, line, from, to) =>
|
||||
Location(
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import java.util.concurrent.{
|
|||
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
|
||||
|
||||
import sbt.BasicCommandStrings.{ Shutdown, TerminateAction }
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.internal.langserver.{ CancelRequestParams, ErrorCodes, LogMessageParams, MessageType }
|
||||
import sbt.internal.protocol.{
|
||||
JsonRpcNotificationMessage,
|
||||
|
|
@ -745,7 +746,7 @@ final class NetworkChannel(
|
|||
}
|
||||
}
|
||||
private class NetworkTerminal
|
||||
extends TerminalImpl(writeableInputStream, outputStream, errorStream, name) {
|
||||
extends TerminalImpl(writeableInputStream, outputStream, errorStream, name) { term =>
|
||||
private[this] val pending = new AtomicBoolean(false)
|
||||
private[this] val closed = new AtomicBoolean(false)
|
||||
private[this] val properties = new AtomicReference[TerminalPropertiesResponse]
|
||||
|
|
@ -755,7 +756,7 @@ final class NetworkChannel(
|
|||
if (alive.get) {
|
||||
if (!pending.get && Option(lastUpdate.get).fold(true)(d => (d + 1.second).isOverdue)) {
|
||||
pending.set(true)
|
||||
val queue = VirtualTerminal.sendTerminalPropertiesQuery(name, jsonRpcRequest)
|
||||
val queue = VirtualTerminal.sendTerminalPropertiesQuery(term.name, jsonRpcRequest)
|
||||
val update: Runnable = () => {
|
||||
queue.poll(5, java.util.concurrent.TimeUnit.SECONDS) match {
|
||||
case null =>
|
||||
|
|
@ -767,7 +768,7 @@ final class NetworkChannel(
|
|||
pending.notifyAll()
|
||||
}
|
||||
}
|
||||
new Thread(update, s"network-terminal-$name-update") {
|
||||
new Thread(update, s"network-terminal-${term.name}-update") {
|
||||
setDaemon(true)
|
||||
}.start()
|
||||
}
|
||||
|
|
@ -829,7 +830,11 @@ final class NetworkChannel(
|
|||
): Option[T] = {
|
||||
if (closed.get) None
|
||||
else {
|
||||
val queue = VirtualTerminal.sendTerminalCapabilitiesQuery(name, jsonRpcRequest, query)
|
||||
val queue = VirtualTerminal.sendTerminalCapabilitiesQuery(
|
||||
term.name,
|
||||
jsonRpcRequest[TerminalCapabilitiesQuery],
|
||||
query
|
||||
)
|
||||
Some(result(queue.take))
|
||||
}
|
||||
}
|
||||
|
|
@ -856,8 +861,8 @@ final class NetworkChannel(
|
|||
if (closed.get) Map.empty
|
||||
else {
|
||||
val queue = VirtualTerminal.sendTerminalAttributesQuery(
|
||||
name,
|
||||
jsonRpcRequest
|
||||
term.name,
|
||||
jsonRpcRequest[TerminalAttributesQuery]
|
||||
)
|
||||
try {
|
||||
val a = queue.take
|
||||
|
|
@ -879,13 +884,18 @@ final class NetworkChannel(
|
|||
lflag = attributes.getOrElse("lflag", ""),
|
||||
cchars = attributes.getOrElse("cchars", ""),
|
||||
)
|
||||
val queue = VirtualTerminal.setTerminalAttributesCommand(name, jsonRpcRequest, attrs)
|
||||
val queue = VirtualTerminal.setTerminalAttributesCommand(
|
||||
term.name,
|
||||
jsonRpcRequest[TerminalSetAttributesCommand],
|
||||
attrs
|
||||
)
|
||||
try queue.take
|
||||
catch { case _: InterruptedException => }
|
||||
}
|
||||
override private[sbt] def getSizeImpl: (Int, Int) =
|
||||
if (!closed.get) {
|
||||
val queue = VirtualTerminal.getTerminalSize(name, jsonRpcRequest)
|
||||
val queue =
|
||||
VirtualTerminal.getTerminalSize(term.name, jsonRpcRequest[TerminalGetSizeQuery])
|
||||
val res =
|
||||
try queue.take
|
||||
catch { case _: InterruptedException => TerminalGetSizeResponse(1, 1) }
|
||||
|
|
@ -894,14 +904,19 @@ final class NetworkChannel(
|
|||
override def setSize(width: Int, height: Int): Unit =
|
||||
if (!closed.get) {
|
||||
val size = TerminalSetSizeCommand(width, height)
|
||||
val queue = VirtualTerminal.setTerminalSize(name, jsonRpcRequest, size)
|
||||
val queue =
|
||||
VirtualTerminal.setTerminalSize(term.name, jsonRpcRequest[TerminalSetSizeCommand], size)
|
||||
try queue.take
|
||||
catch { case _: InterruptedException => }
|
||||
}
|
||||
private[this] def setRawMode(toggle: Boolean): Unit = {
|
||||
if (!closed.get || false) {
|
||||
val raw = TerminalSetRawModeCommand(toggle)
|
||||
val queue = VirtualTerminal.setTerminalRawMode(name, jsonRpcRequest, raw)
|
||||
val queue = VirtualTerminal.setTerminalRawMode(
|
||||
term.name,
|
||||
jsonRpcRequest[TerminalSetRawModeCommand],
|
||||
raw
|
||||
)
|
||||
try queue.take
|
||||
catch { case _: InterruptedException => }
|
||||
}
|
||||
|
|
@ -911,13 +926,14 @@ final class NetworkChannel(
|
|||
override def setEchoEnabled(toggle: Boolean): Unit =
|
||||
if (!closed.get) {
|
||||
val echo = TerminalSetEchoCommand(toggle)
|
||||
val queue = VirtualTerminal.setTerminalEcho(name, jsonRpcRequest, echo)
|
||||
val queue =
|
||||
VirtualTerminal.setTerminalEcho(term.name, jsonRpcRequest[TerminalSetEchoCommand], echo)
|
||||
try queue.take
|
||||
catch { case _: InterruptedException => () }
|
||||
}
|
||||
|
||||
override def flush(): Unit = doFlush()
|
||||
override def toString: String = s"NetworkTerminal($name)"
|
||||
override def toString: String = s"NetworkTerminal(${term.name})"
|
||||
override def close(): Unit = if (closed.compareAndSet(false, true)) {
|
||||
val threads = blockedThreads.synchronized {
|
||||
val t = blockedThreads.asScala.toVector
|
||||
|
|
@ -961,7 +977,7 @@ object NetworkChannel {
|
|||
|
||||
// direct comparison on strings and
|
||||
// remove hotspring unicode added character for numbers
|
||||
if (checkId || force) {
|
||||
if (checkId() || force) {
|
||||
runningEngine.cancelAndShutdown()
|
||||
Right(runningExecId)
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import java.nio.file.Path
|
|||
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
|
||||
import sbt.BasicCommandStrings.{ RebootCommand, Shutdown, TerminateAction }
|
||||
import sbt.Keys.{ baseDirectory, pollInterval, state }
|
||||
import sbt.ProjectExtra.extract
|
||||
import sbt.Scope.Global
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.internal.CommandStrings.LoadProject
|
||||
|
|
@ -16,6 +16,7 @@ import sbt.io.IO
|
|||
import sbt.nio.file.FileAttributes
|
||||
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
|
||||
import xsbti.compile.analysis.{ Stamp => XStamp }
|
||||
import org.checkerframework.checker.units.qual.A
|
||||
|
||||
/**
|
||||
* A trait that indicates what file stamping implementation should be used to track the state of
|
||||
|
|
@ -49,15 +50,14 @@ sealed trait FileStamp
|
|||
* Provides json formatters for [[FileStamp]].
|
||||
*/
|
||||
object FileStamp {
|
||||
private[sbt] type Id[T] = T
|
||||
private[sbt] type Id[A] = A
|
||||
|
||||
private[sbt] implicit class Ops(val fileStamp: FileStamp) {
|
||||
private[sbt] def stamp: XStamp = fileStamp match {
|
||||
case f: FileHashImpl => f.xstamp
|
||||
case LastModified(time) => new IncLastModified(time)
|
||||
case _ => EmptyStamp
|
||||
}
|
||||
}
|
||||
extension (fileStamp: FileStamp)
|
||||
private[sbt] def stamp: XStamp =
|
||||
fileStamp match
|
||||
case f: FileHashImpl => f.xstamp
|
||||
case LastModified(time) => new IncLastModified(time)
|
||||
case _ => EmptyStamp
|
||||
|
||||
private[sbt] def apply(path: Path, fileStamper: FileStamper): Option[FileStamp] =
|
||||
fileStamper match {
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ import sbt.util.{ Level, Logger }
|
|||
|
||||
import scala.annotation.tailrec
|
||||
import scala.collection.mutable
|
||||
import scala.collection.immutable.StringOps
|
||||
import scala.concurrent.duration._
|
||||
import scala.util.control.NonFatal
|
||||
|
||||
|
|
@ -505,7 +506,9 @@ object Watch {
|
|||
val opts = distinctOptions(options).sortBy(_.input)
|
||||
val alignmentLength = opts.map(_.display.length).max + 1
|
||||
val formatted =
|
||||
opts.map(o => s"${o.display}${" " * (alignmentLength - o.display.length)}: ${o.description}")
|
||||
opts.map(o =>
|
||||
s"${o.display}${StringOps(" ") * (alignmentLength - o.display.length)}: ${o.description}"
|
||||
)
|
||||
s"Options:\n${formatted.mkString(" ", "\n ", "")}"
|
||||
}
|
||||
private def distinctOptions(options: Seq[InputOption]): Seq[InputOption] = {
|
||||
|
|
@ -535,7 +538,8 @@ object Watch {
|
|||
(count: Int, project: ProjectRef, commands: Seq[String]) =>
|
||||
{
|
||||
val countStr = s"$count. "
|
||||
Some(s"$countStr${waitMessage(project, commands).mkString(s"\n${" " * countStr.length}")}")
|
||||
Some(s"$countStr${waitMessage(project, commands)
|
||||
.mkString(s"\n${StringOps(" ") * countStr.length}")}")
|
||||
}
|
||||
}.label("Watched.defaultStartWatch")
|
||||
|
||||
|
|
@ -14,6 +14,8 @@ import sbt.Def._
|
|||
import sbt.Keys._
|
||||
import sbt.SlashSyntax0._
|
||||
import sbt.Project._
|
||||
import sbt.ProjectExtra.storeAs
|
||||
import sbt.ProjectExtra.richInitializeTask
|
||||
import sbt.internal.graph._
|
||||
import sbt.internal.graph.backend.SbtUpdateReport
|
||||
import sbt.internal.graph.rendering.{ DagreHTML, TreeView }
|
||||
|
|
@ -40,7 +42,7 @@ object DependencyTreeSettings {
|
|||
.withCachedResolution(false),
|
||||
dependencyTreeIgnoreMissingUpdate / ivyConfiguration := {
|
||||
// inTask will make sure the new definition will pick up `updateOptions in dependencyTreeIgnoreMissingUpdate`
|
||||
inTask(dependencyTreeIgnoreMissingUpdate, Classpaths.mkIvyConfiguration).value
|
||||
Project.inTask(dependencyTreeIgnoreMissingUpdate, Classpaths.mkIvyConfiguration).value
|
||||
},
|
||||
dependencyTreeIgnoreMissingUpdate / ivyModule := {
|
||||
// concatenating & inlining ivySbt & ivyModule default task implementations, as `SbtAccess.inTask` does
|
||||
|
|
@ -54,7 +56,7 @@ object DependencyTreeSettings {
|
|||
.withMissingOk(true),
|
||||
dependencyTreeIgnoreMissingUpdate := {
|
||||
// inTask will make sure the new definition will pick up `ivyModule/updateConfiguration in ignoreMissingUpdate`
|
||||
inTask(dependencyTreeIgnoreMissingUpdate, Classpaths.updateTask).value
|
||||
Project.inTask(dependencyTreeIgnoreMissingUpdate, Classpaths.updateTask).value
|
||||
},
|
||||
)
|
||||
|
||||
|
|
@ -14,7 +14,7 @@ import Def.{ Setting, settingKey }
|
|||
import Defaults._
|
||||
import Keys._
|
||||
import KeyRanks._
|
||||
import sbt.Project.inConfig
|
||||
import sbt.ProjectExtra.inConfig
|
||||
import sbt.internal._
|
||||
import sbt.io.syntax._
|
||||
import sbt.librarymanagement.Configurations.{ IntegrationTest, Test }
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ package sbt
|
|||
package plugins
|
||||
|
||||
import sbt.PluginTrigger.AllRequirements
|
||||
import sbt.Project._
|
||||
import sbt.ProjectExtra.*
|
||||
import sbt.librarymanagement.Configurations.{ Compile, Test }
|
||||
|
||||
object MiniDependencyTreePlugin extends AutoPlugin {
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ import Keys._
|
|||
import sbt.internal.SysProp
|
||||
import sbt.librarymanagement.syntax._
|
||||
import sbt.librarymanagement.{ Configuration, CrossVersion }
|
||||
import Project.inConfig
|
||||
import ProjectExtra.inConfig
|
||||
import sbt.internal.inc.ScalaInstance
|
||||
import sbt.ScopeFilter.Make._
|
||||
|
||||
|
|
@ -11,6 +11,7 @@ import sbt.Def.{ ScopedKey, displayFull, displayMasked }
|
|||
import sbt.internal.TestBuild._
|
||||
import sbt.internal.util.complete.Parser
|
||||
import sbt.internal.{ Resolve, TestBuild }
|
||||
import sbt.ProjectExtra.equalKeys
|
||||
import hedgehog._
|
||||
import hedgehog.core.{ ShrinkLimit, SuccessCount }
|
||||
import hedgehog.runner._
|
||||
|
|
@ -64,7 +65,7 @@ object ParseKey extends Properties {
|
|||
val expected = resolve(structure, key, mask)
|
||||
parseCheck(structure, key, mask, showZeroConfig)(sk =>
|
||||
hedgehog.Result
|
||||
.assert(Project.equal(sk, expected, mask))
|
||||
.assert(Project.equalKeys(sk, expected, mask))
|
||||
.log(s"$sk.key == $expected.key: ${sk.key == expected.key}")
|
||||
.log(s"${sk.scope} == ${expected.scope}: ${Scope.equal(sk.scope, expected.scope, mask)}")
|
||||
).log(s"Expected: ${displayFull(expected)}")
|
||||
|
|
|
|||
|
|
@ -7,9 +7,11 @@
|
|||
|
||||
package sbt
|
||||
|
||||
/*
|
||||
import java.io._
|
||||
|
||||
import sbt.internal._
|
||||
import sbt.internal.inc.MappedFileConverter
|
||||
import sbt.internal.util.{
|
||||
AttributeEntry,
|
||||
AttributeMap,
|
||||
|
|
@ -90,7 +92,7 @@ object FakeState {
|
|||
val settings: Seq[Def.Setting[_]] = Nil
|
||||
|
||||
val currentProject = Map(testProject.base.toURI -> testProject.id)
|
||||
val currentEval: () => sbt.compiler.Eval = () => Load.mkEval(Nil, base, Nil)
|
||||
val currentEval: () => Eval = () => Load.mkEval(Nil, base, Nil)
|
||||
val sessionSettings =
|
||||
SessionSettings(base.toURI, currentProject, Nil, Map.empty, Nil, currentEval)
|
||||
|
||||
|
|
@ -98,7 +100,7 @@ object FakeState {
|
|||
val scopeLocal: Def.ScopeLocal = _ => Nil
|
||||
|
||||
val (cMap, data: Settings[Scope]) =
|
||||
Def.makeWithCompiledMap(settings)(delegates, scopeLocal, Def.showFullKey)
|
||||
Def.makeWithCompiledMap(settings)(using delegates, scopeLocal, Def.showFullKey)
|
||||
val extra: KeyIndex => BuildUtil[_] = (keyIndex) =>
|
||||
BuildUtil(base.toURI, Map.empty, keyIndex, data)
|
||||
val structureIndex: StructureIndex =
|
||||
|
|
@ -138,6 +140,7 @@ object FakeState {
|
|||
delegates,
|
||||
scopeLocal,
|
||||
cMap,
|
||||
MappedFileConverter.empty,
|
||||
)
|
||||
|
||||
val attributes = AttributeMap.empty ++ AttributeMap(
|
||||
|
|
@ -165,3 +168,4 @@ object FakeState {
|
|||
}
|
||||
|
||||
}
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@
|
|||
* Licensed under Apache License 2.0 (see LICENSE)
|
||||
*/
|
||||
|
||||
/*
|
||||
package sbt
|
||||
|
||||
import sbt.util.Logger
|
||||
|
|
@ -106,3 +107,4 @@ object AI {
|
|||
override def requires = A && !Q
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ package sbt
|
|||
import scala.util.control.NonFatal
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Project.project
|
||||
import sbt.BuildSyntax.project
|
||||
import java.io.File
|
||||
|
||||
class ProjectDefs {
|
||||
|
|
|
|||
|
|
@ -22,10 +22,10 @@ object TagsTest extends Properties("Tags") {
|
|||
def size: Gen[Size] =
|
||||
for (i <- Arbitrary.arbitrary[Int] if i != Int.MinValue) yield Size(math.abs(i))
|
||||
|
||||
implicit def aTagMap = Arbitrary(tagMap)
|
||||
implicit def aTagAndFrequency = Arbitrary(tagAndFrequency)
|
||||
implicit def aTag = Arbitrary(tag)
|
||||
implicit def aSize = Arbitrary(size)
|
||||
implicit def aTagMap: Arbitrary[Map[Tag, Int]] = Arbitrary(tagMap)
|
||||
implicit def aTagAndFrequency: Arbitrary[(Tag, Int)] = Arbitrary(tagAndFrequency)
|
||||
implicit def aTag: Arbitrary[Tag] = Arbitrary(tag)
|
||||
implicit def aSize: Arbitrary[Size] = Arbitrary(size)
|
||||
|
||||
property("exclusive allows all groups without the exclusive tag") = forAll {
|
||||
(tm: TagMap, tag: Tag) =>
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue