Merge pull request #7837 from eed3si9n/wip/bump_scala_change

[2.x] refactor: Update Scala 3 syntax
eugene yokota 2024-10-28 00:22:39 -04:00 committed by GitHub
commit f997553a89
212 changed files with 1592 additions and 1528 deletions
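The hunks below repeat a small set of mechanical Scala 3 syntax migrations across many files: underscore type wildcards become `?`, `: _*` vararg splices become `*`, and postfix/infix calls become plain dot calls. A minimal standalone sketch of these three rewrites, with hypothetical names not taken from the sbt sources:

    // Wildcard types: Setting[_]-style shorthand becomes Setting[?]
    def countEntries(maps: Seq[Map[String, ?]]): Int = maps.map(_.size).sum

    // Vararg splices: `xs: _*` becomes `xs*`
    def total(xs: Int*): Int = xs.sum
    val nums = List(1, 2, 3)
    val summed = total(nums*)                       // Scala 2 syntax: total(nums: _*)

    // Postfix/infix calls rewritten as plain dot calls
    val isClient = "sbt-client".startsWith("sbt")   // was: "sbt-client" startsWith "sbt"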

View File

@ -1024,6 +1024,7 @@ lazy val sbtClientProj = (project in file("client"))
Utils.noPublish,
name := "sbt-client",
mimaPreviousArtifacts := Set.empty,
bspEnabled := false,
crossPaths := false,
exportJars := true,
libraryDependencies += jansi,

View File

@ -24,15 +24,15 @@ sealed trait DslEntry {
object DslEntry {
implicit def fromSettingsDef(inc: SettingsDefinition): DslEntry =
DslSetting(inc)
implicit def fromSettingsDef(inc: Seq[Setting[_]]): DslEntry =
implicit def fromSettingsDef(inc: Seq[Setting[?]]): DslEntry =
DslSetting(inc)
/** Represents a DSL entry which adds settings to the current project. */
sealed trait ProjectSettings extends DslEntry {
def toSettings: Seq[Setting[_]]
def toSettings: Seq[Setting[?]]
}
object ProjectSettings {
def unapply(e: DslEntry): Option[Seq[Setting[_]]] =
def unapply(e: DslEntry): Option[Seq[Setting[?]]] =
e match {
case e: ProjectSettings => Some(e.toSettings)
case _ => None
@ -62,26 +62,26 @@ object DslEntry {
/** this represents an `enablePlugins()` in the sbt DSL */
case class DslEnablePlugins(plugins: Seq[AutoPlugin]) extends ProjectManipulation {
override val toFunction: Project => Project = _.enablePlugins(plugins: _*)
override val toFunction: Project => Project = _.enablePlugins(plugins*)
}
/** this represents an `disablePlugins()` in the sbt DSL */
case class DslDisablePlugins(plugins: Seq[AutoPlugin]) extends ProjectManipulation {
override val toFunction: Project => Project = _.disablePlugins(plugins: _*)
override val toFunction: Project => Project = _.disablePlugins(plugins*)
}
/** Represents registering an internal dependency for the current project */
case class DslDependsOn(cs: Seq[ClasspathDep[ProjectReference]]) extends ProjectManipulation {
override val toFunction: Project => Project = _.dependsOn(cs: _*)
override val toFunction: Project => Project = _.dependsOn(cs*)
}
/** Represents registering a set of configurations with the current project. */
case class DslConfigs(cs: Seq[Configuration]) extends ProjectManipulation {
override val toFunction: Project => Project = _.configs(cs: _*)
override val toFunction: Project => Project = _.configs(cs*)
}
/** this represents an `aggregateProjects()` in the sbt DSL */
case class DslAggregate(refs: Seq[ProjectReference]) extends ProjectManipulation {
override val toFunction: Project => Project = _.aggregate(refs: _*)
override val toFunction: Project => Project = _.aggregate(refs*)
}
}

View File

@ -382,7 +382,7 @@ object Eval:
}
def isTopLevelModule(sym: Symbols.Symbol)(using ctx: Context): Boolean =
(sym is Flags.Module) && (sym.owner is Flags.ModuleClass)
sym.is(Flags.Module) && sym.owner.is(Flags.ModuleClass)
override def traverse(tree: tpd.Tree)(using ctx: Context): Unit =
tree match

View File

@ -77,7 +77,7 @@ private[sbt] object EvaluateConfigurations {
}
loader =>
loadFiles.foldLeft(LoadedSbtFile.empty) { (loaded, load) =>
loaded merge load(loader)
loaded.merge(load(loader))
}
}
@ -90,7 +90,7 @@ private[sbt] object EvaluateConfigurations {
eval: Eval,
src: VirtualFile,
imports: Seq[String]
): LazyClassLoaded[Seq[Setting[_]]] =
): LazyClassLoaded[Seq[Setting[?]]] =
evaluateConfiguration(eval, src, IO.readStream(src.input()).linesIterator.toList, imports, 0)
/**
@ -129,7 +129,7 @@ private[sbt] object EvaluateConfigurations {
lines: Seq[String],
imports: Seq[String],
offset: Int
): LazyClassLoaded[Seq[Setting[_]]] = {
): LazyClassLoaded[Seq[Setting[?]]] = {
val l = evaluateSbtFile(eval, file, lines, imports, offset)
loader => l(loader).settings
}
@ -206,7 +206,7 @@ private[sbt] object EvaluateConfigurations {
lines.map { case (s, i) => (s, i + offset) }
def addOffsetToRange(offset: Int, ranges: Seq[(String, LineRange)]): Seq[(String, LineRange)] =
ranges.map { case (s, r) => (s, r shift offset) }
ranges.map { case (s, r) => (s, r.shift(offset)) }
/**
* The name of the class we cast DSL "setting" (vs. definition) lines to.
@ -254,7 +254,7 @@ private[sbt] object EvaluateConfigurations {
TrackedEvalResult(
result.generated,
loader => {
val pos = RangePosition(name, range shift 1)
val pos = RangePosition(name, range.shift(1))
result.getValue(loader).asInstanceOf[DslEntry].withPos(pos)
}
)
@ -281,7 +281,7 @@ private[sbt] object EvaluateConfigurations {
imports: Seq[(String, Int)],
expression: String,
range: LineRange
): LazyClassLoaded[Seq[Setting[_]]] =
): LazyClassLoaded[Seq[Setting[?]]] =
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case DslEntry.ProjectSettings(values) => values
case _ => Nil
@ -307,15 +307,15 @@ private[sbt] object EvaluateConfigurations {
private def isDefinition(line: String): Boolean = {
val trimmed = line.trim
DefinitionKeywords.exists(trimmed startsWith _)
DefinitionKeywords.exists(trimmed.startsWith(_))
}
private def extractedValTypes: Seq[String] =
Seq(
classOf[CompositeProject],
classOf[InputKey[_]],
classOf[TaskKey[_]],
classOf[SettingKey[_]]
classOf[InputKey[?]],
classOf[TaskKey[?]],
classOf[SettingKey[?]]
).map(_.getName)
private def evaluateDefinitions(
@ -351,19 +351,19 @@ object BuildUtilLite:
end BuildUtilLite
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] = {
def taskToKeyMap(data: Settings[Scope]): Map[Task[?], ScopedKey[Task[?]]] = {
val pairs = data.scopes flatMap (scope =>
data.data(scope).entries collect { case AttributeEntry(key, value: Task[_]) =>
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]]))
(value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[?]]]))
}
)
pairs.toMap[Task[_], ScopedKey[Task[_]]]
pairs.toMap[Task[?], ScopedKey[Task[?]]]
}
def allKeys(settings: Seq[Setting[_]]): Set[ScopedKey[_]] = {
val result = new java.util.HashSet[ScopedKey[_]]
def allKeys(settings: Seq[Setting[?]]): Set[ScopedKey[?]] = {
val result = new java.util.HashSet[ScopedKey[?]]
settings.foreach { s =>
if (!s.key.key.isLocal && result.add(s.key)) {
s.dependencies.foreach(k => if (!k.key.isLocal) result.add(s.key))
@ -372,15 +372,15 @@ object Index {
result.asScala.toSet
}
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[_]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[_]]
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[?]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[?]]
def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] =
def stringToKeyMap(settings: Set[AttributeKey[?]]): Map[String, AttributeKey[?]] =
stringToKeyMap0(settings)(_.label)
private def stringToKeyMap0(
settings: Set[AttributeKey[_]]
)(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] = {
settings: Set[AttributeKey[?]]
)(label: AttributeKey[?] => String): Map[String, AttributeKey[?]] = {
val multiMap = settings.groupBy(label)
val duplicates = multiMap.iterator
.collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.tag)) }

View File

@ -17,7 +17,7 @@ import java.nio.file.Path
* the values of Project vals, and the import statements for all defined vals/defs.
*/
private[sbt] final class LoadedSbtFile(
val settings: Seq[Setting[_]],
val settings: Seq[Setting[?]],
val projects: Seq[Project],
val importedDefs: Seq[String],
val manipulations: Seq[Project => Project],
@ -33,7 +33,7 @@ private[sbt] final class LoadedSbtFile(
projects ++ o.projects,
importedDefs ++ o.importedDefs,
manipulations,
definitions zip o.definitions,
definitions.zip(o.definitions),
generatedFiles ++ o.generatedFiles
)
@ -48,7 +48,7 @@ private[sbt] final class LoadedSbtFile(
private[sbt] final class DefinedSbtValues(val sbtFiles: Seq[EvalDefinitions]) {
def values(parent: ClassLoader): Seq[Any] =
sbtFiles flatMap (_ values parent)
sbtFiles flatMap (_.values(parent))
def classloader(parent: ClassLoader): ClassLoader =
sbtFiles.foldLeft(parent) { (cl, e) =>

View File

@ -17,7 +17,7 @@ private[sbt] object SbtRefactorings:
import sbt.internal.parser.SbtParser.{ END_OF_LINE, FAKE_FILE }
/** A session setting is simply a tuple of a Setting[_] and the strings which define it. */
type SessionSetting = (Def.Setting[_], Seq[String])
type SessionSetting = (Def.Setting[?], Seq[String])
val emptyString = ""
val reverseOrderingInt = Ordering[Int].reverse

View File

@ -13,7 +13,7 @@ import java.nio.file.Paths;
import org.fusesource.jansi.AnsiConsole;
public class Client {
public static void main(final String[] args) {
public static void main(String[] args) {
boolean isWin = System.getProperty("os.name").toLowerCase().startsWith("win");
try {
if (isWin) AnsiConsole.systemInstall();

View File

@ -337,7 +337,7 @@ trait Cont:
summon[HashWriter[Unit]]
}.asExprOf[HashWriter[A2]]
else summonHashWriter[A2]
val tagsExpr = '{ List(${ Varargs(tags.map(Expr[CacheLevelTag](_))) }: _*) }
val tagsExpr = '{ List(${ Varargs(tags.map(Expr[CacheLevelTag](_))) }*) }
val block = letOutput(outputs, cacheConfigExpr)(body)
'{
given HashWriter[A2] = $inputHashWriter
@ -374,7 +374,7 @@ trait Cont:
outputs = List(${
Varargs[VirtualFile](outputs.map: out =>
out.toRef.asExprOf[VirtualFile])
}: _*),
}*),
)
}.asTerm
).asExprOf[ActionCache.InternalActionResult[A1]]

View File

@ -28,7 +28,7 @@ trait Convert[C <: Quotes & Singleton] extends ContextUtil[C]:
(n: String, tpe: TypeRepr, tree: Term) =>
tpe.asType match
case '[a] =>
convert[a](n, tree)(Type.of[a]).isSuccess
convert[a](n, tree)(using Type.of[a]).isSuccess
/**
* Substitutes wrappers in tree `t` with the result of `subWrapper`. A wrapper is a Tree of the

View File

@ -20,7 +20,7 @@ object DependencyTreePlugin extends AutoPlugin {
val configurations = Vector(Compile, Test, IntegrationTest, Runtime, Provided, Optional)
// MiniDependencyTreePlugin provides baseBasicReportingSettings for Compile and Test
override def projectSettings: Seq[Def.Setting[_]] =
override def projectSettings: Seq[Def.Setting[?]] =
((configurations diff Vector(Compile, Test)) flatMap { config =>
inConfig(config)(DependencyTreeSettings.baseBasicReportingSettings)
}) ++

View File

@ -42,7 +42,7 @@ object LineReader {
!java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT)
val MaxHistorySize = 500
private def completer(parser: Parser[_]): Completer = new Completer {
private def completer(parser: Parser[?]): Completer = new Completer {
def complete(lr: JLineReader, pl: ParsedLine, candidates: JList[Candidate]): Unit = {
Parser.completions(parser, pl.line(), 10).get.foreach { c =>
/*
@ -99,7 +99,7 @@ object LineReader {
inputrcFileUrl().map(in => sbt.io.IO.readBytes(in.openStream()))
def createReader(
historyPath: Option[File],
parser: Parser[_],
parser: Parser[?],
terminal: Terminal,
): LineReader = {
// We may want to consider insourcing LineReader.java from jline. We don't otherwise
@ -291,14 +291,14 @@ private[sbt] object JLine {
final class FullReader(
historyPath: Option[File],
complete: Parser[_],
complete: Parser[?],
val handleCONT: Boolean,
terminal: Terminal
) extends JLine {
@deprecated("Use the constructor with no injectThreadSleep parameter", "1.4.0")
def this(
historyPath: Option[File],
complete: Parser[_],
complete: Parser[?],
handleCONT: Boolean = LineReader.HandleCONT,
injectThreadSleep: Boolean = false
) =

View File

@ -16,7 +16,7 @@ package complete
sealed trait Completions {
def get: Set[Completion]
final def x(o: Completions): Completions = flatMap(_ x o)
final infix def x(o: Completions): Completions = flatMap(_ x o)
final def ++(o: Completions): Completions = Completions(get ++ o.get)
final def +:(o: Completion): Completions = Completions(get + o)
final def filter(f: Completion => Boolean): Completions = Completions(get filter f)
@ -90,8 +90,9 @@ sealed trait Completion {
/** Appends the completions in `o` with the completions in this Completion. */
def ++(o: Completion): Completion = Completion.concat(this, o)
final def x(o: Completions): Completions =
if (Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this)
final infix def x(o: Completions): Completions =
if Completion.evaluatesRight(this) then o.map(this ++ _)
else Completions.strict(Set.empty + this)
override final lazy val hashCode = Completion.hashCode(this)
override final def equals(o: Any) = o match {

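Methods that are still meant to be used in operator position, such as `Completions.x` above, now carry the Scala 3 `infix` modifier. A small sketch under simplified, hypothetical types:

    // `infix` documents and future-proofs operator-style alphanumeric methods.
    final case class Candidates(values: Set[String]):
      // cross product of two candidate sets, usable in operator position as `a x b`
      infix def x(other: Candidates): Candidates =
        Candidates(
          for
            a <- values
            b <- other.values
          yield a + b
        )

    val combined = Candidates(Set("re", "un")) x Candidates(Set("do", "set"))
    // combined.values == Set("redo", "reset", "undo", "unset")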
View File

@ -49,7 +49,7 @@ sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSo
override def apply(): Iterable[String] = examples
private def examplesWithRemovedPrefix(prefix: String) = examples.collect {
case example if example startsWith prefix => example substring prefix.length
case example if example.startsWith(prefix) => example.substring(prefix.length)
}
}
@ -79,5 +79,5 @@ class FileExamples(base: File, prefix: String = "") extends ExampleSource {
}
private def dirStartsWithPrefix(relativizedPath: String): Boolean =
(relativizedPath startsWith prefix) || (prefix startsWith relativizedPath)
relativizedPath.startsWith(prefix) || prefix.startsWith(relativizedPath)
}

View File

@ -15,7 +15,7 @@ import scala.annotation.tailrec
import scala.jdk.CollectionConverters.*
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
def installCustomCompletor(reader: ConsoleReader, parser: Parser[?]): Unit =
installCustomCompletor(reader)(parserAsCompletor(parser))
def installCustomCompletor(reader: ConsoleReader)(
@ -64,12 +64,12 @@ object JLineCompletion {
cursor: Int,
candidates: java.util.List[CharSequence]
): Int = {
candidates.asInstanceOf[java.util.List[String]] add "dummy"
candidates.asInstanceOf[java.util.List[String]].add("dummy")
0
}
}
def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) =
def parserAsCompletor(p: Parser[?]): (String, Int) => (Seq[String], Seq[String]) =
(str, level) => convertCompletions(Parser.completions(p, str, level))
def convertCompletions(c: Completions): (Seq[String], Seq[String]) = {

View File

@ -100,7 +100,7 @@ sealed trait RichParser[A] {
* Apply the original parser, but only succeed if `o` also succeeds. Note that `o` does not need
* to consume the same amount of input to satisfy this condition.
*/
def &(o: Parser[_]): Parser[A]
def &(o: Parser[?]): Parser[A]
/** Explicitly defines the completions for the original Parser. */
def examples(s: String*): Parser[A]
@ -160,11 +160,11 @@ object Parser extends ParserMain:
def isFailure: Boolean
def isValid: Boolean
def errors: Seq[String]
def or[B >: T](b: => Result[B]): Result[B]
infix def or[B >: T](b: => Result[B]): Result[B]
def either[B](b: => Result[B]): Result[Either[T, B]]
def map[B](f: T => B): Result[B]
def flatMap[B](f: T => Result[B]): Result[B]
def &&(b: => Result[_]): Result[T]
infix def &&(b: => Result[?]): Result[T]
def filter(f: T => Boolean, msg: => String): Result[T]
def seq[B](b: => Result[B]): Result[(T, B)] = app(b)((m, n) => (m, n))
def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C]
@ -181,8 +181,8 @@ object Parser extends ParserMain:
case Value(bv) => Value(f(value, bv))
}
def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this }
def or[B >: T](b: => Result[B]): Result[B] = this
infix def &&(b: => Result[?]): Result[T] = b match { case f: Failure => f; case _ => this }
infix def or[B >: T](b: => Result[B]): Result[B] = this
def either[B](b: => Result[B]): Result[Either[T, B]] = Value(Left(value))
def map[B](f: T => B): Result[B] = Value(f(value))
def flatMap[B](f: T => Result[B]): Result[B] = f(value)
@ -198,7 +198,7 @@ object Parser extends ParserMain:
def map[B](f: Nothing => B) = this
def flatMap[B](f: Nothing => Result[B]) = this
def or[B](b: => Result[B]): Result[B] = b match {
infix def or[B](b: => Result[B]): Result[B] = b match {
case v: Value[B] => v
case f: Failure => if (definitive) this else this ++ f
}
@ -210,7 +210,7 @@ object Parser extends ParserMain:
def filter(f: Nothing => Boolean, msg: => String) = this
def app[B, C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this
def &&(b: => Result[_]) = this
infix def &&(b: => Result[?]) = this
def toEither = Left(() => errors)
private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors)
@ -263,8 +263,8 @@ object Parser extends ParserMain:
b.ifValid {
(a.result, b.result) match {
case (Some(av), Some(bv)) => success((av, bv))
case (Some(av), None) => b map (bv => (av, bv))
case (None, Some(bv)) => a map (av => (av, bv))
case (Some(av), None) => b.map(bv => (av, bv))
case (None, Some(bv)) => a.map(av => (av, bv))
case (None, None) => new SeqParser(a, b)
}
}
@ -332,7 +332,7 @@ object Parser extends ParserMain:
}
}
def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
def and[T](a: Parser[T], b: Parser[?]): Parser[T] = a.ifValid(b.ifValid(new And(a, b)))
end Parser
@ -349,7 +349,7 @@ trait ParserMain {
def map[B](f: A => B) = mapParser(a, f)
def id = a
def ^^^[B](value: B): Parser[B] = a map (_ => value)
def ^^^[B](value: B): Parser[B] = a.map(_ => value)
def ??[B >: A](alt: B): Parser[B] = a.? map { x =>
x.getOrElse[B](alt)
}
@ -358,7 +358,7 @@ trait ParserMain {
def !!!(msg: String): Parser[A] = onFailure(a, msg)
def failOnException: Parser[A] = trapAndFail(a)
def &(o: Parser[_]) = and(a, o)
def &(o: Parser[?]) = and(a, o)
def examples(s: String*): Parser[A] = examples(s.toSet)
def examples(s: Set[String], check: Boolean = false): Parser[A] =
@ -424,12 +424,12 @@ trait ParserMain {
* Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.
*/
def range(r: collection.immutable.NumericRange[Char], label: String): Parser[Char] =
charClass(r contains _, label).examples(r.map(_.toString): _*)
charClass(r contains _, label).examples(r.map(_.toString)*)
/** Defines a Parser that parses a single character only if it is contained in `legal`. */
def chars(legal: String): Parser[Char] = {
val set = legal.toSet
charClass(set, "character in '" + legal + "'") examples (set.map(_.toString))
charClass(set, "character in '" + legal + "'").examples(set.map(_.toString))
}
/**
@ -481,7 +481,7 @@ trait ParserMain {
*
* See also [[sampleParse]] and [[sampleCompletions]].
*/
def sample(str: String, parser: Parser[_], completions: Boolean = false): Unit =
def sample(str: String, parser: Parser[?], completions: Boolean = false): Unit =
if (completions) sampleCompletions(str, parser) else sampleParse(str, parser)
/**
@ -489,7 +489,7 @@ trait ParserMain {
* the result of parsing is printed using the result's `toString` method. If parsing fails, the
* error message is displayed.
*/
def sampleParse(str: String, parser: Parser[_]): Unit =
def sampleParse(str: String, parser: Parser[?]): Unit =
parse(str, parser) match {
case Left(msg) => println(msg)
case Right(v) => println(v)
@ -500,7 +500,7 @@ trait ParserMain {
* the available completions are displayed on separate lines. If parsing fails, the error message
* is displayed.
*/
def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit =
def sampleCompletions(str: String, parser: Parser[?], level: Int = 1): Unit =
Parser.completions(parser, str, level).get foreach println
// intended to be temporary pending proper error feedback
@ -517,7 +517,7 @@ trait ParserMain {
(nonEmpty, ci)
}
else
loop(ci, a derive s(ci))
loop(ci, a.derive(s(ci)))
}
loop(-1, p)
}
@ -536,9 +536,9 @@ trait ParserMain {
* increasing positive numbers corresponding to increasing verbosity. Typically no more than a few
* levels are defined.
*/
def completions(p: Parser[_], s: String, level: Int): Completions =
def completions(p: Parser[?], s: String, level: Int): Completions =
// The x Completions.empty removes any trailing token completions where append.isEmpty
apply(p)(s).completions(level) x Completions.empty
apply(p)(s).completions(level).x(Completions.empty)
def examples[A](a: Parser[A], completions: Set[String], check: Boolean = false): Parser[A] =
examples(a, new FixedSetExamples(completions), completions.size, check)
@ -575,7 +575,7 @@ trait ParserMain {
} else a
def matched(
t: Parser[_],
t: Parser[?],
seen: Vector[Char] = Vector.empty,
partial: Boolean = false
): Parser[String] =
@ -636,7 +636,7 @@ trait ParserMain {
case (_, _) => new HomParser(a, b)
}
def not(p: Parser[_], failMessage: String): Parser[Unit] = p.result match {
def not(p: Parser[?], failMessage: String): Parser[Unit] = p.result match {
case None => new Not(p, failMessage)
case Some(_) => failure(failMessage)
}
@ -698,7 +698,7 @@ private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] {
def derive(c: Char) =
try {
trapAndFail(a derive c)
trapAndFail(a.derive(c))
} catch {
case e: Exception => Invalid(fail(e))
}
@ -722,7 +722,7 @@ private final class OnFailure[A](a: Parser[A], message: String) extends ValidPar
case _: Failure => mkFailure(message); case v: Value[A] => v
}
def derive(c: Char) = onFailure(a derive c, message)
def derive(c: Char) = onFailure(a.derive(c), message)
def completions(level: Int) = a.completions(level)
override def toString = "(" + a + " !!! \"" + message + "\" )"
override def isTokenStart = a.isTokenStart
@ -730,7 +730,7 @@ private final class OnFailure[A](a: Parser[A], message: String) extends ValidPar
private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[(A, B)] {
lazy val result = tuple(a.result, b.result)
lazy val resultEmpty = a.resultEmpty seq b.resultEmpty
lazy val resultEmpty = a.resultEmpty.seq(b.resultEmpty)
def derive(c: Char) = {
val common = a.derive(c) ~ b
@ -740,13 +740,13 @@ private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidPar
}
}
def completions(level: Int) = a.completions(level) x b.completions(level)
def completions(level: Int) = a.completions(level).x(b.completions(level))
override def toString = "(" + a + " ~ " + b + ")"
}
private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser[A] {
lazy val result = tuple(a.result, b.result) map (_._1)
def derive(c: Char) = (a derive c) | (b derive c)
def derive(c: Char) = a.derive(c) | b.derive(c)
lazy val resultEmpty = a.resultEmpty or b.resultEmpty
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " | " + b + ")"
@ -754,8 +754,8 @@ private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser
private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A, B]] {
lazy val result = tuple(a.result, b.result) map { case (a, _) => Left(a) }
def derive(c: Char) = (a derive c) || (b derive c)
lazy val resultEmpty = a.resultEmpty either b.resultEmpty
def derive(c: Char) = a.derive(c) || b.derive(c)
lazy val resultEmpty = a.resultEmpty.either(b.resultEmpty)
def completions(level: Int) = a.completions(level) ++ b.completions(level)
override def toString = "(" + a + " || " + b + ")"
}
@ -777,25 +777,25 @@ private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String])
}
def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _)
def derive(c: Char) = seq0(a.map(_ derive c), errors)
def derive(c: Char) = seq0(a.map(_.derive(c)), errors)
override def toString = "seq(" + a + ")"
}
private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] {
lazy val result = a.result flatMap (av => f(av).result)
lazy val resultEmpty = a.resultEmpty flatMap (av => f(av).resultEmpty)
lazy val resultEmpty = a.resultEmpty.flatMap(av => f(av).resultEmpty)
def completions(level: Int) =
a.completions(level) flatMap { c =>
apply(a)(c.append).resultEmpty match {
case _: Failure => Completions.strict(Set.empty + c)
case Value(av) => c x f(av).completions(level)
case Value(av) => c.x(f(av).completions(level))
}
}
def derive(c: Char) = {
val common = a derive c flatMap f
val common = a.derive(c).flatMap(f)
a.resultEmpty match {
case Value(av) => common | derive1(f(av), c)
case _: Failure => common
@ -809,8 +809,8 @@ private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends Va
private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser[B] {
lazy val result = a.result map f
lazy val resultEmpty = a.resultEmpty map f
def derive(c: Char) = (a derive c) map f
lazy val resultEmpty = a.resultEmpty.map(f)
def derive(c: Char) = a.derive(c).map(f)
def completions(level: Int) = a.completions(level)
override def isTokenStart = a.isTokenStart
override def toString = "map(" + a + ")"
@ -822,7 +822,7 @@ private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg:
def filterResult(r: Result[T]) = r.filter(f, msg(seen))
lazy val result = p.result filter f
lazy val resultEmpty = filterResult(p.resultEmpty)
def derive(c: Char) = filterParser(p derive c, f, seen + c, msg)
def derive(c: Char) = filterParser(p.derive(c), f, seen + c, msg)
def completions(level: Int) = p.completions(level) filterS { s =>
filterResult(apply(p)(s).resultEmpty).isValid
@ -832,10 +832,10 @@ private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg:
override def isTokenStart = p.isTokenStart
}
private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean)
private final class MatchedString(delegate: Parser[?], seenV: Vector[Char], partial: Boolean)
extends ValidParser[String] {
lazy val seen = seenV.mkString
def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial)
def derive(c: Char) = matched(delegate.derive(c), seenV :+ c, partial)
def completions(level: Int) = delegate.completions(level)
def result = if (delegate.result.isDefined) Some(seen) else None
@ -849,7 +849,7 @@ private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], part
private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions)
extends ValidParser[T] {
def derive(c: Char) = mkToken(delegate derive c, seen + c, complete)
def derive(c: Char) = mkToken(delegate.derive(c), seen + c, complete)
def completions(level: Int) = complete match {
case dc: TokenCompletions.Delegating =>
@ -863,16 +863,16 @@ private final class TokenStart[T](delegate: Parser[T], seen: String, complete: T
override def toString = "token('" + complete + ", " + delegate + ")"
}
private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] {
private final class And[T](a: Parser[T], b: Parser[?]) extends ValidParser[T] {
lazy val result = tuple(a.result, b.result) map { _._1 }
def derive(c: Char) = (a derive c) & (b derive c)
def derive(c: Char) = a.derive(c) & b.derive(c)
def completions(level: Int) = a.completions(level).filterS(s => apply(b)(s).resultEmpty.isValid)
lazy val resultEmpty = a.resultEmpty && b.resultEmpty
override def toString = "(%s) && (%s)".format(a, b)
}
private final class Not(delegate: Parser[_], failMessage: String) extends ValidParser[Unit] {
def derive(c: Char) = if (delegate.valid) not(delegate derive c, failMessage) else this
private final class Not(delegate: Parser[?], failMessage: String) extends ValidParser[Unit] {
def derive(c: Char) = if (delegate.valid) not(delegate.derive(c), failMessage) else this
def completions(level: Int) = Completions.empty
def result = None
@ -914,7 +914,7 @@ private final class ParserWithExamples[T](
def derive(c: Char) =
examples(
delegate derive c,
delegate.derive(c),
exampleSource.withAddedPrefix(c.toString),
maxNumberOfExamples,
removeInvalidExamples
@ -973,7 +973,7 @@ private final class CharacterClass(f: Char => Boolean, label: String) extends Va
private final class Optional[A](delegate: Parser[A]) extends ValidParser[Option[A]] {
def result = delegate.result.map(some[A])
def resultEmpty = Value(None)
def derive(c: Char) = (delegate derive c).map(some[A])
def derive(c: Char) = delegate.derive(c).map(some[A])
def completions(level: Int) = Completion.empty +: delegate.completions(level)
override def toString = delegate.toString + "?"
}
@ -991,7 +991,7 @@ private final class Repeat[T](
def derive(c: Char) =
partial match {
case Some(part) =>
val partD = repeat(Some(part derive c), repeated, min, max, accumulatedReverse)
val partD = repeat(Some(part.derive(c)), repeated, min, max, accumulatedReverse)
part.resultEmpty match {
case Value(pv) => partD | repeatDerive(c, pv :: accumulatedReverse)
case _: Failure => partD
@ -1000,16 +1000,16 @@ private final class Repeat[T](
}
def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] =
repeat(Some(repeated derive c), repeated, scala.math.max(0, min - 1), max.decrement, accRev)
repeat(Some(repeated.derive(c)), repeated, scala.math.max(0, min - 1), max.decrement, accRev)
def completions(level: Int) = {
def pow(comp: Completions, exp: Completions, n: Int): Completions =
if (n == 1) comp else pow(comp x exp, exp, n - 1)
if (n == 1) comp else pow(comp.x(exp), exp, n - 1)
val repC = repeated.completions(level)
val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min)
partial match {
case Some(p) => p.completions(level) x fin
case Some(p) => p.completions(level).x(fin)
case None => fin
}
}
@ -1023,7 +1023,7 @@ private final class Repeat[T](
case Some(partialPattern) =>
partialPattern.resultEmpty.map(_ :: accumulatedReverse)
}
(partialAccumulatedOption app repeatedParseEmpty)((x, y) => (x reverse_::: y): Seq[T])
(partialAccumulatedOption.app(repeatedParseEmpty))((x, y) => (x reverse_::: y): Seq[T])
}
private def repeatedParseEmpty: Result[List[T]] = {

View File

@ -38,16 +38,19 @@ trait Parsers {
lazy val DigitSet = Set("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
/** Parses any single digit and provides that digit as a Char as the result. */
lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet
lazy val Digit = charClass(_.isDigit, "digit").examples(DigitSet)
/** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */
lazy val HexDigitSet =
Set('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F')
/** Parses a single hexadecimal digit (0-9, a-f, A-F). */
lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(
_.toString
)
lazy val HexDigit =
charClass(c => HexDigitSet(c.toUpper), "hex digit").examples(
HexDigitSet.map(
_.toString
)
)
/** Parses a single letter, according to Char.isLetter, into a Char. */
lazy val Letter = charClass(_.isLetter, "letter")
@ -322,14 +325,14 @@ trait Parsers {
* Applies `rep` zero or more times, separated by `sep`. The result is the (possibly empty)
* sequence of results from the multiple `rep` applications. The `sep` results are discarded.
*/
def repsep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
def repsep[T](rep: Parser[T], sep: Parser[?]): Parser[Seq[T]] =
rep1sep(rep, sep) ?? nilSeq[T]
/**
* Applies `rep` one or more times, separated by `sep`. The result is the non-empty sequence of
* results from the multiple `rep` applications. The `sep` results are discarded.
*/
def rep1sep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] =
def rep1sep[T](rep: Parser[T], sep: Parser[?]): Parser[Seq[T]] =
(rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs }
/** Wraps the result of `p` in `Some`. */
@ -390,7 +393,7 @@ trait Parsers {
* Parses a URI that is valid according to the single argument java.net.URI constructor, using
* `ex` as tab completion examples.
*/
def Uri(ex: Set[URI]) = basicUri examples (ex.map(_.toString))
def Uri(ex: Set[URI]) = basicUri.examples(ex.map(_.toString))
}
/** Provides standard [[Parser]] implementations. */
@ -400,7 +403,7 @@ object Parsers extends Parsers
object DefaultParsers extends Parsers with ParserMain {
/** Applies parser `p` to input `s` and returns `true` if the parse was successful. */
def matches(p: Parser[_], s: String): Boolean =
def matches(p: Parser[?], s: String): Boolean =
apply(p)(s).resultEmpty.isValid
/** Returns `true` if `s` parses successfully according to [[ID]]. */

View File

@ -19,7 +19,7 @@ object ProcessError {
def extractLine(s: String, i: Int): (String, Int) = {
val notNewline = (c: Char) => c != '\n' && c != '\r'
val left = takeRightWhile(s.substring(0, i))(notNewline)
val right = s substring i takeWhile notNewline
val right = s.substring(i).takeWhile(notNewline)
(left + right, left.length)
}

View File

@ -62,10 +62,10 @@ object ParserTest extends Properties("Completing Parser") {
(("token '" + in + "'") |: checkOne(in, nested, expect)) &&
(("display '" + in + "'") |: checkOne(in, nestedDisplay, expectDisplay))
def checkOne(in: String, parser: Parser[_], expect: Completion): Prop =
def checkOne(in: String, parser: Parser[?], expect: Completion): Prop =
completions(parser, in, 1) == Completions.single(expect)
def checkAll(in: String, parser: Parser[_], expect: Completions): Prop = {
def checkAll(in: String, parser: Parser[?], expect: Completions): Prop = {
val cs = completions(parser, in, 1)
("completions: " + cs) |: ("Expected: " + expect) |: (cs == expect: Prop)
}
@ -74,7 +74,7 @@ object ParserTest extends Properties("Completing Parser") {
(("token '" + in + "'") |: checkInv(in, nested)) &&
(("display '" + in + "'") |: checkInv(in, nestedDisplay))
def checkInv(in: String, parser: Parser[_]): Prop = {
def checkInv(in: String, parser: Parser[?]): Prop = {
val cs = completions(parser, in, 1)
("completions: " + cs) |: (cs == Completions.nil: Prop)
}
@ -103,7 +103,7 @@ object ParserTest extends Properties("Completing Parser") {
checkOne("asdf", token(any.+.examples("asdf", "qwer")), Completion.suggestion(""))
val colors = Set("blue", "green", "red")
val base = (seen: Seq[String]) => token(ID examples (colors -- seen))
val base = (seen: Seq[String]) => token(ID.examples(colors -- seen))
val sep = token(Space)
val repeat = repeatDep(base, sep)
def completionStrings(ss: Set[String]) = Completions(ss map (Completion.token("", _)))

View File

@ -70,12 +70,12 @@ class FileExamplesTest extends UnitSpec {
}
final class DirectoryStructure(withCompletionPrefix: String) {
var fileExamples: FileExamples = _
var baseDir: File = _
var childFiles: List[File] = _
var childDirectories: List[File] = _
var nestedFiles: List[File] = _
var nestedDirectories: List[File] = _
var fileExamples: FileExamples = scala.compiletime.uninitialized
var baseDir: File = scala.compiletime.uninitialized
var childFiles: List[File] = scala.compiletime.uninitialized
var childDirectories: List[File] = scala.compiletime.uninitialized
var nestedFiles: List[File] = scala.compiletime.uninitialized
var nestedDirectories: List[File] = scala.compiletime.uninitialized
def allRelativizedPaths: List[String] =
(childFiles ++ childDirectories ++ nestedFiles ++ nestedDirectories)
@ -83,8 +83,8 @@ class FileExamplesTest extends UnitSpec {
def prefixedPathsOnly: List[String] =
allRelativizedPaths
.withFilter(_ startsWith withCompletionPrefix)
.map(_ substring withCompletionPrefix.length)
.withFilter(_.startsWith(withCompletionPrefix))
.map(_.substring(withCompletionPrefix.length))
def createSampleDirStructure(tempDir: File): Unit = {
childFiles = toChildFiles(tempDir, List("foo", "bar", "bazaar"))

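Scala 3 deprecates the `var x: T = _` default initializer, so test fixtures like the one above switch to `scala.compiletime.uninitialized`. A minimal sketch (hypothetical fixture, not the sbt test):

    import scala.compiletime.uninitialized

    final class Fixture:
      // left default-initialized until setUp() runs; replaces `var tempDir: java.io.File = _`
      var tempDir: java.io.File = uninitialized

      def setUp(): Unit =
        tempDir = java.io.File.createTempFile("demo", ".tmp").getParentFile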
View File

@ -68,7 +68,7 @@ class BufferedAppender(override val name: String, delegate: Appender) extends Ap
}
private val buffer =
new java.util.Vector[Either[XLogEvent, (Level.Value, Option[String], Option[ObjectEvent[_]])]]
new java.util.Vector[Either[XLogEvent, (Level.Value, Option[String], Option[ObjectEvent[?]])]]
private var recording = false
override def appendLog(level: Level.Value, message: => String): Unit = {

View File

@ -600,7 +600,7 @@ trait Appender extends AutoCloseable {
}
private def appendMessageContent(level: Level.Value, o: AnyRef): Unit = {
def appendEvent(oe: ObjectEvent[_]): Unit = {
def appendEvent(oe: ObjectEvent[?]): Unit = {
val contentType = oe.contentType
contentType match {
case "sbt.internal.util.TraceEvent" => appendTraceEvent(oe.message.asInstanceOf[TraceEvent])

View File

@ -256,7 +256,7 @@ object Terminal {
* already wraps a jline terminal, so we can just return the wrapped jline
* terminal.
*/
private[sbt] def toJLine: jline.Terminal with jline.Terminal2 = term match {
private[sbt] def toJLine: jline.Terminal & jline.Terminal2 = term match {
case _ =>
new jline.Terminal with jline.Terminal2 {
override def init(): Unit = {}
@ -484,7 +484,7 @@ object Terminal {
override private[sbt] def withRawOutput[R](f: => R): R = t.withRawOutput(f)
override def restore(): Unit = t.restore()
override def close(): Unit = {}
override private[sbt] def write(bytes: Int*): Unit = t.write(bytes: _*)
override private[sbt] def write(bytes: Int*): Unit = t.write(bytes*)
override def getLastLine: Option[String] = t.getLastLine
override def getLines: Seq[String] = t.getLines
override private[sbt] def name: String = t.name
@ -1041,7 +1041,7 @@ object Terminal {
override private[sbt] val printStream: PrintStream = new LinePrintStream(outputStream)
override def inputStream: InputStream = in
private[sbt] def write(bytes: Int*): Unit = in.write(bytes: _*)
private[sbt] def write(bytes: Int*): Unit = in.write(bytes*)
private val isStopped = new AtomicBoolean(false)
override def getLineHeightAndWidth(line: String): (Int, Int) = getWidth match {

View File

@ -12,7 +12,7 @@ import xsbti.{ Problem, Severity, Position }
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
import java.util.Optional
trait ProblemFormats { self: SeverityFormats with PositionFormats with sjsonnew.BasicJsonProtocol =>
trait ProblemFormats { self: SeverityFormats & PositionFormats & sjsonnew.BasicJsonProtocol =>
implicit lazy val ProblemFormat: JsonFormat[Problem] = new JsonFormat[Problem] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Problem = {
jsOpt match {

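Compound types written with `with` are replaced by Scala 3 intersection types (`&`), both in self-type annotations like the one above and in ordinary type positions. A small sketch with hypothetical traits:

    trait JsonSupport:
      def toJson(s: String): String = s"\"$s\""

    trait LoggingSupport:
      def log(msg: String): Unit = println(msg)

    trait Service:
      // Scala 2: self: JsonSupport with LoggingSupport =>
      self: JsonSupport & LoggingSupport =>
      def handle(payload: String): Unit = log(toJson(payload))

    // A value of an intersection type exposes the members of both traits.
    def run(svc: Service & JsonSupport & LoggingSupport): Unit = svc.handle("ping")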
View File

@ -23,7 +23,7 @@ import scala.collection.concurrent
sealed abstract class LogExchange {
private[sbt] lazy val context: XLoggerContext = init()
private[sbt] val stringCodecs: concurrent.Map[String, ShowLines[_]] = concurrent.TrieMap()
private[sbt] val stringCodecs: concurrent.Map[String, ShowLines[?]] = concurrent.TrieMap()
private[sbt] val builtInStringCodecs: Unit = initStringCodecs()
private[util] val configs = new ConcurrentHashMap[String, LoggerConfig]
private[util] def addConfig(name: String, config: LoggerConfig): Unit =

View File

@ -248,9 +248,9 @@ object Logic {
case None => state // all of the remaining clauses failed on the new facts
case Some(applied) =>
val (proven, unprovenClauses) = findProven(applied)
val processedFacts = state add keepPositive(factsToProcess)
val processedFacts = state.add(keepPositive(factsToProcess))
val newlyProven = proven -- processedFacts.provenSet
val newState = processedFacts add newlyProven
val newState = processedFacts.add(newlyProven)
if (unprovenClauses.isEmpty) newState // no remaining clauses, done.
else {
val unproven = Clauses(unprovenClauses)

View File

@ -57,7 +57,7 @@ object RelationTest extends Properties("Relation") {
property("Groups correctly") = forAll { (entries: List[(Int, Double)], randomInt: Int) =>
val splitInto = math.abs(randomInt) % 10 + 1 // Split into 1-10 groups.
val rel = Relation.empty[Int, Double] ++ entries
val grouped = rel groupBy (_._1 % splitInto)
val grouped = rel.groupBy(_._1 % splitInto)
all(grouped.toSeq) { case (k, rel_k) =>
rel_k._1s forall { _ % splitInto == k }
}

View File

@ -19,13 +19,13 @@ class FileCommands(baseDirectory: File) extends BasicStatementHandler {
lazy val commands = commandMap
def commandMap =
Map(
"touch" nonEmpty touch _,
"delete" nonEmpty delete _,
"exists" nonEmpty exists _,
"mkdir" nonEmpty makeDirectories _,
"absent" nonEmpty absent _,
"touch".nonEmpty(touch),
"delete".nonEmpty(delete),
"exists".nonEmpty(exists),
"mkdir".nonEmpty(makeDirectories),
"absent".nonEmpty(absent),
// "sync" twoArg("Two directory paths", sync _),
"newer".twoArg("Two paths", newer _),
"newer".twoArg("Two paths", newer),
"pause" noArg {
println("Pausing in " + baseDirectory)
/*readLine("Press enter to continue. ") */
@ -34,11 +34,11 @@ class FileCommands(baseDirectory: File) extends BasicStatementHandler {
println()
},
"sleep".oneArg("Time in milliseconds", time => Thread.sleep(time.toLong)),
"exec" nonEmpty (execute _),
"copy" copy (to => rebase(baseDirectory, to)),
"copy-file".twoArg("Two paths", copyFile _),
"must-mirror".twoArg("Two paths", diffFiles _),
"copy-flat" copy flat
"exec".nonEmpty(execute),
"copy".copy(to => rebase(baseDirectory, to)),
"copy-file".twoArg("Two paths", copyFile),
"must-mirror".twoArg("Two paths", diffFiles),
"copy-flat".copy(flat),
)
def apply(command: String, arguments: List[String]): Unit =

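Trailing-underscore eta expansion (`touch _`, `newer _`) is no longer needed: in Scala 3 a bare method name is converted to a function value automatically. A small sketch with hypothetical handlers:

    def touch(paths: List[String]): Unit = paths.foreach(p => println(s"touch $p"))
    def delete(paths: List[String]): Unit = paths.foreach(p => println(s"delete $p"))

    // Scala 2 often needed `touch _`; Scala 3 eta-expands the bare method name.
    val commands: Map[String, List[String] => Unit] =
      Map("touch" -> touch, "delete" -> delete)

    commands("touch")(List("build.sbt"))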
View File

@ -15,7 +15,7 @@ import java.net.URL
final class FilteredLoader(parent: ClassLoader) extends ClassLoader(parent) {
@throws(classOf[ClassNotFoundException])
override final def loadClass(className: String, resolve: Boolean): Class[_] = {
override final def loadClass(className: String, resolve: Boolean): Class[?] = {
if (className.startsWith("java.") || className.startsWith("javax."))
super.loadClass(className, resolve)
else

View File

@ -144,7 +144,7 @@ final class ScriptedTests(
// // val sbtHandler = new SbtHandler(testDirectory, launcher, buffered, launchOpts)
// new TestScriptParser(Map('$' -> fileHandler, /* '>' -> sbtHandler, */ '#' -> CommentHandler))
val scriptConfig = new ScriptConfig(label, testDirectory, log)
new TestScriptParser(handlersProvider getHandlers scriptConfig)
new TestScriptParser(handlersProvider.getHandlers(scriptConfig))
}
val (file, pending) = {
val normal = new File(testDirectory, ScriptFilename)
@ -224,7 +224,7 @@ final class ListTests(baseDirectory: File, accept: ScriptedTest => Boolean, log:
log.warn("Tests skipped in group " + group.getName + ":")
skipped.foreach(testName => log.warn(" " + testName.getName))
}
Seq(included.map(_.getName): _*)
Seq(included.map(_.getName)*)
}
}
}

View File

@ -109,7 +109,7 @@ class TestScriptParser(handlers: Map[Char, StatementHandler]) extends RegexParse
("\'" ~> "[^'\n\r]*".r <~ "\'") | "\"[^\"\n\r]*\"".r | WordRegex
def startCharacterParser: Parser[Char] =
elem("start character", handlers.contains _) |
elem("start character", handlers.contains) |
(
(newline | err("expected start character " + handlers.keys.mkString("(", "", ")")))
~> failure("end of input")

View File

@ -6,11 +6,11 @@ object VersionRange {
/** True if the revision is an ivy-range, not a complete revision. */
def isVersionRange(revision: String): Boolean = {
(revision endsWith "+") ||
(revision contains "[") ||
(revision contains "]") ||
(revision contains "(") ||
(revision contains ")")
(revision.endsWith("+")) ||
(revision.contains("[")) ||
(revision.contains("]")) ||
(revision.contains("(")) ||
(revision.contains(")"))
}
// Assuming Ivy is used to resolve conflict, this removes the version range
@ -58,7 +58,7 @@ object VersionRange {
case NumPlusPattern(tail) => (0 until maxDigit).map(plusRange(tail, _)).mkString(",")
case DotNumPlusPattern(base, tail) =>
(0 until maxDigit).map(plusRange(base + "." + tail, _)).mkString(",")
case rev if rev endsWith "+" =>
case rev if rev.endsWith("+") =>
sys.error(s"dynamic revision '$rev' cannot be translated to POM")
case rev if startSym(rev(0)) && stopSym(rev(rev.length - 1)) =>
val start = rev(0)

View File

@ -15,7 +15,9 @@ trait GlobalLockFormat { self: BasicJsonProtocol =>
implicit lazy val globalLockIsoString: IsoString[GlobalLock] =
IsoString.iso(_ => "<lock>", _ => NoGlobalLock)
implicit lazy val GlobalLockFormat: JsonFormat[GlobalLock] = isoStringFormat(globalLockIsoString)
implicit lazy val GlobalLockFormat: JsonFormat[GlobalLock] = isoStringFormat(using
globalLockIsoString
)
}
private[sbt] object GlobalLockFormats {

View File

@ -12,5 +12,5 @@ trait LoggerFormat { self: BasicJsonProtocol =>
implicit lazy val xsbtiLoggerIsoString: IsoString[Logger] =
IsoString.iso(_ => "<logger>", _ => Null)
implicit lazy val LoggerFormat: JsonFormat[Logger] = isoStringFormat(implicitly)
implicit lazy val LoggerFormat: JsonFormat[Logger] = isoStringFormat(using implicitly)
}
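When an implicit/given argument is passed explicitly, Scala 3 requires the `using` keyword at the call site, as in `isoStringFormat(using implicitly)` above. A minimal sketch with a hypothetical type class:

    trait Show[A]:
      def show(a: A): String

    given Show[Int] with
      def show(a: Int): String = a.toString

    def render[A](a: A)(using s: Show[A]): String = s.show(a)

    // resolved from the given in scope
    val implicitCall = render(42)
    // passed explicitly: needs `using` at the call site in Scala 3
    val explicitCall = render(42)(using summon[Show[Int]])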

View File

@ -35,7 +35,7 @@ object Configurations {
}
private[sbt] def internal(base: Configuration, ext: Configuration*) =
Configuration.of(base.id + "Internal", base.name + "-internal").extend(ext: _*).hide
Configuration.of(base.id + "Internal", base.name + "-internal").extend(ext*).hide
private[sbt] def fullInternal(base: Configuration): Configuration =
internal(base, base, Optional, Provided)
private[sbt] def optionalInternal(base: Configuration): Configuration =
@ -44,10 +44,10 @@ object Configurations {
lazy val Default = Configuration.of("Default", "default")
lazy val Compile = Configuration.of("Compile", "compile")
@deprecated("Create a separate subproject for testing instead", "1.9.0")
lazy val IntegrationTest = Configuration.of("IntegrationTest", "it") extend (Runtime)
lazy val IntegrationTest = Configuration.of("IntegrationTest", "it").extend(Runtime)
lazy val Provided = Configuration.of("Provided", "provided")
lazy val Runtime = Configuration.of("Runtime", "runtime") extend (Compile)
lazy val Test = Configuration.of("Test", "test") extend (Runtime)
lazy val Runtime = Configuration.of("Runtime", "runtime").extend(Compile)
lazy val Test = Configuration.of("Test", "test").extend(Runtime)
lazy val System = Configuration.of("System", "system")
lazy val Optional = Configuration.of("Optional", "optional")
lazy val Pom = Configuration.of("Pom", "pom")
@ -66,9 +66,9 @@ object Configurations {
private[sbt] def removeDuplicates(configs: Iterable[Configuration]) =
Set(
scala.collection.mutable
.Map(configs.map(config => (config.name, config)).toSeq: _*)
.Map(configs.map(config => (config.name, config)).toSeq*)
.values
.toList: _*
.toList*
)
/** Returns true if the configuration should be under the influence of scalaVersion. */
@ -115,9 +115,9 @@ private[sbt] object ConfigurationMacro:
import quotes.reflect.*
def enclosingTerm(sym: Symbol): Symbol =
sym match
case sym if sym.flags is Flags.Macro => enclosingTerm(sym.owner)
case sym if !sym.isTerm => enclosingTerm(sym.owner)
case _ => sym
case sym if sym.flags.is(Flags.Macro) => enclosingTerm(sym.owner)
case sym if !sym.isTerm => enclosingTerm(sym.owner)
case _ => sym
val term = enclosingTerm(Symbol.spliceOwner)
if !term.isValDef then
report.error(

View File

@ -7,7 +7,7 @@ package librarymanagement
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
trait ConfigurationFormats {
self: sbt.librarymanagement.ConfigurationFormats with sjsonnew.BasicJsonProtocol =>
self: sbt.librarymanagement.ConfigurationFormats & sjsonnew.BasicJsonProtocol =>
implicit lazy val ConfigurationFormat: JsonFormat[sbt.librarymanagement.Configuration] =
new JsonFormat[sbt.librarymanagement.Configuration] {
override def read[J](

View File

@ -455,14 +455,10 @@ trait For2_13Use3Formats { self: sjsonnew.BasicJsonProtocol =>
}
trait CrossVersionFormats {
self: sjsonnew.BasicJsonProtocol
with sbt.librarymanagement.DisabledFormats
with sbt.librarymanagement.BinaryFormats
with sbt.librarymanagement.ConstantFormats
with sbt.librarymanagement.PatchFormats
with sbt.librarymanagement.FullFormats
with sbt.librarymanagement.For3Use2_13Formats
with sbt.librarymanagement.For2_13Use3Formats =>
self: sjsonnew.BasicJsonProtocol & sbt.librarymanagement.DisabledFormats &
sbt.librarymanagement.BinaryFormats & sbt.librarymanagement.ConstantFormats &
sbt.librarymanagement.PatchFormats & sbt.librarymanagement.FullFormats &
sbt.librarymanagement.For3Use2_13Formats & sbt.librarymanagement.For2_13Use3Formats =>
implicit lazy val CrossVersionFormat: JsonFormat[CrossVersion] = {
val format = flatUnionFormat8[
CrossVersion,

View File

@ -67,7 +67,7 @@ object DependencyBuilders {
}
final class RepositoryName private[sbt] (name: String) {
def at(location: String): MavenRepository = {
infix def at(location: String): MavenRepository = {
nonEmpty(location, "Repository location")
MavenRepository(name, location)
}

View File

@ -68,7 +68,7 @@ object DependencyFilter extends DependencyFilterExtra {
}
implicit def fnToConfigurationFilter(f: ConfigRef => Boolean): ConfigurationFilter =
new ConfigurationFilter { def apply(c: ConfigRef) = f(c) }
implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, _]): Arg => Boolean = f apply _
implicit def subDepFilterToFn[Arg](f: SubDepFilter[Arg, ?]): Arg => Boolean = f.apply(_)
}
trait DependencyFilter {
def apply(configuration: ConfigRef, module: ModuleID, artifact: Artifact): Boolean

View File

@ -169,7 +169,7 @@ class DependencyResolution private[sbt] (lmEngine: DependencyResolutionInterface
restrictedCopy(m, true)
}
// Adding list of explicit artifacts here.
val exls = Map(config.excludes map { case (k, v) => (k, v.toSet) }: _*)
val exls = Map(config.excludes map { case (k, v) => (k, v.toSet) }*)
val deps = baseModules.distinct flatMap classifiedArtifacts(classifiers, exls, artifacts)
val base = restrictedCopy(id, true).withName(id.name + classifiers.mkString("$", "_", ""))
val moduleSetting = ModuleDescriptorConfiguration(base, ModuleInfo(base.name))

View File

@ -81,7 +81,7 @@ object EvictionError {
case _ =>
List((s.organization, s.name) -> versionScheme)
}
}: _*)
}*)
pairs foreach {
// don't report on a transitive eviction that does not have a winner

View File

@ -36,7 +36,7 @@ private[librarymanagement] abstract class MavenRepositoryFunctions {
private[librarymanagement] abstract class PatternsFunctions {
implicit def defaultPatterns: Patterns = Resolver.defaultPatterns
def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns: _*)
def apply(artifactPatterns: String*): Patterns = Patterns(true, artifactPatterns*)
def apply(isMavenCompatible: Boolean, artifactPatterns: String*): Patterns = {
val patterns = artifactPatterns.toVector
Patterns()
@ -54,7 +54,7 @@ private[librarymanagement] trait SshBasedRepositoryExtra {
type RepositoryType <: SshBasedRepository
protected def copy(connection: SshConnection): RepositoryType
private def copy(authentication: SshAuthentication): RepositoryType =
copy(connection withAuthentication authentication)
copy(connection.withAuthentication(authentication))
/** Configures this to use the specified user name and password when connecting to the remote repository. */
def as(user: String, password: String): RepositoryType = as(user, Some(password))

View File

@ -15,7 +15,7 @@ final class ResolveException(
failed,
Map(failed map { m =>
m -> Nil
}: _*)
}*)
)
}
@ -36,7 +36,7 @@ object UnresolvedWarning {
def modulePosition(m0: ModuleID): Option[SourcePosition] =
config.modulePositions.find { case (m, _) =>
(m.organization == m0.organization) &&
(m0.name startsWith m.name) &&
(m0.name.startsWith(m.name)) &&
(m.revision == m0.revision)
} map { case (_, p) =>
p

View File

@ -158,7 +158,7 @@ private[librarymanagement] abstract class UpdateReportExtra {
}
def retrieve(f: (ConfigRef, ModuleID, Artifact, File) => File): UpdateReport =
UpdateReport(cachedDescriptor, configurations map { _ retrieve f }, stats, stamps)
UpdateReport(cachedDescriptor, configurations.map { _.retrieve(f) }, stats, stamps)
/** Gets the report for the given configuration, or `None` if the configuration was not resolved. */
def configuration(s: ConfigRef) = configurations.find(_.configuration == s)

View File

@ -36,16 +36,16 @@ class UpdateReportSpec extends AnyFlatSpec with Matchers {
lazy val moduleReport = (
ModuleReport(ModuleID("org", "name", "1.0"), Vector.empty, Vector.empty)
withPublicationDate Some(epochCalendar)
.withPublicationDate(Some(epochCalendar))
)
lazy val organizationArtifactReport =
OrganizationArtifactReport("org", "name", Vector(moduleReport))
val epochCalendar: java.util.Calendar = {
val utc = java.util.TimeZone getTimeZone "UTC"
val utc = java.util.TimeZone.getTimeZone("UTC")
val c = new java.util.GregorianCalendar(utc, java.util.Locale.ENGLISH)
c setTimeInMillis 0L
c.setTimeInMillis(0L)
c
}
}

View File

@ -211,7 +211,7 @@ class VersionNumberSpec extends AnyFreeSpec with Matchers with Inside {
case SemVer => "SemVer"
case PackVer => "PackVer"
case EarlySemVer => "EarlySemVer"
case _ => val s = vnc.name; if (s contains " ") s""""$s"""" else s
case _ => val s = vnc.name; if s.contains(" ") then s""""$s"""" else s
}
s"$prefix be $compatibilityStrategy compatible with $v2" in {
vnc.isCompatible(VersionNumber(v1.value), VersionNumber(v2)) shouldBe expectOutcome

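Several conditionals are rewritten with Scala 3's `if ... then ... else` control syntax, as in the `vnc.name` case above. A tiny sketch:

    def quoteIfSpaced(name: String): String =
      if name.contains(" ") then s"\"$name\"" else name

    val a = quoteIfSpaced("Early SemVer")   // "\"Early SemVer\""
    val b = quoteIfSpaced("SemVer")         // "SemVer"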
View File

@ -104,7 +104,7 @@ object ResolutionRun {
params.params
.addForceVersion(
(if (isSandboxConfig) Nil
else params.interProjectDependencies.map(_.moduleVersion)): _*
else params.interProjectDependencies.map(_.moduleVersion))*
)
.withForceScalaVersion(params.autoScalaLibOpt.nonEmpty)
.withScalaVersionOpt(params.autoScalaLibOpt.map(_._2))

View File

@ -133,7 +133,7 @@ object Resolvers {
private object IBiblioRepository {
private def stringVector(v: java.util.List[_]): Vector[String] =
private def stringVector(v: java.util.List[?]): Vector[String] =
Option(v).map(_.asScala.toVector).getOrElse(Vector.empty).collect { case s: String =>
s
}

View File

@ -44,7 +44,7 @@ object UpdateRun {
}
.groupBy(_.withConfiguration(Configuration.empty))
.map { case (dep, l) =>
dep.withConfiguration(Configuration.join(l.map(_.configuration).toSeq: _*))
dep.withConfiguration(Configuration.join(l.map(_.configuration).toSeq*))
}
.toSet

View File

@ -51,7 +51,7 @@ private[sbt] object ConvertResolver {
*/
private object ChecksumFriendlyURLResolver {
import java.lang.reflect.AccessibleObject
private def reflectiveLookup[A <: AccessibleObject](f: Class[_] => A): Option[A] =
private def reflectiveLookup[A <: AccessibleObject](f: Class[?] => A): Option[A] =
try {
val cls = classOf[RepositoryResolver]
val thing = f(cls)
@ -377,8 +377,8 @@ private[sbt] object ConvertResolver {
else BasicResolver.DESCRIPTOR_REQUIRED
)
resolver.setCheckconsistency(!patterns.skipConsistencyCheck)
patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings substitute p))
patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings substitute p))
patterns.ivyPatterns.foreach(p => resolver.addIvyPattern(settings.substitute(p)))
patterns.artifactPatterns.foreach(p => resolver.addArtifactPattern(settings.substitute(p)))
}
/**
@ -452,7 +452,7 @@ private[sbt] object ConvertResolver {
catch {
case e: java.io.IOException if e.getMessage.contains("destination already exists") =>
val overwriteWarning =
if (destination contains "-SNAPSHOT") s"Attempting to overwrite $destination"
if destination.contains("-SNAPSHOT") then s"Attempting to overwrite $destination"
else
s"Attempting to overwrite $destination (non-SNAPSHOT)\n\tYou need to remove it from the cache manually to take effect."
import org.apache.ivy.util.Message

View File

@ -121,7 +121,7 @@ object CustomPomParser {
}
private def hash(ss: Seq[String]): String =
Hash.toHex(Hash(ss.flatMap(_ getBytes "UTF-8").toArray))
Hash.toHex(Hash(ss.flatMap(_.getBytes("UTF-8")).toArray))
// Unfortunately, ModuleDescriptorParserRegistry is add-only and is a singleton instance.
lazy val registerDefault: Unit = ModuleDescriptorParserRegistry.getInstance.addParser(default)
@ -136,12 +136,11 @@ object CustomPomParser {
val MyHash = MakeTransformHash(md)
// sbt 0.13.1 used "sbtTransformHash" instead of "e:sbtTransformHash" until #1192 so read both
Option(extraInfo).isDefined &&
((Option(extraInfo get TransformedHashKey) orElse Option(
extraInfo get oldTransformedHashKey
)) match {
(Option(extraInfo.get(TransformedHashKey))
.orElse(Option(extraInfo.get(oldTransformedHashKey))) match
case Some(MyHash) => true
case _ => false
})
)
}
private def defaultTransformImpl(

View File

@ -113,7 +113,7 @@ private[sbt] class FakeResolver(private var name: String, cacheDir: File, module
val mrid = dd.getDependencyRevisionId()
val artifact = modules get ((organisation, name, revision)) map { arts =>
val artifacts: Array[DependencyArtifactDescriptor] = arts.toArray map (_ artifactOf dd)
val artifacts: Array[DependencyArtifactDescriptor] = arts.toArray.map(_.artifactOf(dd))
val moduleDescriptor = DefaultModuleDescriptor.newDefaultInstance(mrid, artifacts)
val defaultArtifact = arts.headOption match {
case Some(FakeArtifact(name, tpe, ext, _)) =>
@ -163,13 +163,13 @@ private[sbt] class FakeResolver(private var name: String, cacheDir: File, module
override def listTokenValues(
tokens: Array[String],
criteria: java.util.Map[_, _]
): Array[java.util.Map[_, _]] =
criteria: java.util.Map[?, ?]
): Array[java.util.Map[?, ?]] =
Array.empty
override def listTokenValues(
token: String,
otherTokenValues: java.util.Map[_, _]
otherTokenValues: java.util.Map[?, ?]
): Array[String] =
Array.empty

View File

@ -515,9 +515,9 @@ private[sbt] object IvySbt {
private[sbt] def isChanging(dd: DependencyDescriptor): Boolean =
dd.isChanging || isChanging(dd.getDependencyRevisionId)
private[sbt] def isChanging(module: ModuleID): Boolean =
module.revision endsWith "-SNAPSHOT"
module.revision.endsWith("-SNAPSHOT")
private[sbt] def isChanging(mrid: ModuleRevisionId): Boolean =
mrid.getRevision endsWith "-SNAPSHOT"
mrid.getRevision.endsWith("-SNAPSHOT")
def resolverChain(
name: String,
@ -564,7 +564,7 @@ private[sbt] object IvySbt {
def hasImplicitClassifier(artifact: IArtifact): Boolean = {
import scala.jdk.CollectionConverters._
artifact.getQualifiedExtraAttributes.asScala.keys
.exists(_.asInstanceOf[String] startsWith "m:")
.exists(_.asInstanceOf[String].startsWith("m:"))
}
private def setModuleConfigurations(
settings: IvySettings,
@ -956,17 +956,16 @@ private[sbt] object IvySbt {
val deps = new java.util.LinkedHashMap[ModuleRevisionId, List[DependencyDescriptor]]
for (dd <- dependencies) {
val id = dd.getDependencyRevisionId
val updated = deps get id match {
val updated = deps.get(id) match
case null => dd :: Nil
case v => dd :: v
}
deps.put(id, updated)
}
import scala.jdk.CollectionConverters._
deps.values.asScala.toSeq.flatMap { dds =>
val mergeable = dds.lazyZip(dds.tail).forall(ivyint.MergeDescriptors.mergeable _)
if (mergeable) dds.reverse.reduceLeft(ivyint.MergeDescriptors.apply _) :: Nil else dds
val mergeable = dds.lazyZip(dds.tail).forall(ivyint.MergeDescriptors.mergeable)
if (mergeable) dds.reverse.reduceLeft(ivyint.MergeDescriptors.apply) :: Nil else dds
}
}
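
Two further Scala 3 idioms appear in this file: `match` no longer needs braces around its cases (`deps.get(id) match` above), and a method value no longer needs a trailing `_` (`MergeDescriptors.mergeable` instead of `MergeDescriptors.mergeable _`). A standalone sketch under the same conventions; the names are made up:

object MatchEtaSketch:
  // Braceless `match`, as in the rewritten `deps.get(id) match` above.
  def describe(x: Option[Int]): String =
    x match
      case Some(n) => s"value $n"
      case None    => "missing"

  def main(args: Array[String]): Unit =
    // No trailing `_`: `describe` is passed as a function value directly.
    println(List(Some(1), None, Some(3)).map(describe).mkString(", "))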

View File

@ -127,7 +127,7 @@ object IvyActions {
Option(deliver(module, configuration, log))
}
val artifacts = Map(configuration.artifacts: _*)
val artifacts = Map(configuration.artifacts*)
val checksums = configuration.checksums
module.withModule(log) { case (ivy, md, _) =>
val resolver = ivy.getSettings.getResolver(resolverName)

View File

@ -82,14 +82,14 @@ object IvyRetrieve {
confReport: ConfigurationResolveReport,
dep: IvyNode
): ModuleReport = {
def toExtraAttributes(ea: ju.Map[_, _]): Map[String, String] =
def toExtraAttributes(ea: ju.Map[?, ?]): Map[String, String] =
Map(ea.entrySet.toArray collect {
case entry: ju.Map.Entry[_, _]
if nonEmptyString(entry.getKey.toString).isDefined && nonEmptyString(
entry.getValue.toString
).isDefined =>
(entry.getKey.toString, entry.getValue.toString)
}: _*)
}*)
def toCaller(caller: IvyCaller): Caller = {
val m = toModuleID(caller.getModuleRevisionId)
val callerConfigurations = caller.getCallerConfigurations.toVector collect {

View File

@ -186,7 +186,7 @@ object IvyScalaUtil {
if (configurations.isEmpty) names
else {
val configSet = configurationSet(configurations)
configSet.intersect(HashSet(names: _*))
configSet.intersect(HashSet(names*))
configSet.toArray
}
}
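
`HashSet(names*)` above uses the new vararg-splice spelling: `xs*` replaces `xs: _*` throughout the PR. Illustrative sketch, not sbt code:

object VarargSplatSketch:
  def sum(ns: Int*): Int = ns.sum

  def main(args: Array[String]): Unit =
    val xs = Seq(1, 2, 3)
    // Scala 2: sum(xs: _*)    Scala 3, as in `HashSet(names*)` above: sum(xs*)
    println(sum(xs*)) // 6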

View File

@ -503,7 +503,7 @@ class MakePom(val log: Logger) {
r match { case c: ChainResolver => flatten(castResolvers(c.getResolvers)); case _ => r :: Nil }
// cast the contents of a pre-generics collection
private def castResolvers(s: java.util.Collection[_]): Seq[DependencyResolver] = {
private def castResolvers(s: java.util.Collection[?]): Seq[DependencyResolver] = {
import scala.jdk.CollectionConverters._
s.asScala.toSeq.map(_.asInstanceOf[DependencyResolver])
}
@ -531,7 +531,7 @@ class MakePom(val log: Logger) {
configurations: Option[Iterable[Configuration]]
): Seq[DependencyDescriptor] = {
val keepConfigurations = IvySbt.getConfigurations(module, configurations)
val keepSet: Set[String] = Set(keepConfigurations.toSeq: _*)
val keepSet: Set[String] = Set(keepConfigurations.toSeq*)
def translate(dependency: DependencyDescriptor) = {
val keep = dependency.getModuleConfigurations
.filter((conf: String) => keepSet.contains(conf))

View File

@ -285,7 +285,7 @@ private[sbt] class CachedResolutionResolveCache {
): (Vector[ModuleReport], Vector[ModuleReport]) = {
val moduleIdMap = Map(conflicts map { x =>
x.module -> x
}: _*)
}*)
(
surviving map moduleIdMap,
evicted map moduleIdMap map {
@ -412,7 +412,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
}
log.debug("- Unresolved path " + path.toString)
m -> path
}: _*)
}*)
val failed = failedPaths.keys.toSeq
Left(new ResolveException(messages, failed, failedPaths))
}
@ -507,7 +507,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
)
}
}
new ResolveException(messages, failed, ListMap(failedPaths: _*))
new ResolveException(messages, failed, ListMap(failedPaths*))
}
def mergeReports(
@ -589,7 +589,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
oar.organization == organization && oar.name == name
}
((organization, name), xs)
}: _*)
}*)
// this returns a List of Lists of (org, name). should be deterministic
def detectLoops(
allModules: Map[(String, String), Vector[OrganizationArtifactReport]]
@ -625,7 +625,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
loopLists.toList
}
val allModules2: mutable.Map[(String, String), Vector[OrganizationArtifactReport]] =
mutable.Map(allModules0.toSeq: _*)
mutable.Map(allModules0.toSeq*)
@tailrec def breakLoops(loops: List[List[(String, String)]]): Unit =
loops match {
case Nil => ()
@ -836,7 +836,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
}
Seq(((organization, name), oars))
}
Map(reports: _*)
Map(reports*)
}
/**
@ -988,7 +988,7 @@ private[sbt] trait CachedResolutionResolveEngine extends ResolveEngine {
node.getRealConfs(conf).toVector
}
conf0.getName -> remapped
}: _*)
}*)
// This emulates test-internal extending test configuration etc.
val remappedConfigs: Map[String, Vector[String]] =
rootModuleConfs.foldLeft(remappedConfigs0) { (acc0, c) =>

View File

@ -5,34 +5,21 @@ import sjsonnew._
import sbt.librarymanagement._
trait UpdateOptionsFormat {
self: BasicJsonProtocol
with ModuleIDFormats
with ResolverFormats
with sbt.librarymanagement.ArtifactFormats
with sbt.librarymanagement.ConfigRefFormats
with sbt.librarymanagement.ChecksumFormats
with sbt.librarymanagement.InclExclRuleFormats
with sbt.librarymanagement.CrossVersionFormats
with sbt.librarymanagement.DisabledFormats
with sbt.librarymanagement.BinaryFormats
with sbt.librarymanagement.ConstantFormats
with sbt.librarymanagement.PatchFormats
with sbt.librarymanagement.FullFormats
with sbt.librarymanagement.For3Use2_13Formats
with sbt.librarymanagement.For2_13Use3Formats
with sbt.librarymanagement.ChainedResolverFormats
with sbt.librarymanagement.MavenRepoFormats
with sbt.librarymanagement.MavenCacheFormats
with sbt.librarymanagement.PatternsFormats
with sbt.librarymanagement.FileConfigurationFormats
with sbt.librarymanagement.FileRepositoryFormats
with sbt.librarymanagement.URLRepositoryFormats
with sbt.librarymanagement.SshConnectionFormats
with sbt.librarymanagement.SshAuthenticationFormats
with sbt.librarymanagement.SshRepositoryFormats
with sbt.librarymanagement.SftpRepositoryFormats
with sbt.librarymanagement.PasswordAuthenticationFormats
with sbt.librarymanagement.KeyFileAuthenticationFormats =>
self: BasicJsonProtocol & ModuleIDFormats & ResolverFormats &
sbt.librarymanagement.ArtifactFormats & sbt.librarymanagement.ConfigRefFormats &
sbt.librarymanagement.ChecksumFormats & sbt.librarymanagement.InclExclRuleFormats &
sbt.librarymanagement.CrossVersionFormats & sbt.librarymanagement.DisabledFormats &
sbt.librarymanagement.BinaryFormats & sbt.librarymanagement.ConstantFormats &
sbt.librarymanagement.PatchFormats & sbt.librarymanagement.FullFormats &
sbt.librarymanagement.For3Use2_13Formats & sbt.librarymanagement.For2_13Use3Formats &
sbt.librarymanagement.ChainedResolverFormats & sbt.librarymanagement.MavenRepoFormats &
sbt.librarymanagement.MavenCacheFormats & sbt.librarymanagement.PatternsFormats &
sbt.librarymanagement.FileConfigurationFormats & sbt.librarymanagement.FileRepositoryFormats &
sbt.librarymanagement.URLRepositoryFormats & sbt.librarymanagement.SshConnectionFormats &
sbt.librarymanagement.SshAuthenticationFormats & sbt.librarymanagement.SshRepositoryFormats &
sbt.librarymanagement.SftpRepositoryFormats &
sbt.librarymanagement.PasswordAuthenticationFormats &
sbt.librarymanagement.KeyFileAuthenticationFormats =>
/* This is necessary to serialize/deserialize `directResolvers`. */
private implicit val moduleIdJsonKeyFormat: sjsonnew.JsonKeyFormat[ModuleID] = {
new sjsonnew.JsonKeyFormat[ModuleID] {

View File

@ -119,17 +119,23 @@ object EvictionErrorSpec extends BaseIvySpecification {
def oldAkkaPvp = List("com.typesafe.akka" % "*" % "pvp")
lazy val `akkaActor2.1.4` =
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
lazy val `akkaActor2.3.0` =
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
lazy val `akkaActor2.6.0` =
ModuleID("com.typesafe.akka", "akka-actor", "2.6.0").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.6.0")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
lazy val `scala2.10.4` =
ModuleID("org.scala-lang", "scala-library", "2.10.4").withConfigurations(Some("compile"))
lazy val `scala2.12.17` =
@ -137,13 +143,17 @@ object EvictionErrorSpec extends BaseIvySpecification {
lazy val `scala2.13.3` =
ModuleID("org.scala-lang", "scala-library", "2.13.3").withConfigurations(Some("compile"))
lazy val `bananaSesame0.4` =
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary // uses akka-actor 2.1.4
ModuleID("org.w3", "banana-sesame", "0.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary) // uses akka-actor 2.1.4
lazy val `akkaRemote2.3.4` =
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary // uses akka-actor 2.3.4
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary) // uses akka-actor 2.3.4
lazy val `http4s0.21.11` =
("org.http4s" %% "http4s-blaze-server" % "0.21.11").withConfigurations(Some("compile"))
// https://repo1.maven.org/maven2/org/typelevel/cats-effect_2.13/3.0.0-M4/cats-effect_2.13-3.0.0-M4.pom

View File

@ -311,17 +311,23 @@ object EvictionWarningSpec extends BaseIvySpecification {
}
def akkaActor214 =
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
def akkaActor230 =
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
def akkaActor234 =
ModuleID("com.typesafe.akka", "akka-actor", "2.3.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.3.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
def scala2102 =
ModuleID("org.scala-lang", "scala-library", "2.10.2").withConfigurations(Some("compile"))
def scala2103 =
@ -336,17 +342,23 @@ object EvictionWarningSpec extends BaseIvySpecification {
Some("compile")
) // uses commons-io 2.4
def unfilteredUploads080 =
ModuleID("net.databinder", "unfiltered-uploads", "0.8.0").withConfigurations(
Some("compile")
) cross CrossVersion.binary // uses commons-io 1.4
ModuleID("net.databinder", "unfiltered-uploads", "0.8.0")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary) // uses commons-io 1.4
def bananaSesame04 =
ModuleID("org.w3", "banana-sesame", "0.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary // uses akka-actor 2.1.4
ModuleID("org.w3", "banana-sesame", "0.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary) // uses akka-actor 2.1.4
def akkaRemote234 =
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary // uses akka-actor 2.3.4
ModuleID("com.typesafe.akka", "akka-remote", "2.3.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary) // uses akka-actor 2.3.4
def fullOptions = EvictionWarningOptions.full
def javaLibDirectDeps = Vector(commonsIo14, commonsIo24)

View File

@ -24,15 +24,21 @@ object InconsistentDuplicateSpec extends BasicTestSuite {
}
def akkaActor214 =
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.1.4")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
def akkaActor230 =
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
Some("compile")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0")
.withConfigurations(
Some("compile")
)
.cross(CrossVersion.binary)
def akkaActor230Test =
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0").withConfigurations(
Some("test")
) cross CrossVersion.binary
ModuleID("com.typesafe.akka", "akka-actor", "2.3.0")
.withConfigurations(
Some("test")
)
.cross(CrossVersion.binary)
}
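
The test fixtures in these spec files are reformatted rather than changed: the trailing infix `cross CrossVersion.binary` becomes an explicit `.cross(...)` step at the end of a multi-line call chain. The same shape with an invented `Dep` builder; none of these names come from sbt:

final case class Dep(
    org: String,
    name: String,
    rev: String,
    configs: Option[String] = None,
    crossBinary: Boolean = false
):
  def withConfigurations(c: Option[String]): Dep = copy(configs = c)
  def cross(binary: Boolean): Dep = copy(crossBinary = binary)

object ChainSketch:
  def main(args: Array[String]): Unit =
    // Trailing infix `cross ...` becomes one more dotted call, one step per line.
    val dep = Dep("com.typesafe.akka", "akka-actor", "2.1.4")
      .withConfigurations(Some("compile"))
      .cross(binary = true)
    println(dep)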

View File

@ -10,8 +10,10 @@ object PlatformResolutionSpec extends BaseIvySpecification {
cleanCache()
val m = exampleAutoModule(platform = None)
assert(
update(m).configurations.head.modules.map(_.toString).mkString
contains "com.github.scopt:scopt_2.13:4.1.0"
update(m).configurations.head.modules
.map(_.toString)
.mkString
.contains("com.github.scopt:scopt_2.13:4.1.0")
)
}
@ -19,8 +21,10 @@ object PlatformResolutionSpec extends BaseIvySpecification {
cleanCache()
val m = exampleAutoModule(platform = Some("sjs1"))
assert(
update(m).configurations.head.modules.map(_.toString).mkString
contains "com.github.scopt:scopt_sjs1_2.13"
update(m).configurations.head.modules
.map(_.toString)
.mkString
.contains("com.github.scopt:scopt_sjs1_2.13")
)
}
@ -33,8 +37,10 @@ object PlatformResolutionSpec extends BaseIvySpecification {
platform = Some(sjs1),
)
assert(
update(m).configurations.head.modules.map(_.toString).mkString
contains "junit:junit:4.13.1"
update(m).configurations.head.modules
.map(_.toString)
.mkString
.contains("junit:junit:4.13.1")
)
}
@ -47,8 +53,10 @@ object PlatformResolutionSpec extends BaseIvySpecification {
platform = None,
)
assert(
update(m).configurations.head.modules.map(_.toString).mkString
contains "com.github.scopt:scopt_sjs1_2.13"
update(m).configurations.head.modules
.map(_.toString)
.mkString
.contains("com.github.scopt:scopt_sjs1_2.13")
)
}
@ -61,8 +69,10 @@ object PlatformResolutionSpec extends BaseIvySpecification {
platform = None,
)
assert(
update(m).configurations.head.modules.map(_.toString).mkString
contains "com.github.scopt:scopt_2.13:4.1.0"
update(m).configurations.head.modules
.map(_.toString)
.mkString
.contains("com.github.scopt:scopt_2.13:4.1.0")
)
}

View File

@ -71,10 +71,10 @@ abstract class ResolutionSpec extends AbstractEngineSpec {
val report = update(m)
val modules: Seq[String] = report.configurations.head.modules map { _.toString }
assert(modules exists { (x: String) =>
x contains """org.jboss.netty:netty:3.2.0.Final"""
x.contains("""org.jboss.netty:netty:3.2.0.Final""")
})
assert(!(modules exists { (x: String) =>
x contains """org.jboss.netty:netty:3.2.1.Final"""
x.contains("""org.jboss.netty:netty:3.2.1.Final""")
}))
}

View File

@ -43,9 +43,9 @@ private[sbt] object ForkTests {
constant(TestOutput(TestResult.Passed, Map.empty[String, SuiteResult], Iterable.empty))
else
mainTestTask(runners, opts, classpath, converter, fork, log, config.parallel).tagw(
config.tags: _*
config.tags*
)
main.tagw(tags: _*).dependsOn(all(opts.setup): _*) flatMap { results =>
main.tagw(tags*).dependsOn(all(opts.setup)*) flatMap { results =>
all(opts.cleanup).join.map(_ => results)
}
}
@ -61,7 +61,7 @@ private[sbt] object ForkTests {
tags: (Tag, Int)*
): Task[TestOutput] = {
val opts = processOptions(config, tests, log)
apply(runners, opts, config, classpath, converter, fork, log, tags: _*)
apply(runners, opts, config, classpath, converter, fork, log, tags*)
}
def apply(
@ -191,7 +191,7 @@ private[sbt] object ForkTests {
}
}
private def forkFingerprint(f: Fingerprint): Fingerprint with Serializable =
private def forkFingerprint(f: Fingerprint): Fingerprint & Serializable =
f match {
case s: SubclassFingerprint => new ForkMain.SubclassFingerscan(s)
case a: AnnotatedFingerprint => new ForkMain.AnnotatedFingerscan(a)
@ -222,9 +222,9 @@ private final class React(
log.trace(t); react()
case Array(group: String, tEvents: Array[Event]) =>
val events = tEvents.toSeq
listeners.foreach(_ startGroup group)
listeners.foreach(_.startGroup(group))
val event = TestEvent(events)
listeners.foreach(_ testEvent event)
listeners.foreach(_.testEvent(event))
val suiteResult = SuiteResult(events)
results += group -> suiteResult
listeners.foreach(_.endGroup(group, suiteResult.result))
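
`Fingerprint with Serializable` becoming `Fingerprint & Serializable` here, like the long `with`-chained self-type in the UpdateOptionsFormat hunk earlier becoming an `&`-chain, is the move from Scala 2 compound types to Scala 3 intersection types. A compact sketch with invented traits:

object IntersectionSketch:
  trait Named { def name: String }
  trait Versioned { def version: Int }

  // Scala 2 compound type: Named with Versioned
  // Scala 3 intersection type, as in `Fingerprint & Serializable` above:
  def describe(x: Named & Versioned): String = s"${x.name}:${x.version}"

  def main(args: Array[String]): Unit =
    val item = new Named with Versioned {
      def name = "sbt"
      def version = 2
    }
    println(describe(item)) // sbt:2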

View File

@ -40,10 +40,10 @@ object Pkg:
def JarManifest(m: Manifest) = PackageOption.JarManifest(m)
def MainClass(mainClassName: String) = PackageOption.MainClass(mainClassName)
def MainfestAttributes(attributes: (Attributes.Name, String)*) =
PackageOption.ManifestAttributes(attributes: _*)
PackageOption.ManifestAttributes(attributes*)
def ManifestAttributes(attributes: (String, String)*) = {
val converted = for ((name, value) <- attributes) yield (new Attributes.Name(name), value)
PackageOption.ManifestAttributes(converted: _*)
PackageOption.ManifestAttributes(converted*)
}
// 2010-01-01
private val default2010Timestamp: Long = 1262304000000L
@ -191,7 +191,7 @@ object Pkg:
import Attributes.Name._
val attribKeys = Seq(SPECIFICATION_TITLE, SPECIFICATION_VERSION, SPECIFICATION_VENDOR)
val attribVals = Seq(name, version, orgName)
PackageOption.ManifestAttributes(attribKeys.zip(attribVals): _*)
PackageOption.ManifestAttributes(attribKeys.zip(attribVals)*)
}
def addImplManifestAttributes(
name: String,
@ -216,7 +216,7 @@ object Pkg:
val attribVals = Seq(name, version, orgName, org)
PackageOption.ManifestAttributes(attribKeys.zip(attribVals) ++ {
homepage map (h => (IMPLEMENTATION_URL, h.toString))
}: _*)
}*)
}
def makeJar(
@ -243,7 +243,7 @@ object Pkg:
given manifestFormat: JsonFormat[Manifest] = projectFormat[Manifest, Array[Byte]](
m => {
val bos = new java.io.ByteArrayOutputStream()
m write bos
m.write(bos)
bos.toByteArray
},
bs => new Manifest(new java.io.ByteArrayInputStream(bs))
@ -308,7 +308,7 @@ object PackageOption:
unbuilder.endObject()
PackageOption.ManifestAttributes(attributes.map { case (k, v) =>
Attributes.Name(k) -> v
}: _*)
}*)
case None => deserializationError("Expected JsObject but found None")
override def write[J](obj: PackageOption.ManifestAttributes, builder: Builder[J]): Unit =
builder.beginObject()

View File

@ -63,8 +63,8 @@ object RawCompileLike {
options,
maxErrors
)
val cachedComp = inputChanged(cacheStoreFactory make "inputs") { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory make "output") {
val cachedComp = inputChanged(cacheStoreFactory.make("inputs")) { (inChanged, in: Inputs) =>
inputChanged(cacheStoreFactory.make("output")) {
(outChanged, outputs: FilesInfo[PlainFileInfo]) =>
if (inChanged || outChanged)
doCompile(sources, classpath, outputDirectory, options, maxErrors, log)

View File

@ -81,13 +81,13 @@ object Sync {
!target.exists ||
target.isDirectory != source.isDirectory
val updates = relation filter outofdate
val updates = relation.filter(outofdate)
val (cleanDirs, cleanFiles) = (updates._2s ++ removeTargets).partition(_.isDirectory)
IO.delete(cleanFiles)
IO.deleteIfEmpty(cleanDirs)
updates.all.foreach((copy _).tupled)
updates.all.foreach((copy).tupled)
writeInfoVirtual(store, relation, currentInfo, fileConverter)(inStyle.format)
relation

View File

@ -385,7 +385,7 @@ object Tests {
testListeners: Vector[TestReportListener],
config: Execution
): Task[Output] = {
def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_()): _*)
def fj(actions: Iterable[() => Unit]): Task[Unit] = nop.dependsOn(actions.toSeq.fork(_())*)
def partApp(actions: Iterable[ClassLoader => Unit]) = actions.toSeq map { a => () =>
a(loader)
}
@ -399,7 +399,7 @@ object Tests {
makeParallel(loader, runnables, setupTasks, config.tags).map(_.toList)
else
makeSerial(loader, runnables, setupTasks)
val taggedMainTasks = mainTasks.tagw(config.tags: _*)
val taggedMainTasks = mainTasks.tagw(config.tags*)
taggedMainTasks
.map(processResults)
.flatMap { results =>
@ -467,7 +467,7 @@ object Tests {
Info[(String, (SuiteResult, Seq[TestTask]))]().setName(name),
Action.Pure(() => (name, fun.apply()), `inline` = false)
)
val taggedBase = base.tagw(tags: _*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_)): _*)
val taggedBase = base.tagw(tags*).tag(fun.tags.map(ConcurrentRestrictions.Tag(_))*)
taggedBase flatMap { case (name, (result, nested)) =>
val nestedRunnables = createNestedRunnables(loader, fun, nested)
toTasks(loader, nestedRunnables, tags).map { currentResultMap =>
@ -509,7 +509,7 @@ object Tests {
case Nil => acc
}
task { processRunnable(runnables.toList, List.empty) } dependsOn (setupTasks)
task { processRunnable(runnables.toList, List.empty) }.dependsOn(setupTasks)
}
def processResults(results: Iterable[(String, SuiteResult)]): Output =
@ -531,7 +531,7 @@ object Tests {
{ case (Output(v1, m1, _), Output(v2, m2, _)) =>
Output(
(if (severity(v1) < severity(v2)) v2 else v1): TestResult,
Map((m1.toSeq ++ m2.toSeq): _*),
Map((m1.toSeq ++ m2.toSeq)*),
Iterable.empty[Summary]
)
}
@ -550,7 +550,7 @@ object Tests {
(e.overall, e.events)
}
val m = ms reduce { (m1: Map[String, SuiteResult], m2: Map[String, SuiteResult]) =>
Map((m1.toSeq ++ m2.toSeq): _*)
Map((m1.toSeq ++ m2.toSeq)*)
}
Output(overall(rs), m, Iterable.empty)
}

View File

@ -22,7 +22,7 @@ class CacheIvyTest extends Properties("CacheIvy") {
import sjsonnew.support.scalajson.unsafe.Converter
private class InMemoryStore(converter: SupportConverter[JValue]) extends CacheStore {
private var content: JValue = _
private var content: JValue = scala.compiletime.uninitialized
override def delete(): Unit = ()
override def close(): Unit = ()
@ -131,6 +131,6 @@ class CacheIvyTest extends Properties("CacheIvy") {
}
import sbt.librarymanagement.LibraryManagementCodec._
cachePreservesEquality(m, eq _, str)
cachePreservesEquality(m, eq, str)
}
}
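
`private var content: JValue = _` uses the Scala 2 default-initializer syntax, which Scala 3 drops; `scala.compiletime.uninitialized` is the replacement seen above. A sketch with an invented `Slot` class:

import scala.compiletime.uninitialized

final class Slot:
  // Scala 2: private var value: String = _
  private var value: String = uninitialized // null until `set` is called

  def set(v: String): Unit = value = v
  def get: Option[String] = Option(value)

object SlotDemo:
  def main(args: Array[String]): Unit =
    val slot = Slot()
    println(slot.get) // None
    slot.set("ready")
    println(slot.get) // Some(ready)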

View File

@ -97,9 +97,9 @@ object BasicCommands {
}
private def earlyParser: State => Parser[String] = (s: State) => {
val p1 = token(EarlyCommand + "(") flatMap (_ => otherCommandParser(s) <~ token(")"))
val p2 = (token("-") | token("--")) flatMap (_ => levelParser)
val p3 = (token("-") | token("--")) flatMap (_ => addPluginSbtFileStringParser)
val p1 = token(EarlyCommand + "(").flatMap(_ => otherCommandParser(s) <~ token(")"))
val p2 = (token("-") | token("--")).flatMap(_ => levelParser)
val p3 = (token("-") | token("--")).flatMap(_ => addPluginSbtFileStringParser)
p1 | p2 | p3
}
@ -280,8 +280,8 @@ object BasicCommands {
lazy val otherCommandParser: State => Parser[String] =
(s: State) => token(OptSpace ~> combinedLax(s, NotSpaceClass ~ any.*))
def combinedLax(s: State, any: Parser[_]): Parser[String] =
matched((s.combinedParser: Parser[_]) | token(any, hide = const(true)))
def combinedLax(s: State, any: Parser[?]): Parser[String] =
matched((s.combinedParser: Parser[?]) | token(any, hide = const(true)))
def ifLast: Command =
Command(IfLast, Help.more(IfLast, IfLastDetailed))(otherCommandParser)((s, arg) =>
@ -340,7 +340,7 @@ object BasicCommands {
def argsStr = args mkString ", "
def cpStr = cp mkString File.pathSeparator
def fromCpStr = if (cp.isEmpty) "" else s" from $cpStr"
state.log info s"Applying State transformations $argsStr$fromCpStr"
state.log.info(s"Applying State transformations $argsStr$fromCpStr")
val loader =
if (cp.isEmpty) parentLoader else toLoader(cp.map(f => Paths.get(f)), parentLoader)
val loaded =
@ -370,14 +370,14 @@ object BasicCommands {
s.source match {
case Some(c) if c.channelName.startsWith("network") =>
s"${DisconnectNetworkChannel} ${c.channelName}" :: s
case _ => s exit true
case _ => s.exit(true)
}
}
def shutdown: Command = Command.command(Shutdown, shutdownBrief, shutdownBrief) { s =>
s.source match {
case Some(c) if c.channelName.startsWith("network") =>
s"${DisconnectNetworkChannel} ${c.channelName}" :: (Exec(Shutdown, None) +: s)
case _ => s exit true
case _ => s.exit(true)
}
}
@ -395,7 +395,7 @@ object BasicCommands {
def historyParser(s: State): Parser[() => State] =
Command.applyEffect(HistoryCommands.actionParser) { histFun =>
val hp = (s get historyPath).flatten
val hp = s.get(historyPath).flatten
val lines = hp.toList.flatMap(p => IO.readLines(p)).toIndexedSeq
histFun(CHistory(lines, hp)) match {
case Some(commands) =>
@ -406,8 +406,10 @@ object BasicCommands {
}
def oldshell: Command = Command.command(OldShell, Help.more(Shell, OldShellDetailed)) { s =>
val history = (s get historyPath) getOrElse (new File(s.baseDir, ".history")).some
val prompt = (s get shellPrompt) match { case Some(pf) => pf(s); case None => "> " }
val history = s.get(historyPath).getOrElse(new File(s.baseDir, ".history").some)
val prompt = s.get(shellPrompt) match
case Some(pf) => pf(s)
case None => "> "
val reader = new FullReader(history, s.combinedParser, LineReader.HandleCONT, Terminal.console)
val line = reader.readLine(prompt)
line match {
@ -427,7 +429,7 @@ object BasicCommands {
Command(Client, Help.more(Client, ClientDetailed))(_ => clientParser)(runClient)
def clientParser: Parser[Seq[String]] =
(token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF) map (_ => nilSeq))
(token(Space) ~> repsep(StringBasic, token(Space))) | (token(EOF).map(_ => nilSeq))
def runClient(s0: State, inputArg: Seq[String]): State = {
val arguments = inputArg.toList ++
@ -485,7 +487,7 @@ object BasicCommands {
def alias: Command =
Command(AliasCommand, Help.more(AliasCommand, AliasDetailed)) { s =>
val name = token(OpOrID.examples(aliasNames(s): _*))
val name = token(OpOrID.examples(aliasNames(s)*))
val assign = token(OptSpace ~ '=' ~ OptSpace)
val sfree = removeAliases(s)
val to = matched(sfree.combinedParser, partial = true).failOnException | any.+.string
@ -500,13 +502,12 @@ object BasicCommands {
case _ => printAliases(s); s
}
def addAlias(s: State, name: String, value: String): State =
if (Command validID name) {
if Command.validID(name) then
val removed = removeAlias(s, name)
if (value.isEmpty) removed else addAlias0(removed, name, value)
} else {
if value.isEmpty then removed else addAlias0(removed, name, value)
else
System.err.println("Invalid alias name '" + name + "'.")
s.fail
}
private def addAlias0(s: State, name: String, value: String): State =
s.copy(definedCommands = newAlias(name, value) +: s.definedCommands)
@ -515,18 +516,18 @@ object BasicCommands {
def removeAlias(s: State, name: String): State =
s.copy(definedCommands = s.definedCommands.filter(c => !isAliasNamed(name, c)))
def removeTagged(s: State, tag: AttributeKey[_]): State =
def removeTagged(s: State, tag: AttributeKey[?]): State =
s.copy(definedCommands = removeTagged(s.definedCommands, tag))
def removeTagged(as: Seq[Command], tag: AttributeKey[_]): Seq[Command] =
as.filter(c => !(c.tags contains tag))
def removeTagged(as: Seq[Command], tag: AttributeKey[?]): Seq[Command] =
as.filter(c => !(c.tags.contains(tag)))
def isAliasNamed(name: String, c: Command): Boolean = isNamed(name, getAlias(c))
def isNamed(name: String, alias: Option[(String, String)]): Boolean =
alias match { case None => false; case Some((n, _)) => name == n }
def getAlias(c: Command): Option[(String, String)] = c.tags get CommandAliasKey
def getAlias(c: Command): Option[(String, String)] = c.tags.get(CommandAliasKey)
def printAlias(s: State, name: String): Unit = printAliases(aliases(s, (n, _) => n == name))
def printAliases(s: State): Unit = printAliases(allAliases(s))

View File

@ -41,7 +41,7 @@ private[sbt] final class SimpleCommand(
val tags: AttributeMap
) extends Command {
assert(Command validID name, s"'$name' is not a valid command name.")
assert(Command.validID(name), s"'$name' is not a valid command name.")
def help = const(help0)
@ -104,7 +104,7 @@ object Command {
/** Construct a single-argument command with the given name and effect. */
def single(name: String, help: Help = Help.empty)(f: (State, String) => State): Command =
make(name, help)(state => token(trimmed(spacedAny(name)) map apply1(f, state)))
make(name, help)(state => token(trimmed(spacedAny(name)).map(apply1(f, state))))
def single(name: String, briefHelp: (String, String), detail: String)(
f: (State, String) => State
@ -117,7 +117,7 @@ object Command {
def args(name: String, display: String, help: Help = Help.empty)(
f: (State, Seq[String]) => State
): Command =
make(name, help)(state => spaceDelimited(display) map apply1(f, state))
make(name, help)(state => spaceDelimited(display).map(apply1(f, state)))
def args(name: String, briefHelp: (String, String), detail: String, display: String)(
f: (State, Seq[String]) => State
@ -143,7 +143,7 @@ object Command {
def validID(name: String): Boolean = DefaultParsers.matches(OpOrID, name)
def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p map (t => () => f(t))
def applyEffect[T](p: Parser[T])(f: T => State): Parser[() => State] = p.map(t => () => f(t))
def applyEffect[T](
parser: State => Parser[T]
@ -176,13 +176,11 @@ object Command {
commandMap: Map[String, State => Parser[() => State]]
): State => Parser[() => State] =
state =>
token(OpOrID examples commandMap.keys.toSet) flatMap (
id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
}
)
token(OpOrID.examples(commandMap.keys.toSet)).flatMap: id =>
(commandMap get id) match {
case None => failure(invalidValue("command", commandMap.keys)(id))
case Some(c) => c(state)
}
// overload instead of default parameter to keep binary compatibility
@deprecated("Use overload that takes the onParseError callback", since = "1.9.4")
@ -250,7 +248,7 @@ private final class Help0(
def ++(h: Help): Help =
new Help0(
Help0.this.brief ++ h.brief,
Map(Help0.this.detail.toSeq ++ h.detail.toSeq: _*),
Map(Help0.this.detail.toSeq ++ h.detail.toSeq*),
more ++ h.more
)
}
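
The rewritten parser lookup above (`token(OpOrID.examples(...)).flatMap: id =>` followed by an indented block) uses the Scala 3 "fewer braces" lambda form, standard from Scala 3.3 on. The same shape over a plain `Map`, with invented names:

object FewerBracesSketch:
  def main(args: Array[String]): Unit =
    val commands = Map("compile" -> 1, "test" -> 2)
    // Scala 2 shape: commands.get("test").flatMap(n => if (n > 1) Some(n * 10) else None)
    // Scala 3 "fewer braces", as in `.flatMap: id =>` above:
    val result = commands.get("test").flatMap: n =>
      if n > 1 then Some(n * 10) else None
    println(result) // Some(20)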

View File

@ -46,7 +46,7 @@ object CommandUtil {
def fill(s: String, size: Int): String = s + " " * math.max(size - s.length, 0)
def withAttribute[T](s: State, key: AttributeKey[T], ifMissing: String)(f: T => State): State =
s get key match {
s.get(key) match {
case None =>
s.log.error(ifMissing); s.fail
case Some(nav) => f(nav)
@ -54,7 +54,7 @@ object CommandUtil {
def singleArgument(exampleStrings: Set[String]): Parser[String] = {
val arg = (NotSpaceClass ~ any.*) map { case (ns, s) => (ns +: s).mkString }
token(Space) ~> token(arg examples exampleStrings)
token(Space) ~> token(arg.examples(exampleStrings))
}
def detail(selected: String, detailMap: Map[String, String]): String =

View File

@ -179,13 +179,13 @@ trait StateOps extends Any {
def put[T](key: AttributeKey[T], value: T): State
/** Removes the `key` and any associated value from the custom attributes map. */
def remove(key: AttributeKey[_]): State
def remove(key: AttributeKey[?]): State
/** Sets the value associated with `key` in the custom attributes map by transforming the current value. */
def update[T](key: AttributeKey[T])(f: Option[T] => T): State
/** Returns true if `key` exists in the custom attributes map, false if it does not exist. */
def has(key: AttributeKey[_]): Boolean
def has(key: AttributeKey[?]): Boolean
/** The application base directory, which is not necessarily the current working directory. */
def baseDir: File
@ -359,11 +359,11 @@ object State {
def clearGlobalLog = setNext(ClearGlobalLog)
def keepLastLog = setNext(KeepLastLog)
def exit(ok: Boolean) = runExitHooks().setNext(new Return(Exit(if (ok) 0 else 1)))
def get[T](key: AttributeKey[T]) = s.attributes get key
def get[T](key: AttributeKey[T]) = s.attributes.get(key)
def put[T](key: AttributeKey[T], value: T) = s.copy(attributes = s.attributes.put(key, value))
def update[T](key: AttributeKey[T])(f: Option[T] => T): State = put(key, f(get(key)))
def has(key: AttributeKey[_]) = s.attributes contains key
def remove(key: AttributeKey[_]) = s.copy(attributes = s.attributes remove key)
infix def has(key: AttributeKey[?]) = s.attributes.contains(key)
def remove(key: AttributeKey[?]) = s.copy(attributes = s.attributes.remove(key))
def log = s.globalLogging.full
def handleError(t: Throwable): State = handleException(t, s, log)
def fail = {
@ -397,13 +397,19 @@ object State {
def setInteractive(i: Boolean) = s.put(BasicKeys.interactive, i)
def classLoaderCache: IncClassLoaderCache =
s get BasicKeys.classLoaderCache getOrElse (throw new IllegalStateException(
"Tried to get classloader cache for uninitialized state."
))
s.get(BasicKeys.classLoaderCache)
.getOrElse(
throw new IllegalStateException(
"Tried to get classloader cache for uninitialized state."
)
)
private[sbt] def extendedClassLoaderCache: ClassLoaderCache =
s get BasicKeys.extendedClassLoaderCache getOrElse (throw new IllegalStateException(
"Tried to get extended classloader cache for uninitialized state."
))
s.get(BasicKeys.extendedClassLoaderCache)
.getOrElse(
throw new IllegalStateException(
"Tried to get extended classloader cache for uninitialized state."
)
)
def initializeClassLoaderCache: State = {
s.get(BasicKeys.extendedClassLoaderCache).foreach(_.close())
val cache = newClassLoaderCache
@ -454,7 +460,7 @@ object State {
private[sbt] def logFullException(e: Throwable, log: Logger): Unit = {
e.printStackTrace(System.err)
log.trace(e)
log.error(ErrorHandling reducedToString e)
log.error(ErrorHandling.reducedToString(e))
log.error("Use 'last' for the full log.")
}
private[sbt] def getBoolean(s: State, key: AttributeKey[Boolean], default: Boolean): Boolean =

View File

@ -24,7 +24,7 @@ private[sbt] object LegacyWatched {
@tailrec def shouldTerminate: Boolean =
(System.in.available > 0) && (watched.terminateWatch(System.in.read()) || shouldTerminate)
val log = s.log
s get ContinuousEventMonitor match {
s.get(ContinuousEventMonitor) match {
case None =>
val watchState = WatchState.empty(watched.watchService(), watched.watchSources(s))
// This is the first iteration, so run the task and create a new EventMonitor
@ -45,7 +45,7 @@ private[sbt] object LegacyWatched {
(ClearOnFailure :: next :: FailureWall :: repeat :: s)
.put(ContinuousEventMonitor, monitor: EventMonitor)
case Some(eventMonitor) =>
Watched.printIfDefined(watched watchingMessage eventMonitor.state())
Watched.printIfDefined(watched.watchingMessage(eventMonitor.state()))
@tailrec def impl(): State = {
val triggered =
try eventMonitor.awaitEvent()
@ -58,7 +58,7 @@ private[sbt] object LegacyWatched {
false
}
if (triggered) {
Watched.printIfDefined(watched triggeredMessage eventMonitor.state())
Watched.printIfDefined(watched.triggeredMessage(eventMonitor.state()))
ClearOnFailure :: next :: FailureWall :: repeat :: s
} else if (shouldTerminate) {
while (System.in.available() > 0) System.in.read()

View File

@ -361,7 +361,7 @@ class NetworkClient(
else Nil
val processBuilder =
new ProcessBuilder((nohup ++ cmd): _*)
new ProcessBuilder((nohup ++ cmd)*)
.directory(arguments.baseDirectory)
.redirectInput(Redirect.PIPE)
processBuilder.environment.put(Terminal.TERMINAL_PROPS, props)

View File

@ -193,7 +193,7 @@ private[sbt] object Server {
def acl(owner: UserPrincipal) = {
val builder = AclEntry.newBuilder
builder.setPrincipal(owner)
builder.setPermissions(AclEntryPermission.values(): _*)
builder.setPermissions(AclEntryPermission.values()*)
builder.setType(AclEntryType.ALLOW)
builder.build
}

View File

@ -55,7 +55,7 @@ private[sbt] object UITask {
object Reader {
// Avoid filling the stack trace since it isn't helpful here
object interrupted extends InterruptedException
def terminalReader(parser: Parser[_])(
def terminalReader(parser: Parser[?])(
terminal: Terminal,
state: State
): Reader = new Reader {

View File

@ -57,43 +57,43 @@ end BuildSyntax
object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
type Classpath = Seq[Attributed[HashedVirtualFileRef]]
def settings(ss: SettingsDefinition*): Seq[Setting[_]] = ss.flatMap(_.settings)
def settings(ss: SettingsDefinition*): Seq[Setting[?]] = ss.flatMap(_.settings)
val onComplete = SettingKey[() => Unit](
"onComplete",
"Hook to run when task evaluation completes. The type of this setting is subject to change, pending the resolution of SI-2915."
) // .withRank(DSetting)
val triggeredBy = AttributeKey[Seq[Task[_]]]("triggered-by")
val runBefore = AttributeKey[Seq[Task[_]]]("run-before")
val resolvedScoped = SettingKey[ScopedKey[_]](
val triggeredBy = AttributeKey[Seq[Task[?]]]("triggered-by")
val runBefore = AttributeKey[Seq[Task[?]]]("run-before")
val resolvedScoped = SettingKey[ScopedKey[?]](
"resolved-scoped",
"The ScopedKey for the referencing setting or task.",
KeyRanks.DSetting
)
private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[_]](
private[sbt] val taskDefinitionKey = AttributeKey[ScopedKey[?]](
"task-definition-key",
"Internal: used to map a task back to its ScopedKey.",
Invisible
)
lazy val showFullKey: Show[ScopedKey[_]] = showFullKey(None)
lazy val showFullKey: Show[ScopedKey[?]] = showFullKey(None)
def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[_]] =
Show[ScopedKey[_]]((key: ScopedKey[_]) => displayFull(key, keyNameColor))
def showFullKey(keyNameColor: Option[String]): Show[ScopedKey[?]] =
Show[ScopedKey[?]]((key: ScopedKey[?]) => displayFull(key, keyNameColor))
@deprecated("Use showRelativeKey2 which doesn't take the unused multi param", "1.1.1")
def showRelativeKey(
current: ProjectRef,
multi: Boolean,
keyNameColor: Option[String] = None
): Show[ScopedKey[_]] =
): Show[ScopedKey[?]] =
showRelativeKey2(current, keyNameColor)
def showRelativeKey2(
current: ProjectRef,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](key => {
): Show[ScopedKey[?]] =
Show[ScopedKey[?]](key => {
val color: String => String = withColor(_, keyNameColor)
key.scope.extra.toOption
.flatMap(_.get(Scope.customShowString).map(color))
@ -104,7 +104,7 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
private[sbt] def showShortKey(
keyNameColor: Option[String],
): Show[ScopedKey[_]] = {
): Show[ScopedKey[?]] = {
def displayShort(
project: Reference
): String = {
@ -115,7 +115,7 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
case _ => Reference.display(project) + trailing
}
}
Show[ScopedKey[_]](key =>
Show[ScopedKey[?]](key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
@ -129,14 +129,14 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
currentBuild: URI,
multi: Boolean,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
): Show[ScopedKey[?]] =
showBuildRelativeKey2(currentBuild, keyNameColor)
def showBuildRelativeKey2(
currentBuild: URI,
keyNameColor: Option[String] = None,
): Show[ScopedKey[_]] =
Show[ScopedKey[_]](key =>
): Show[ScopedKey[?]] =
Show[ScopedKey[?]](key =>
Scope.display(
key.scope,
withColor(key.key.label, keyNameColor),
@ -193,15 +193,15 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
case _ => Reference.display(project) + " /"
}
def displayFull(scoped: ScopedKey[_]): String = displayFull(scoped, None)
def displayFull(scoped: ScopedKey[?]): String = displayFull(scoped, None)
def displayFull(scoped: ScopedKey[_], keyNameColor: Option[String]): String =
def displayFull(scoped: ScopedKey[?], keyNameColor: Option[String]): String =
Scope.display(scoped.scope, withColor(scoped.key.label, keyNameColor))
def displayMasked(scoped: ScopedKey[_], mask: ScopeMask): String =
def displayMasked(scoped: ScopedKey[?], mask: ScopeMask): String =
Scope.displayMasked(scoped.scope, scoped.key.label, mask)
def displayMasked(scoped: ScopedKey[_], mask: ScopeMask, showZeroConfig: Boolean): String =
def displayMasked(scoped: ScopedKey[?], mask: ScopeMask, showZeroConfig: Boolean): String =
Scope.displayMasked(scoped.scope, scoped.key.label, mask, showZeroConfig)
def withColor(s: String, color: Option[String]): String =
@ -229,9 +229,9 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
else if (s1 == GlobalScope) Some(s2) // s2 is more specific
else super.intersect(s1, s2)
private def definedSettingString(s: Setting[_]): String =
private def definedSettingString(s: Setting[?]): String =
s"derived setting ${s.key.key.label}${positionString(s)}"
private def positionString(s: Setting[_]): String =
private def positionString(s: Setting[?]): String =
s.positionString match { case None => ""; case Some(pos) => s" defined at $pos" }
/**
@ -417,7 +417,7 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
inline def toTask(arg: String): Initialize[Task[A1]] =
import TaskExtra.singleInputTask
FullInstance.flatten(
(Def.stateKey zipWith in)((sTask, it) =>
Def.stateKey.zipWith(in)((sTask, it) =>
sTask map { s =>
Parser.parse(arg, it.parser(s)) match
case Right(a) => Def.value[Task[A1]](a)
@ -460,7 +460,7 @@ object Def extends BuildSyntax with Init[Scope] with InitializeImplicits:
base.copy(info = base.info.set(isDummyTask, true))
}
private[sbt] def isDummy(t: Task[_]): Boolean =
private[sbt] def isDummy(t: Task[?]): Boolean =
t.info.attributes.get(isDummyTask) getOrElse false
end Def
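
Def.scala is written in the indentation-based style: `object Def extends ... :` opens an indented body and `end Def` closes it, with no braces. The same shape in miniature, with invented names:

object Registry:
  private var entries: List[String] = Nil

  def add(name: String): Unit = entries = name :: entries
  def all: List[String] = entries.reverse
end Registry

object RegistryDemo:
  def main(args: Array[String]): Unit =
    Registry.add("compile")
    Registry.add("test")
    println(Registry.all) // List(compile, test)
end RegistryDemo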

View File

@ -18,7 +18,7 @@ import sbt.util.Applicative
/** Parses input and produces a task to run. Constructed using the companion object. */
final class InputTask[A1] private (val parser: State => Parser[Task[A1]]):
def mapTask[S](f: Task[A1] => Task[S]): InputTask[S] =
InputTask[S](s => parser(s) map f)
InputTask[S](s => parser(s).map(f))
def partialInput(in: String): InputTask[A1] =
InputTask[A1](s => Parser(parser(s))(in))
@ -75,7 +75,7 @@ object InputTask:
def free[A1](p: State => Parser[Task[A1]]): InputTask[A1] = make(p)
def free[A1, A2](p: State => Parser[A1])(c: A1 => Task[A2]): InputTask[A2] =
free(s => p(s) map c)
free(s => p(s).map(c))
def separate[A1, A2](
p: State => Parser[A1]

View File

@ -95,17 +95,17 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
def projectConfigurations: Seq[Configuration] = Nil
/** The `Setting`s to add in the scope of each project that activates this AutoPlugin. */
def projectSettings: Seq[Setting[_]] = Nil
def projectSettings: Seq[Setting[?]] = Nil
/**
* The `Setting` to add to the build scope for each project that activates this AutoPlugin.
* The settings returned here are guaranteed to be added to a given build scope only once
* regardless of how many projects for that build activate this AutoPlugin.
*/
def buildSettings: Seq[Setting[_]] = Nil
def buildSettings: Seq[Setting[?]] = Nil
/** The `Setting`s to add to the global scope exactly once if any project activates this AutoPlugin. */
def globalSettings: Seq[Setting[_]] = Nil
def globalSettings: Seq[Setting[?]] = Nil
// TODO?: def commands: Seq[Command]
@ -113,7 +113,7 @@ abstract class AutoPlugin extends Plugins.Basic with PluginsFunctions {
def extraProjects: Seq[Project] = Nil
/** The [[Project]]s to add to the current build based on an existing project. */
def derivedProjects(@deprecated("unused", "") proj: ProjectDefinition[_]): Seq[Project] = Nil
def derivedProjects(@deprecated("unused", "") proj: ProjectDefinition[?]): Seq[Project] = Nil
private[sbt] def unary_! : Exclude = Exclude(this)
@ -164,7 +164,7 @@ sealed trait PluginsFunctions {
object Plugins extends PluginsFunctions {
private[sbt] var defaultRequires: Plugins = _
private[sbt] var defaultRequires: Plugins = scala.compiletime.uninitialized
/**
* Given the available auto plugins `defined`, returns a function that selects [[AutoPlugin]]s for the provided [[AutoPlugin]]s.
@ -280,7 +280,7 @@ object Plugins extends PluginsFunctions {
private def duplicateProvidesError(byAtom: Seq[(Atom, AutoPlugin)]): Unit = {
val dupsByAtom = Map(byAtom.groupBy(_._1).toSeq.map { case (k, v) =>
k -> v.map(_._2)
}: _*)
}*)
val dupStrings =
for ((atom, dups) <- dupsByAtom if dups.size > 1)
yield s"${atom.label} by ${dups.mkString(", ")}"
@ -437,7 +437,7 @@ ${listConflicts(conflicting)}""")
import java.lang.reflect.Field
import scala.util.control.Exception.catching
// Make sure that we don't detect user-defined methods called autoImport
def existsAutoImportVal(clazz: Class[_]): Option[Field] = {
def existsAutoImportVal(clazz: Class[?]): Option[Field] = {
catching(classOf[NoSuchFieldException])
.opt(clazz.getDeclaredField(autoImport))
.orElse(Option(clazz.getSuperclass).flatMap(existsAutoImportVal))

View File

@ -43,7 +43,7 @@ object Previous {
import sjsonnew.BasicJsonProtocol.StringJsonFormat
private[sbt] type ScopedTaskKey[T] = ScopedKey[Task[T]]
private type AnyTaskKey = ScopedTaskKey[Any]
private type Streams = sbt.std.Streams[ScopedKey[_]]
private type Streams = sbt.std.Streams[ScopedKey[?]]
/** The stream where the task value is persisted. */
private final val StreamName = "previous"
@ -61,7 +61,7 @@ object Previous {
def setTask(newTask: ScopedKey[Task[T]]) = new Referenced(newTask, format)
private[sbt] def read(streams: Streams): Option[T] =
try Option(streams(key.cacheKey).cacheStoreFactory.make(StreamName).read[T]()(stamped))
try Option(streams(key.cacheKey).cacheStoreFactory.make(StreamName).read[T]()(using stamped))
catch { case NonFatal(_) => None }
}
@ -142,7 +142,7 @@ object Previous {
ref <- map.get(key.asInstanceOf[Key[Any]])
} {
val out = streams(key.cacheKey).cacheStoreFactory.make(StreamName)
try out.write(v)(ref.stamped)
try out.write(v)(using ref.stamped)
catch { case NonFatal(_) => }
}
}
@ -154,8 +154,9 @@ object Previous {
/** Public as a macro implementation detail. Do not call directly. */
def runtime[T](skey: TaskKey[T])(implicit format: JsonFormat[T]): Initialize[Task[Option[T]]] = {
val inputs =
(Global / cache) zip Def.validated(skey, selfRefOk = true) zip (Global / references)
val inputs = (Global / cache)
.zip(Def.validated(skey, selfRefOk = true))
.zip(Global / references)
inputs { case ((prevTask, resolved), refs) =>
val key = Key(resolved, resolved)
refs.recordReference(key, format) // always evaluated on project load
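
Passing a normally-implicit argument explicitly now takes the `using` keyword at the call site, which is what `read[T]()(using stamped)` and `write(v)(using ref.stamped)` above are about. A minimal sketch with an invented `Show` type class:

object UsingSketch:
  trait Show[A]:
    def show(a: A): String

  given Show[Int] with
    def show(a: Int): String = s"Int($a)"

  def render[A](a: A)(using s: Show[A]): String = s.show(a)

  def main(args: Array[String]): Unit =
    val hex: Show[Int] = a => s"0x${a.toHexString}"
    println(render(42))               // resolves the given: Int(42)
    println(render(42)(using hex))    // explicit argument needs `using`: 0x2a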

View File

@ -38,7 +38,7 @@ sealed trait ProjectDefinition[PR <: ProjectReference] {
* The explicitly defined sequence of settings that configure this project.
* These do not include the automatically appended settings as configured by `auto`.
*/
def settings: Seq[Setting[_]]
def settings: Seq[Setting[?]]
/**
* The references to projects that are aggregated by this project.
@ -65,7 +65,7 @@ sealed trait ProjectDefinition[PR <: ProjectReference] {
/** The [[AutoPlugin]]s enabled for this project. This value is only available on a loaded Project. */
private[sbt] def autoPlugins: Seq[AutoPlugin]
private[sbt] def commonSettings: Seq[Setting[_]]
private[sbt] def commonSettings: Seq[Setting[?]]
override final def hashCode: Int = id.hashCode ^ base.hashCode ^ getClass.hashCode
@ -142,7 +142,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeP
/** Appends settings to the current settings sequence for this project. */
def settings(ss: Def.SettingsDefinition*): Project =
copy(settings = (settings: Seq[Def.Setting[_]]) ++ Def.settings(ss: _*))
copy(settings = (settings: Seq[Def.Setting[?]]) ++ Def.settings(ss*))
/**
* Sets the [[AutoPlugin]]s of this project.
@ -163,7 +163,7 @@ sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeP
/** Definitively set the [[ProjectOrigin]] for this project. */
private[sbt] def setProjectOrigin(origin: ProjectOrigin): Project = copy(projectOrigin = origin)
private[sbt] def setCommonSettings(settings: Seq[Setting[_]]): Project =
private[sbt] def setCommonSettings(settings: Seq[Setting[?]]): Project =
copy(commonSettings = settings)
/**
@ -177,15 +177,15 @@ sealed trait Project extends ProjectDefinition[ProjectReference] with CompositeP
def withId(id: String): Project = copy(id = id)
/** Sets the base directory for this project. */
def in(dir: File): Project = copy(base = dir)
infix def in(dir: File): Project = copy(base = dir)
private[sbt] def copy(
id: String = id,
base: File = base,
aggregate: Seq[ProjectReference] = aggregate,
dependencies: Seq[ClasspathDep[ProjectReference]] = dependencies,
settings: Seq[Setting[_]] = settings,
commonSettings: Seq[Setting[_]] = commonSettings,
settings: Seq[Setting[?]] = settings,
commonSettings: Seq[Setting[?]] = commonSettings,
configurations: Seq[Configuration] = configurations,
plugins: Plugins = plugins,
autoPlugins: Seq[AutoPlugin] = autoPlugins,
@ -255,8 +255,8 @@ object Project:
val base: File,
val aggregate: Seq[PR],
val dependencies: Seq[ClasspathDep[PR]],
val settings: Seq[Def.Setting[_]],
val commonSettings: Seq[Def.Setting[_]],
val settings: Seq[Def.Setting[?]],
val commonSettings: Seq[Def.Setting[?]],
val configurations: Seq[Configuration],
val plugins: Plugins,
val autoPlugins: Seq[AutoPlugin],
@ -273,8 +273,8 @@ object Project:
base: File,
aggregate: Seq[ProjectReference],
dependencies: Seq[ClasspathDep[ProjectReference]],
settings: Seq[Def.Setting[_]],
commonSettings: Seq[Def.Setting[_]],
settings: Seq[Def.Setting[?]],
commonSettings: Seq[Def.Setting[?]],
configurations: Seq[Configuration],
plugins: Plugins,
autoPlugins: Seq[AutoPlugin],
@ -301,8 +301,8 @@ object Project:
base: File,
aggregate: Seq[ProjectRef],
dependencies: Seq[ClasspathDep[ProjectRef]],
settings: Seq[Def.Setting[_]],
commonSettings: Seq[Def.Setting[_]],
settings: Seq[Def.Setting[?]],
commonSettings: Seq[Def.Setting[?]],
configurations: Seq[Configuration],
plugins: Plugins,
autoPlugins: Seq[AutoPlugin],
@ -328,23 +328,23 @@ object Project:
private def validProjectIDStart(id: String): Boolean =
DefaultParsers.parse(id, DefaultParsers.IDStart).isRight
def fillTaskAxis(scoped: ScopedKey[_]): ScopedKey[_] =
def fillTaskAxis(scoped: ScopedKey[?]): ScopedKey[?] =
ScopedKey(Scope.fillTaskAxis(scoped.scope, scoped.key), scoped.key)
def mapScope(f: Scope => Scope): [a] => ScopedKey[a] => ScopedKey[a] =
[a] => (k: ScopedKey[a]) => ScopedKey(f(k.scope), k.key)
def transform(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
def transform(g: Scope => Scope, ss: Seq[Def.Setting[?]]): Seq[Def.Setting[?]] =
val f = mapScope(g)
ss.map { setting =>
setting.mapKey(f).mapReferenced(f)
}
def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[_]]): Seq[Def.Setting[_]] =
def transformRef(g: Scope => Scope, ss: Seq[Def.Setting[?]]): Seq[Def.Setting[?]] =
val f = mapScope(g)
ss.map(_ mapReferenced f)
ss.map(_.mapReferenced(f))
def inThisBuild(ss: Seq[Setting[_]]): Seq[Setting[_]] =
def inThisBuild(ss: Seq[Setting[?]]): Seq[Setting[?]] =
inScope(ThisScope.copy(project = Select(ThisBuild)))(ss)
private[sbt] def inThisBuild[T](i: Initialize[T]): Initialize[T] =
@ -353,13 +353,13 @@ object Project:
private[sbt] def inConfig[T](conf: Configuration, i: Initialize[T]): Initialize[T] =
inScope(ThisScope.copy(config = Select(conf)), i)
def inTask(t: Scoped)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
def inTask(t: Scoped)(ss: Seq[Setting[?]]): Seq[Setting[?]] =
inScope(ThisScope.copy(task = Select(t.key)))(ss)
private[sbt] def inTask[A](t: Scoped, i: Initialize[A]): Initialize[A] =
inScope(ThisScope.copy(task = Select(t.key)), i)
def inScope(scope: Scope)(ss: Seq[Setting[_]]): Seq[Setting[_]] =
def inScope(scope: Scope)(ss: Seq[Setting[?]]): Seq[Setting[?]] =
Project.transform(Scope.replaceThis(scope), ss)
private[sbt] def inScope[A](scope: Scope, i: Initialize[A]): Initialize[A] =

View File

@ -35,7 +35,7 @@ sealed trait Reference:
final def /[K](key: Scoped.ScopingSetting[K]): K = key.rescope(asScope)
final def /(key: AttributeKey[_]): Scope = asScope.rescope(key)
final def /(key: AttributeKey[?]): Scope = asScope.rescope(key)
end Reference
/** A fully resolved, unique identifier for a project or build. */
@ -85,11 +85,11 @@ object Reference {
case (_: BuildRef, _: ProjectRef) => -1
case (_: ProjectRef, _: BuildRef) => 1
}
implicit val buildRefOrdering: Ordering[BuildRef] = (a, b) => a.build compareTo b.build
implicit val buildRefOrdering: Ordering[BuildRef] = (a, b) => a.build.compareTo(b.build)
implicit val projectRefOrdering: Ordering[ProjectRef] = (a, b) => {
val bc = a.build compareTo b.build
if (bc == 0) a.project compareTo b.project else bc
val bc = a.build.compareTo(b.build)
if bc == 0 then a.project.compareTo(b.project) else bc
}
def display(ref: Reference): String =

View File

@ -18,7 +18,7 @@ import sbt.io.IO
final case class Scope(
project: ScopeAxis[Reference],
config: ScopeAxis[ConfigKey],
task: ScopeAxis[AttributeKey[_]],
task: ScopeAxis[AttributeKey[?]],
extra: ScopeAxis[AttributeMap]
):
def rescope(project: Reference): Scope = copy(project = Select(project))
@ -66,7 +66,7 @@ object Scope:
case s => s
}
def fillTaskAxis(scope: Scope, key: AttributeKey[_]): Scope =
def fillTaskAxis(scope: Scope, key: AttributeKey[?]): Scope =
scope.task match {
case _: Select[_] => scope
case _ => scope.copy(task = Select(key))
@ -103,7 +103,7 @@ object Scope:
if (!uri.isAbsolute && current.isOpaque && uri.getSchemeSpecificPart == ".")
current // this handles the shortcut of referring to the current build using "."
else
IO.directoryURI(current resolve uri)
IO.directoryURI(current.resolve(uri))
def resolveReference(
current: URI,
@ -264,7 +264,7 @@ object Scope:
rootProject: URI => String,
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
taskInherit: AttributeKey[?] => Seq[AttributeKey[?]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
): Scope => Seq[Scope] =
delegates(
@ -285,7 +285,7 @@ object Scope:
rootProject: URI => String,
projectInherit: ProjectRef => Seq[ProjectRef],
configInherit: (ResolvedReference, ConfigKey) => Seq[ConfigKey],
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
taskInherit: AttributeKey[?] => Seq[AttributeKey[?]],
): Scope => Seq[Scope] = {
val index = delegates(refs, configurations, projectInherit, configInherit)
scope => indexedDelegates(resolve, index, rootProject, taskInherit)(scope)
@ -296,7 +296,7 @@ object Scope:
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
taskInherit: AttributeKey[?] => Seq[AttributeKey[?]],
extraInherit: (ResolvedReference, AttributeMap) => Seq[AttributeMap]
)(rawScope: Scope): Seq[Scope] =
indexedDelegates(resolve, index, rootProject, taskInherit)(rawScope)
@ -305,7 +305,7 @@ object Scope:
resolve: Reference => ResolvedReference,
index: DelegateIndex,
rootProject: URI => String,
taskInherit: AttributeKey[_] => Seq[AttributeKey[_]],
taskInherit: AttributeKey[?] => Seq[AttributeKey[?]],
)(rawScope: Scope): Seq[Scope] = {
val scope = Scope.replaceThis(GlobalScope)(rawScope)

View File

@ -74,7 +74,7 @@ object SlashSyntax:
}
sealed trait HasSlashKeyOrAttrKey extends HasSlashKey {
def /(key: AttributeKey[_]): Scope = scope.rescope(key)
def /(key: AttributeKey[?]): Scope = scope.rescope(key)
}
/** RichReference wraps a reference to provide the `/` operator for scoping. */
@ -91,7 +91,7 @@ object SlashSyntax:
/** RichConfiguration wraps a configuration to provide the `/` operator for scoping. */
final class RichConfiguration(protected val scope: Scope) extends HasSlashKeyOrAttrKey {
// This is for handling `Zero / Zero / Zero / name`.
def /(taskAxis: ScopeAxis[AttributeKey[_]]): Scope =
def /(taskAxis: ScopeAxis[AttributeKey[?]]): Scope =
scope.copy(task = taskAxis)
}

View File

@ -23,7 +23,7 @@ import scala.reflect.ClassTag
/** An abstraction on top of Settings for build configuration and task definition. */
sealed trait Scoped extends Equals:
def scope: Scope
val key: AttributeKey[_]
val key: AttributeKey[?]
override def equals(that: Any): Boolean =
(this eq that.asInstanceOf[AnyRef]) || (that match {
@ -68,7 +68,7 @@ sealed abstract class SettingKey[A1]
override def toString: String = s"SettingKey($scope / $key)"
final def toTask: Initialize[Task[A1]] = this apply inlineTask
final def toTask: Initialize[Task[A1]] = this.apply(inlineTask)
final def scopedKey: ScopedKey[A1] = ScopedKey(scope, key)
@ -130,7 +130,7 @@ sealed abstract class SettingKey[A1]
final def withRank(rank: Int): SettingKey[A1] =
SettingKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[SettingKey[_]]
def canEqual(that: Any): Boolean = that.isInstanceOf[SettingKey[?]]
end SettingKey
/**
@ -214,7 +214,7 @@ sealed abstract class TaskKey[A1]
final def withRank(rank: Int): TaskKey[A1] =
TaskKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[TaskKey[_]]
def canEqual(that: Any): Boolean = that.isInstanceOf[TaskKey[?]]
end TaskKey
/**
@ -251,12 +251,12 @@ sealed trait InputKey[A1]
final inline def ~=(f: A1 => A1): Setting[InputTask[A1]] = transform(f)
final inline def transform(f: A1 => A1): Setting[InputTask[A1]] =
set(scopedKey(_ mapTask { _ map f }))
set(scopedKey(_ mapTask { _.map(f) }))
final def withRank(rank: Int): InputKey[A1] =
InputKey(AttributeKey.copyWithRank(key, rank))
def canEqual(that: Any): Boolean = that.isInstanceOf[InputKey[_]]
def canEqual(that: Any): Boolean = that.isInstanceOf[InputKey[?]]
end InputKey
/** Methods and types related to constructing settings, including keys, scopes, and initializations. */
@ -361,8 +361,8 @@ object Scoped:
def doFinally(t: Task[Unit]): Initialize[Task[A1]] = onTask(_.doFinally(t))
def ||[T >: A1](alt: Task[T]): Initialize[Task[T]] = onTask(_ || alt)
def &&[T](alt: Task[T]): Initialize[Task[T]] = onTask(_ && alt)
def tag(tags: Tag*): Initialize[Task[A1]] = onTask(_.tag(tags: _*))
def tagw(tags: (Tag, Int)*): Initialize[Task[A1]] = onTask(_.tagw(tags: _*))
def tag(tags: Tag*): Initialize[Task[A1]] = onTask(_.tag(tags*))
def tagw(tags: (Tag, Int)*): Initialize[Task[A1]] = onTask(_.tagw(tags*))
// Task-specific extensions
def dependsOn(tasks: Initialize[? <: Task[?]]*): Initialize[Task[A1]] =
@ -372,7 +372,7 @@ object Scoped:
def dependsOnSeq(tasks: Seq[AnyInitTask]): Initialize[Task[A1]] =
init.zipWith(
Initialize.joinAny[Task](coerceToAnyTaskSeq(tasks))
)((thisTask, deps) => thisTask.dependsOn(deps: _*))
)((thisTask, deps) => thisTask.dependsOn(deps*))
def failure: Initialize[Task[Incomplete]] = init(_.failure)
def result: Initialize[Task[Result[A1]]] = init(_.result)
def triggeredBy[A2](tasks: Initialize[Task[A2]]*): Initialize[Task[A1]] =
@ -381,7 +381,7 @@ object Scoped:
nonLocal(tasks.toSeq.asInstanceOf[Seq[AnyInitTask]], Def.runBefore)
private def nonLocal(
tasks: Seq[AnyInitTask],
key: AttributeKey[Seq[Task[_]]]
key: AttributeKey[Seq[Task[?]]]
): Initialize[Task[A1]] =
Initialize
.joinAny[Task](coerceToAnyTaskSeq(tasks))
@ -390,26 +390,26 @@ object Scoped:
extension [A1](init: Initialize[InputTask[A1]])
@targetName("onTaskInitializeInputTask")
protected def onTask[T](f: Task[A1] => Task[T]): Initialize[InputTask[T]] =
init(_ mapTask f)
init(_.mapTask(f))
@targetName("flatMapTaskValueInitializeInputTask")
def flatMapTaskValue[T](f: A1 => Task[T]): Initialize[InputTask[T]] =
onTask(_.result flatMap (f compose successM))
onTask(_.result.flatMap(f compose successM))
@targetName("mapInitializeInputTask")
def map[A2](f: A1 => A2): Initialize[InputTask[A2]] =
onTask(_.result map (f compose successM))
onTask(_.result.map(f compose successM))
@targetName("andFinallyInitializeInputTask")
def andFinally(fin: => Unit): Initialize[InputTask[A1]] = onTask(_ andFinally fin)
def andFinally(fin: => Unit): Initialize[InputTask[A1]] = onTask(_.andFinally(fin))
@targetName("doFinallyInitializeInputTask")
def doFinally(t: Task[Unit]): Initialize[InputTask[A1]] = onTask(_ doFinally t)
def doFinally(t: Task[Unit]): Initialize[InputTask[A1]] = onTask(_.doFinally(t))
@targetName("||_InitializeInputTask")
def ||[T >: A1](alt: Task[T]): Initialize[InputTask[T]] = onTask(_ || alt)
@targetName("&&_InitializeInputTask")
def &&[T](alt: Task[T]): Initialize[InputTask[T]] = onTask(_ && alt)
@targetName("tagInitializeInputTask")
def tag(tags: Tag*): Initialize[InputTask[A1]] = onTask(_.tag(tags: _*))
def tag(tags: Tag*): Initialize[InputTask[A1]] = onTask(_.tag(tags*))
@targetName("tagwInitializeInputTask")
def tagw(tags: (Tag, Int)*): Initialize[InputTask[A1]] = onTask(_.tagw(tags: _*))
def tagw(tags: (Tag, Int)*): Initialize[InputTask[A1]] = onTask(_.tagw(tags*))
// InputTask specific extensions
@targetName("dependsOnInitializeInputTask")
@ -423,7 +423,7 @@ object Scoped:
@targetName("dependsOnSeqInitializeInputTask")
def dependsOnSeq(tasks: Seq[AnyInitTask]): Initialize[InputTask[A1]] =
init.zipWith(Initialize.joinAny[Task](coerceToAnyTaskSeq(tasks)))((thisTask, deps) =>
thisTask.mapTask(_.dependsOn(deps: _*))
thisTask.mapTask(_.dependsOn(deps*))
)
end Syntax
@ -456,7 +456,7 @@ object Scoped:
inline def ~=(inline f: A1 => A1): Setting[Task[A1]] = transform(f)
inline def transform(f: A1 => A1): Setting[Task[A1]] = set(scopedKey(_ map f))
inline def transform(f: A1 => A1): Setting[Task[A1]] = set(scopedKey(_.map(f)))
def toSettingKey: SettingKey[Task[A1]] = scopedSetting(scope, key)
@ -494,7 +494,7 @@ object Scoped:
private def coerceToAnyTaskSeq(tasks: Seq[AnyInitTask]): Seq[Def.Initialize[Task[Any]]] =
tasks.asInstanceOf[Seq[Def.Initialize[Task[Any]]]]
type AnyInitTask = Initialize[Task[_]]
type AnyInitTask = Initialize[Task[?]]
implicit def richTaskSeq[T](in: Seq[Initialize[Task[T]]]): RichTaskSeq[T] = new RichTaskSeq(in)
final class RichTaskSeq[T](keys: Seq[Initialize[Task[T]]]) {
@ -507,7 +507,7 @@ object Scoped:
def dependOn: Initialize[Task[Unit]] =
Initialize
.joinAny[Task](coerceToAnyTaskSeq(keys))
.apply(deps => nop.dependsOn(deps: _*))
.apply(deps => nop.dependsOn(deps*))
}
sealed abstract class RichTaskables[Tup <: Tuple](final val keys: Tuple.Map[Tup, Taskable]):
@ -672,7 +672,7 @@ object Scoped:
// format: on
private[sbt] def extendScoped(s1: Scoped, ss: Seq[Scoped]): Seq[AttributeKey[_]] =
private[sbt] def extendScoped(s1: Scoped, ss: Seq[Scoped]): Seq[AttributeKey[?]] =
s1.key +: ss.map(_.key)
end Scoped
@ -744,7 +744,7 @@ object InputKey:
description: String,
extend1: Scoped,
extendN: Scoped*
): InputKey[A1] = apply(label, description, KeyRanks.DefaultInputRank, extend1, extendN: _*)
): InputKey[A1] = apply(label, description, KeyRanks.DefaultInputRank, extend1, extendN*)
def apply[A1: ClassTag](
label: String,
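
Many of the changes above swap the Scala 2 vararg splice `xs: _*` for Scala 3's `xs*` (for example `_.tag(tags*)` and `thisTask.dependsOn(deps*)`). A standalone sketch of the two spellings, not part of this diff:

object VarargsDemo:
  def sum(ns: Int*): Int = ns.sum

  @main def runVarargsDemo(): Unit =
    val xs = Seq(1, 2, 3)
    println(sum(xs*))        // Scala 3 splice, as used throughout this commit
    // println(sum(xs: _*))  // older Scala 2 spelling that the commit removes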

View File

@ -87,7 +87,7 @@ object FullInstance:
type Tup = (Task[Initialize[Task[A1]]], Task[SS], [a] => Initialize[a] => Initialize[a])
Def.app[Tup, Task[A1]]((in, settingsData, Def.capturedTransformations)) {
case (a: Task[Initialize[Task[A1]]], data: Task[SS], f) =>
TaskExtra.multT2Task((a, data)).flatMapN { case (a, d) => f(a) evaluate d }
TaskExtra.multT2Task((a, data)).flatMapN { case (a, d) => f(a).evaluate(d) }
}
def flattenFun[A1, A2](
@ -96,7 +96,7 @@ object FullInstance:
type Tup = (Task[A1 => Initialize[Task[A2]]], Task[SS], [a] => Initialize[a] => Initialize[a])
Def.app[Tup, A1 => Task[A2]]((in, settingsData, Def.capturedTransformations)) {
case (a: Task[A1 => Initialize[Task[A2]]] @unchecked, data: Task[SS] @unchecked, f) =>
(s: A1) => TaskExtra.multT2Task((a, data)).flatMapN { case (af, d) => f(af(s)) evaluate d }
(s: A1) => TaskExtra.multT2Task((a, data)).flatMapN { case (af, d) => f(af(s)).evaluate(d) }
}
end FullInstance

View File

@ -70,9 +70,9 @@ private[sbt] object KeyMacro:
import qctx.reflect._
def enclosingTerm0(sym: Symbol): Symbol =
sym match
case sym if sym.flags is Flags.Macro => enclosingTerm0(sym.owner)
case sym if !sym.isTerm => enclosingTerm0(sym.owner)
case _ => sym
case sym if sym.flags.is(Flags.Macro) => enclosingTerm0(sym.owner)
case sym if !sym.isTerm => enclosingTerm0(sym.owner)
case _ => sym
enclosingTerm0(Symbol.spliceOwner)
private def enclosingClass(using Quotes) =
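
The KeyMacro change above rewrites the alphanumeric infix call `sym.flags is Flags.Macro` as `sym.flags.is(Flags.Macro)`; Scala 3 reserves infix application for symbolic or explicitly `infix`-declared methods. A standalone sketch using a hypothetical `Flags` stand-in (the real compiler API is not reproduced here):

object InfixDemo:
  // Hypothetical stand-in for a flag set; not the dotty compiler's Flags API.
  final case class Flags(bits: Int):
    def is(other: Flags): Boolean = (bits & other.bits) != 0

  val Macro = Flags(0x1)

  @main def runInfixDemo(): Unit =
    val symFlags = Flags(0x3)
    println(symFlags.is(Macro))  // dot syntax, as in the updated macro code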

View File

@ -25,7 +25,7 @@ object AppendSpec {
s
}
Global / onLoad += doSideEffect _
Global / onLoad += (() => doSideEffect())
Global / onLoad += (() => doSideEffect())
Global / onLoad += (() => println("foo"))
}
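
The AppendSpec change above drops the Scala 2 eta-expansion suffix in `doSideEffect _` in favour of an explicit lambda, since the trailing-underscore form is deprecated in Scala 3. A standalone sketch with a hypothetical zero-argument helper:

object EtaDemo:
  // Hypothetical helper standing in for the test's doSideEffect.
  def doSideEffect(): Unit = println("side effect ran")

  @main def runEtaDemo(): Unit =
    val hook: () => Unit = () => doSideEffect()  // explicit lambda, as in the diff
    hook()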

View File

@ -37,7 +37,7 @@ object SlashSyntaxTest extends sbt.SlashSyntax {
val uTest = "com.lihaoyi" %% "utest" % "0.5.3"
Seq[Setting[_]](
Seq[Setting[?]](
Global / cancelable := true,
ThisBuild / scalaVersion := "2.12.3",
console / scalacOptions += "-deprecation",

View File

@ -129,7 +129,7 @@ class TaskPosSpec {
import sbt._, Def._
val foo = taskKey[String]("")
Def.task[String] {
def inner(s: KeyedInitialize[_]) = println(s)
def inner(s: KeyedInitialize[?]) = println(s)
inner(foo)
""
}

View File

@ -45,7 +45,7 @@ object Assign {
val bk = taskKey[Seq[Int]]("b")
val ck = settingKey[File]("c")
val intTask = taskKey[Int]("int")
val sk = taskKey[Set[_]]("s")
val sk = taskKey[Set[?]]("s")
val bgList = taskKey[Seq[Int]]("")
val ik = inputKey[Int]("i")

View File

@ -27,7 +27,7 @@ abstract class BackgroundJobService extends Closeable {
* then you should get an InterruptedException while blocking on the process, and
* then you could process.destroy() for example.
*/
def runInBackground(spawningTask: ScopedKey[_], state: State)(
def runInBackground(spawningTask: ScopedKey[?], state: State)(
start: (Logger, File) => Unit
): JobHandle
@ -40,7 +40,7 @@ abstract class BackgroundJobService extends Closeable {
* then you should get an InterruptedException while blocking on the process, and
* then you could process.destroy() for example.
*/
private[sbt] def runInBackgroundWithLoader(spawningTask: ScopedKey[_], state: State)(
private[sbt] def runInBackgroundWithLoader(spawningTask: ScopedKey[?], state: State)(
start: (Logger, File) => (Option[ClassLoader], () => Unit)
): JobHandle = runInBackground(spawningTask, state) { (logger, file) =>
start(logger, file)._2.apply()
@ -95,7 +95,7 @@ object BackgroundJobService {
import DefaultParsers._
(state, handles) => {
val stringIdParser: Parser[Seq[String]] = Space ~> token(
NotSpace examples handles.map(_.id.toString).toSet,
NotSpace.examples(handles.map(_.id.toString).toSet),
description = "<job id>"
).+
stringIdParser.map { strings =>
@ -108,7 +108,7 @@ object BackgroundJobService {
abstract class JobHandle {
def id: Long
def humanReadableName: String
def spawningTask: ScopedKey[_]
def spawningTask: ScopedKey[?]
}
/**

View File

@ -76,7 +76,7 @@ object BuildPaths {
def getFileSetting(stateKey: AttributeKey[File], property: String, default: => File)(
state: State
): File =
state get stateKey orElse getFileProperty(property) getOrElse default
state.get(stateKey).orElse(getFileProperty(property)).getOrElse(default)
def getFileProperty(name: String): Option[File] = Option(System.getProperty(name)) flatMap {
path =>
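
The BuildPaths change above turns the infix chain `state get stateKey orElse getFileProperty(property) getOrElse default` into dotted calls. A standalone sketch of the same Option pipeline with hypothetical lookups:

object OptionChainDemo:
  // Hypothetical lookups standing in for the state attribute and system property reads.
  def fromState(key: String): Option[String] = None
  def fromProperty(name: String): Option[String] = Option(System.getProperty(name))

  @main def runOptionChainDemo(): Unit =
    val dir = fromState("global-base")
      .orElse(fromProperty("user.dir"))
      .getOrElse("(default)")
    println(dir)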

View File

@ -45,7 +45,7 @@ object Cross {
val x = Project.extract(state)
import x._
val knownVersions = crossVersions(x, currentRef)
val version = token(StringBasic.examples(knownVersions: _*)).map { arg =>
val version = token(StringBasic.examples(knownVersions*)).map { arg =>
val force = arg.endsWith("!")
val versionArg = if (force) arg.dropRight(1) else arg
versionArg.split("=", 2) match {
@ -105,9 +105,9 @@ object Cross {
private def crossVersions(extracted: Extracted, proj: ResolvedReference): Seq[String] = {
import extracted._
((proj / crossScalaVersions) get structure.data) getOrElse {
(proj / crossScalaVersions).get(structure.data).getOrElse {
// reading scalaVersion is a one-time deal
((proj / scalaVersion) get structure.data).toSeq
(proj / scalaVersion).get(structure.data).toSeq
}
}
@ -171,7 +171,7 @@ object Cross {
Seq(s"$SwitchCommand $verbose $version!", aggCommand)
}
case Right((keys, taskArgs)) =>
def project(key: ScopedKey[_]): Option[ProjectRef] = key.scope.project.toOption match {
def project(key: ScopedKey[?]): Option[ProjectRef] = key.scope.project.toOption match {
case Some(p: ProjectRef) => Some(p)
case _ => None
}
@ -226,7 +226,7 @@ object Cross {
restoreCapturedSession(state, Project.extract(state))
}
private val CapturedSession = AttributeKey[Seq[Setting[_]]]("crossCapturedSession")
private val CapturedSession = AttributeKey[Seq[Setting[?]]]("crossCapturedSession")
private def captureCurrentSession(state: State, extracted: Extracted): State = {
state.put(CapturedSession, extracted.session.rawAppend)
@ -279,7 +279,7 @@ object Cross {
case ScalaHomeVersion(homePath, resolveVersion, _) =>
val home = IO.resolve(extracted.currentProject.base, homePath)
if (home.exists()) {
val instance = ScalaInstance(home)(state.classLoaderCache.apply _)
val instance = ScalaInstance(home)(state.classLoaderCache.apply)
val version = resolveVersion.getOrElse(instance.actualVersion)
(version, Some((home, instance)))
} else {
@ -414,7 +414,7 @@ object Cross {
}
}
val filterKeys: Set[AttributeKey[_]] = Set(scalaVersion, scalaHome, scalaInstance).map(_.key)
val filterKeys: Set[AttributeKey[?]] = Set(scalaVersion, scalaHome, scalaInstance).map(_.key)
val projectsContains: Reference => Boolean = projects.map(_._1).toSet.contains

View File

@ -153,17 +153,17 @@ object Defaults extends BuildCommon {
def lock(app: xsbti.AppConfiguration): xsbti.GlobalLock = LibraryManagement.lock(app)
private[sbt] def globalDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] =
private[sbt] def globalDefaults(ss: Seq[Setting[?]]): Seq[Setting[?]] =
Def.defaultSettings(inScope(GlobalScope)(ss))
def buildCore: Seq[Setting[_]] = thisBuildCore ++ globalCore
def thisBuildCore: Seq[Setting[_]] =
def buildCore: Seq[Setting[?]] = thisBuildCore ++ globalCore
def thisBuildCore: Seq[Setting[?]] =
inScope(GlobalScope.copy(project = Select(ThisBuild)))(
Seq(
managedDirectory := baseDirectory.value / "lib_managed"
)
)
private[sbt] lazy val globalCore: Seq[Setting[_]] = globalDefaults(
private[sbt] lazy val globalCore: Seq[Setting[?]] = globalDefaults(
defaultTestTasks(test) ++ defaultTestTasks(testOnly) ++ defaultTestTasks(testQuick) ++ Seq(
excludeFilter :== HiddenFileFilter,
fileInputs :== Nil,
@ -181,9 +181,11 @@ object Defaults extends BuildCommon {
) ++ globalIvyCore ++ globalJvmCore ++ Watch.defaults
) ++ globalSbtCore
private[sbt] lazy val globalJvmCore: Seq[Setting[_]] =
private[sbt] lazy val globalJvmCore: Seq[Setting[?]] =
Seq(
compilerCache := state.value get Keys.stateCompilerCache getOrElse CompilerCache.fresh,
compilerCache := state.value
.get(Keys.stateCompilerCache)
.getOrElse(CompilerCache.fresh),
sourcesInBase :== true,
autoAPIMappings := false,
apiMappings := Map.empty,
@ -252,9 +254,9 @@ object Defaults extends BuildCommon {
"JAVA_HOME" -> javaHome
)
private[sbt] lazy val globalIvyCore: Seq[Setting[_]] =
private[sbt] lazy val globalIvyCore: Seq[Setting[?]] =
Seq(
internalConfigurationMap :== Configurations.internalMap _,
internalConfigurationMap :== Configurations.internalMap,
credentials :== SysProp.sbtCredentialsEnv.toList,
exportJars :== true,
trackInternalDependencies :== TrackLevel.TrackAlways,
@ -302,7 +304,7 @@ object Defaults extends BuildCommon {
)
/** Core non-plugin settings for sbt builds. These *must* be on every build or the sbt engine will fail to run at all. */
private[sbt] lazy val globalSbtCore: Seq[Setting[_]] = globalDefaults(
private[sbt] lazy val globalSbtCore: Seq[Setting[?]] = globalDefaults(
Seq(
outputStrategy :== None, // TODO - This might belong elsewhere.
buildStructure := Project.structure(state.value),
@ -332,11 +334,11 @@ object Defaults extends BuildCommon {
finally IO.delete(taskTemporaryDirectory.value)
},
// // extraLoggers is deprecated
SettingKey[ScopedKey[_] => Seq[XAppender]]("extraLoggers") :== { _ =>
SettingKey[ScopedKey[?] => Seq[XAppender]]("extraLoggers") :== { _ =>
Nil
},
extraAppenders := {
val f = SettingKey[ScopedKey[_] => Seq[XAppender]]("extraLoggers").value
val f = SettingKey[ScopedKey[?] => Seq[XAppender]]("extraLoggers").value
s =>
f(s).map {
case a: Appender => a
@ -426,7 +428,7 @@ object Defaults extends BuildCommon {
++ RemoteCache.globalSettings
)
private[sbt] lazy val buildLevelJvmSettings: Seq[Setting[_]] = Seq(
private[sbt] lazy val buildLevelJvmSettings: Seq[Setting[?]] = Seq(
exportPipelining := usePipelining.value,
sourcePositionMappers := Nil, // Never set a default sourcePositionMapper, see #6352! Whatever you are trying to solve, do it in the foldMappers method.
// The virtual file value cache needs to be global or sbt will run out of direct byte buffer memory.
@ -526,7 +528,7 @@ object Defaults extends BuildCommon {
.getOrElse(pos)
}
def defaultTestTasks(key: Scoped): Seq[Setting[_]] =
def defaultTestTasks(key: Scoped): Seq[Setting[?]] =
inTask(key)(
Seq(
tags := Seq(Tags.Test -> 1),
@ -535,7 +537,7 @@ object Defaults extends BuildCommon {
)
// TODO: This should be on the new default settings for a project.
def projectCore: Seq[Setting[_]] = Seq(
def projectCore: Seq[Setting[?]] = Seq(
name := thisProject.value.id,
logManager := LogManager.defaults(extraAppenders.value, ConsoleOut.terminalOut),
onLoadMessage := (onLoadMessage or
@ -545,7 +547,7 @@ object Defaults extends BuildCommon {
)
// Appended to JvmPlugin.projectSettings
def paths: Seq[Setting[_]] = Seq(
def paths: Seq[Setting[?]] = Seq(
baseDirectory := thisProject.value.base,
target := rootOutputDirectory.value.resolve(outputPath.value).toFile(),
// Use a different history path for jline3 because the jline2 format is
@ -652,7 +654,7 @@ object Defaults extends BuildCommon {
)
// This exists for binary compatibility and probably never should have been public.
def addBaseSources: Seq[Def.Setting[Task[Seq[File]]]] = Nil
lazy val outputConfigPaths: Seq[Setting[_]] = Seq(
lazy val outputConfigPaths: Seq[Setting[?]] = Seq(
classDirectory := target.value / (prefix(configuration.value.name) + "classes"),
backendOutput := {
val converter = fileConverter.value
@ -684,7 +686,7 @@ object Defaults extends BuildCommon {
// Scala 2.10 shades jline in the console so we need to make sure that it loads a compatible
// jansi version. Because of the shading, console does not work with the thin client for 2.10.x.
if (scalaVersion.value.startsWith("2.10.")) new ClassLoader(topLoader) {
override protected def loadClass(name: String, resolve: Boolean): Class[_] = {
override protected def loadClass(name: String, resolve: Boolean): Class[?] = {
if (name.startsWith("org.fusesource")) throw new ClassNotFoundException(name)
super.loadClass(name, resolve)
}
@ -735,7 +737,7 @@ object Defaults extends BuildCommon {
console / classpathOptions := ClasspathOptionsUtil.replNoboot(scalaVersion.value),
)
// must be a val: duplication detected by object identity
private lazy val compileBaseGlobal: Seq[Setting[_]] = globalDefaults(
private lazy val compileBaseGlobal: Seq[Setting[?]] = globalDefaults(
Seq(
auxiliaryClassFiles :== Nil,
incOptions := IncOptions.of(),
@ -872,7 +874,7 @@ object Defaults extends BuildCommon {
}
}
def defaultCompileSettings: Seq[Setting[_]] =
def defaultCompileSettings: Seq[Setting[?]] =
globalDefaults(
Seq(
enableBinaryCompileAnalysis :== true,
@ -880,7 +882,7 @@ object Defaults extends BuildCommon {
)
)
lazy val configTasks: Seq[Setting[_]] = docTaskSettings(doc) ++
lazy val configTasks: Seq[Setting[?]] = docTaskSettings(doc) ++
inTask(compile)(compileInputsSettings) ++
inTask(compileJava)(
Seq(
@ -1063,7 +1065,7 @@ object Defaults extends BuildCommon {
)
)
lazy val projectTasks: Seq[Setting[_]] = Seq(
lazy val projectTasks: Seq[Setting[?]] = Seq(
cleanFiles := cleanFilesTask.value,
cleanKeepFiles := Vector.empty,
cleanKeepGlobs ++= historyPath.value.map(_.toGlob).toVector,
@ -1118,7 +1120,7 @@ object Defaults extends BuildCommon {
override def triggeredMessage(s: WatchState) = trigMsg(s)
override def watchService() = getService()
override def watchSources(s: State) =
EvaluateTask(Project structure s, key, s, base) match
EvaluateTask(Project.structure(s), key, s, base) match
case Some((_, Result.Value(ps))) => ps
case Some((_, Result.Inc(i))) => throw i
case None => sys.error("key not found: " + Def.displayFull(key))
@ -1312,11 +1314,11 @@ object Defaults extends BuildCommon {
testOptions :== Nil,
testOptionDigests := Nil,
testResultLogger :== TestResultLogger.Default,
testOnly / testFilter :== (IncrementalTest.selectedFilter _),
testOnly / testFilter :== (IncrementalTest.selectedFilter),
extraTestDigests :== Nil,
)
)
lazy val testTasks: Seq[Setting[_]] =
lazy val testTasks: Seq[Setting[?]] =
testTaskOptions(test) ++ testTaskOptions(testOnly) ++ testTaskOptions(
testQuick
) ++ testDefaults ++ Seq(
@ -1403,7 +1405,7 @@ object Defaults extends BuildCommon {
*/
lazy val ConfigZero: Scope = ThisScope.copy(config = Zero)
lazy val ConfigGlobal: Scope = ConfigZero
def testTaskOptions(key: Scoped): Seq[Setting[_]] =
def testTaskOptions(key: Scoped): Seq[Setting[?]] =
inTask(key)(
Seq(
testListeners := {
@ -1503,7 +1505,7 @@ object Defaults extends BuildCommon {
}
@nowarn
def inputTests(key: InputKey[_]): Initialize[InputTask[Unit]] =
def inputTests(key: InputKey[?]): Initialize[InputTask[Unit]] =
inputTests0.mapReferenced(Def.mapScope((s) => s.rescope(key.key)))
private lazy val inputTests0: Initialize[InputTask[Unit]] = {
@ -1513,9 +1515,9 @@ object Defaults extends BuildCommon {
val filter = testFilter.value
val config = testExecution.value
val st = state.value
given display: Show[ScopedKey[_]] = Project.showContextKey(st)
given display: Show[ScopedKey[?]] = Project.showContextKey(st)
val modifiedOpts =
Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions: _*) +: config.options
Tests.Filters(filter(selected)) +: Tests.Argument(frameworkOptions*) +: config.options
val newConfig = config.copy(options = modifiedOpts)
val output = allTestGroupsTask(
s,
@ -1649,7 +1651,7 @@ object Defaults extends BuildCommon {
converter,
opts,
s.log,
(Tags.ForkedTestGroup, 1) +: group.tags: _*
(Tags.ForkedTestGroup, 1) +: group.tags*
)
case Tests.InProcess =>
if (javaOptions.nonEmpty) {
@ -1732,16 +1734,16 @@ object Defaults extends BuildCommon {
Nil
}
lazy val packageBase: Seq[Setting[_]] = Seq(
lazy val packageBase: Seq[Setting[?]] = Seq(
artifact := Artifact(moduleName.value)
) ++ Defaults.globalDefaults(
Seq(
packageOptions :== Nil,
artifactName :== (Artifact.artifactName _)
artifactName :== (Artifact.artifactName)
)
)
lazy val packageConfig: Seq[Setting[_]] =
lazy val packageConfig: Seq[Setting[?]] =
inTask(packageBin)(
Seq(
packageOptions := {
@ -2252,7 +2254,7 @@ object Defaults extends BuildCommon {
()
}
def docTaskSettings(key: TaskKey[File] = doc): Seq[Setting[_]] =
def docTaskSettings(key: TaskKey[File] = doc): Seq[Setting[?]] =
inTask(key)(
Seq(
apiMappings ++= {
@ -2378,7 +2380,7 @@ object Defaults extends BuildCommon {
def consoleTask: Initialize[Task[Unit]] = consoleTask(fullClasspath, console)
def consoleQuickTask = consoleTask(externalDependencyClasspath, consoleQuick)
def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[_]): Initialize[Task[Unit]] =
def consoleTask(classpath: TaskKey[Classpath], task: TaskKey[?]): Initialize[Task[Unit]] =
Def.task {
val si = (task / scalaInstance).value
val s = streams.value
@ -2604,9 +2606,9 @@ object Defaults extends BuildCommon {
)
}
def compileInputsSettings: Seq[Setting[_]] =
def compileInputsSettings: Seq[Setting[?]] =
compileInputsSettings(dependencyPicklePath)
def compileInputsSettings(classpathTask: TaskKey[Classpath]): Seq[Setting[_]] = {
def compileInputsSettings(classpathTask: TaskKey[Classpath]): Seq[Setting[?]] = {
Seq(
compileOptions := {
val c = fileConverter.value
@ -2699,7 +2701,7 @@ object Defaults extends BuildCommon {
private[sbt] def none[A]: Option[A] = (None: Option[A])
private[sbt] def jnone[A]: Optional[A] = none[A].toOptional
def compileAnalysisSettings: Seq[Setting[_]] = Seq(
def compileAnalysisSettings: Seq[Setting[?]] = Seq(
previousCompile := {
val setup = compileIncSetup.value
val store = analysisStore(compileAnalysisFile)
@ -2779,7 +2781,7 @@ object Defaults extends BuildCommon {
def runMainParser: (State, Seq[String]) => Parser[(String, Seq[String])] = {
import DefaultParsers._
(state, mainClasses) =>
Space ~> token(NotSpace examples mainClasses.toSet) ~ spaceDelimited("<arg>")
Space ~> token(NotSpace.examples(mainClasses.toSet)) ~ spaceDelimited("<arg>")
}
def testOnlyParser: (State, Seq[String]) => Parser[(Seq[String], Seq[String])] = {
@ -2793,9 +2795,9 @@ object Defaults extends BuildCommon {
private def distinctParser(exs: Set[String], raw: Boolean): Parser[Seq[String]] = {
import DefaultParsers._
import Parser.and
val base = token(Space) ~> token(and(NotSpace, not("--", "Unexpected: ---")) examples exs)
val base = token(Space) ~> token(and(NotSpace, not("--", "Unexpected: ---")).examples(exs))
val recurse = base flatMap { ex =>
val (matching, notMatching) = exs.partition(GlobFilter(ex).accept _)
val (matching, notMatching) = exs.partition(GlobFilter(ex).accept)
distinctParser(notMatching, raw) map { result =>
if (raw) ex +: result else matching.toSeq ++ result
}
@ -2812,36 +2814,36 @@ object Defaults extends BuildCommon {
// 1. runnerSettings is added unscoped via JvmPlugin.
// 2. In addition it's added scoped to run task.
lazy val runnerSettings: Seq[Setting[_]] = Seq(runnerTask, forkOptions := forkOptionsTask.value)
private lazy val newRunnerSettings: Seq[Setting[_]] =
lazy val runnerSettings: Seq[Setting[?]] = Seq(runnerTask, forkOptions := forkOptionsTask.value)
private lazy val newRunnerSettings: Seq[Setting[?]] =
Seq(runner := ClassLoaders.runner.value, forkOptions := forkOptionsTask.value)
lazy val baseTasks: Seq[Setting[_]] = projectTasks ++ packageBase
lazy val baseTasks: Seq[Setting[?]] = projectTasks ++ packageBase
lazy val configSettings: Seq[Setting[_]] =
lazy val configSettings: Seq[Setting[?]] =
Classpaths.configSettings ++ configTasks ++ configPaths ++ packageConfig ++
Classpaths.compilerPluginConfig ++ deprecationSettings ++
BuildServerProtocol.configSettings
lazy val compileSettings: Seq[Setting[_]] =
lazy val compileSettings: Seq[Setting[?]] =
configSettings ++ (mainBgRunMainTask +: mainBgRunTask) ++ Classpaths.addUnmanagedLibrary
lazy val testSettings: Seq[Setting[_]] = configSettings ++ testTasks
lazy val testSettings: Seq[Setting[?]] = configSettings ++ testTasks
@nowarn
@deprecated(
"Create a separate subproject instead of using IntegrationTest and in addition avoid using itSettings",
"1.9.0"
)
lazy val itSettings: Seq[Setting[_]] = inConfig(IntegrationTest) {
lazy val itSettings: Seq[Setting[?]] = inConfig(IntegrationTest) {
testSettings
}
lazy val defaultConfigs: Seq[Setting[_]] = inConfig(Compile)(compileSettings) ++
lazy val defaultConfigs: Seq[Setting[?]] = inConfig(Compile)(compileSettings) ++
inConfig(Test)(testSettings) ++
inConfig(Runtime)(Classpaths.configSettings)
// These are project level settings that MUST be on every project.
lazy val coreDefaultSettings: Seq[Setting[_]] =
lazy val coreDefaultSettings: Seq[Setting[?]] =
projectCore ++ disableAggregation ++ Seq(
// Missing but core settings
baseDirectory := thisProject.value.base,
@ -2855,7 +2857,7 @@ object Defaults extends BuildCommon {
publishM2 / skip := (publish / skip).value
)
// build.sbt is treated a Scala source of metabuild, so to enable deprecation flag on build.sbt we set the option here.
lazy val deprecationSettings: Seq[Setting[_]] =
lazy val deprecationSettings: Seq[Setting[?]] =
inConfig(Compile)(
Seq(
scalacOptions := {
@ -2921,7 +2923,7 @@ object Classpaths {
concatSettings(a: Initialize[Seq[T]], b) // forward to widened variant
// Included as part of JvmPlugin#projectSettings.
lazy val configSettings: Seq[Setting[_]] = classpaths ++ Seq(
lazy val configSettings: Seq[Setting[?]] = classpaths ++ Seq(
products := makeProducts.value,
pickleProducts := makePickleProducts.value,
productDirectories := classDirectory.value :: Nil,
@ -2932,7 +2934,7 @@ object Classpaths {
update.value
)
)
private def classpaths: Seq[Setting[_]] =
private def classpaths: Seq[Setting[?]] =
Seq(
externalDependencyClasspath := concat(unmanagedClasspath, managedClasspath).value,
dependencyClasspath := concat(internalDependencyClasspath, externalDependencyClasspath).value,
@ -3080,13 +3082,13 @@ object Classpaths {
key: SettingKey[T],
pkgTasks: Seq[TaskKey[HashedVirtualFileRef]]
): Initialize[Seq[T]] =
(forallIn(key, pkgTasks) zipWith forallIn(publishArtifact, pkgTasks))(_ zip _ collect {
(forallIn(key, pkgTasks).zipWith(forallIn(publishArtifact, pkgTasks)))(_ zip _ collect {
case (a, true) => a
})
def forallIn[T](
key: Scoped.ScopingSetting[SettingKey[T]], // should be just SettingKey[T] (mea culpa)
pkgTasks: Seq[TaskKey[_]],
pkgTasks: Seq[TaskKey[?]],
): Initialize[Seq[T]] =
pkgTasks.map(pkg => (pkg.scope / pkg / key)).join
@ -3106,7 +3108,7 @@ object Classpaths {
val sbtV = (pluginCrossBuild / sbtBinaryVersion).value
sbtV != "1.0" && !sbtV.startsWith("0.")
}
val jvmPublishSettings: Seq[Setting[_]] = Seq(
val jvmPublishSettings: Seq[Setting[?]] = Seq(
artifacts := artifactDefs(defaultArtifactTasks).value,
packagedArtifacts := Def
.ifS(publishSbtPluginMavenStyle)(mavenArtifactsOfSbtPlugin)(packagedDefaultArtifacts)
@ -3197,7 +3199,7 @@ object Classpaths {
pomFile
}
val ivyPublishSettings: Seq[Setting[_]] = publishGlobalDefaults ++ Seq(
val ivyPublishSettings: Seq[Setting[?]] = publishGlobalDefaults ++ Seq(
artifacts :== Nil,
packagedArtifacts :== Map.empty,
makePom := {
@ -3288,11 +3290,11 @@ object Classpaths {
)
)
val ivyBaseSettings: Seq[Setting[_]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq(
val ivyBaseSettings: Seq[Setting[?]] = baseGlobalDefaults ++ sbtClassifiersTasks ++ Seq(
conflictWarning := conflictWarning.value.copy(label = Reference.display(thisProjectRef.value)),
unmanagedBase := baseDirectory.value / "lib",
normalizedName := Project.normalizeModuleID(name.value),
isSnapshot := (isSnapshot or version(_ endsWith "-SNAPSHOT")).value,
isSnapshot := (isSnapshot or version(_.endsWith("-SNAPSHOT"))).value,
description := (description or name).value,
organization := (organization or normalizedName).value,
organizationName := (organizationName or organization).value,
@ -3327,7 +3329,7 @@ object Classpaths {
}
},
bootResolvers := {
(appConfiguration map bootRepositories).value
appConfiguration.map(bootRepositories).value
},
fullResolvers :=
(Def.task {
@ -3643,7 +3645,7 @@ object Classpaths {
IvyXml.generateIvyXmlSettings() ++
LMCoursier.publicationsSetting(Seq(Compile, Test).map(c => c -> CConfiguration(c.name)))
val jvmBaseSettings: Seq[Setting[_]] = Seq(
val jvmBaseSettings: Seq[Setting[?]] = Seq(
libraryDependencies ++= autoLibraryDependency(
autoScalaLibrary.value && scalaHome.value.isEmpty && managedScalaInstance.value,
sbtPlugin.value,
@ -3731,7 +3733,7 @@ object Classpaths {
private[sbt] def defaultProjectID: Initialize[ModuleID] = Def.setting {
val p0 = ModuleID(organization.value, moduleName.value, version.value)
.cross((projectID / crossVersion).value)
.artifacts(artifacts.value: _*)
.artifacts(artifacts.value*)
val p1 = apiURL.value match {
case Some(u) => p0.extra(SbtPomExtraProperties.POM_API_KEY -> u.toExternalForm)
case _ => p0
@ -3832,43 +3834,46 @@ object Classpaths {
},
dependencyResolution := dependencyResolutionTask.value,
csrConfiguration := LMCoursier.updateSbtClassifierConfigurationTask.value,
(TaskGlobal / updateSbtClassifiers) := (Def.task {
val lm = dependencyResolution.value
val s = streams.value
val is = ivySbt.value
val mod = classifiersModule.value
val updateConfig0 = updateConfiguration.value
val updateConfig = updateConfig0
.withMetadataDirectory(dependencyCacheDirectory.value)
.withArtifactFilter(
updateConfig0.artifactFilter.map(af => af.withInverted(!af.inverted))
)
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val log = s.log
val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (update / unresolvedWarningConfiguration).value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
// val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
LibraryManagement.transitiveScratch(
lm,
"sbt",
GetClassifiersConfiguration(
mod,
excludes.toVector,
updateConfig,
srcTypes.toVector,
docTypes.toVector
),
uwConfig,
log
) match {
case Left(_) => ???
case Right(ur) => ur
(TaskGlobal / updateSbtClassifiers) := (Def
.task {
val lm = dependencyResolution.value
val s = streams.value
val is = ivySbt.value
val mod = classifiersModule.value
val updateConfig0 = updateConfiguration.value
val updateConfig = updateConfig0
.withMetadataDirectory(dependencyCacheDirectory.value)
.withArtifactFilter(
updateConfig0.artifactFilter.map(af => af.withInverted(!af.inverted))
)
val app = appConfiguration.value
val srcTypes = sourceArtifactTypes.value
val docTypes = docArtifactTypes.value
val log = s.log
val out = is.withIvy(log)(_.getSettings.getDefaultIvyUserDir)
val uwConfig = (update / unresolvedWarningConfiguration).value
withExcludes(out, mod.classifiers, lock(app)) { excludes =>
// val noExplicitCheck = ivy.map(_.withCheckExplicit(false))
LibraryManagement.transitiveScratch(
lm,
"sbt",
GetClassifiersConfiguration(
mod,
excludes.toVector,
updateConfig,
srcTypes.toVector,
docTypes.toVector
),
uwConfig,
log
) match {
case Left(_) => ???
case Right(ur) => ur
}
}
}
} tag (Tags.Update, Tags.Network)).value
.tag(Tags.Update, Tags.Network))
.value
)
) ++
inTask(scalaCompilerBridgeScope)(
@ -3946,7 +3951,7 @@ object Classpaths {
@deprecated("Use variant without delivery key", "1.1.1")
def publishTask(
config: TaskKey[PublishConfiguration],
deliverKey: TaskKey[_],
deliverKey: TaskKey[?],
): Initialize[Task[Unit]] =
publishTask(config)
@ -4032,8 +4037,8 @@ object Classpaths {
Option[ScalaInstance],
File,
File,
Seq[ScopedKey[_]],
ScopedKey[_],
Seq[ScopedKey[?]],
ScopedKey[?],
Option[FiniteDuration],
Boolean,
ProjectRef,
@ -4190,26 +4195,26 @@ object Classpaths {
val projRef = thisProjectRef.value
val st = state.value
val s = streams.value
val cacheStoreFactory = s.cacheStoreFactory sub updateCacheName.value
val cacheStoreFactory = s.cacheStoreFactory.sub(updateCacheName.value)
import sbt.librarymanagement.LibraryManagementCodec._
def modulePositions: Map[ModuleID, SourcePosition] =
try {
val extracted = (Project extract st)
val extracted = Project.extract(st)
val sk = (projRef / Zero / Zero / libraryDependencies).scopedKey
val empty = extracted.structure.data.set(sk.scope, sk.key, Nil)
val settings = extracted.structure.settings filter { (s: Setting[_]) =>
val settings = extracted.structure.settings filter { (s: Setting[?]) =>
(s.key.key == libraryDependencies.key) &&
(s.key.scope.project == Select(projRef))
}
Map(settings.asInstanceOf[Seq[Setting[Seq[ModuleID]]]].flatMap { s =>
s.init.evaluate(empty) map { _ -> s.pos }
}: _*)
}*)
} catch {
case NonFatal(_) => Map()
}
val outCacheStore = cacheStoreFactory make "output_dsp"
val f = Tracked.inputChanged(cacheStoreFactory make "input_dsp") {
val outCacheStore = cacheStoreFactory.make("output_dsp")
val f = Tracked.inputChanged(cacheStoreFactory.make("input_dsp")) {
(inChanged: Boolean, in: Seq[ModuleID]) =>
given NoPositionFormat: JsonFormat[NoPosition.type] = asSingleton(NoPosition)
given LinePositionFormat: IsoLList.Aux[LinePosition, String :*: Int :*: LNil] =
@ -4503,7 +4508,10 @@ object Classpaths {
ClasspathImpl.getClasspath(key, dep, conf, data)
def defaultConfigurationTask(p: ResolvedReference, data: Settings[Scope]): Configuration =
flatten((p / defaultConfiguration) get data) getOrElse Configurations.Default
flatten(
(p / defaultConfiguration)
.get(data)
).getOrElse(Configurations.Default)
def flatten[T](o: Option[Option[T]]): Option[T] = o flatMap idFun
@ -4529,7 +4537,7 @@ object Classpaths {
else
Nil
def addUnmanagedLibrary: Seq[Setting[_]] =
def addUnmanagedLibrary: Seq[Setting[?]] =
Seq((Compile / unmanagedJars) ++= unmanagedScalaLibrary.value)
def unmanagedScalaLibrary: Initialize[Task[Seq[HashedVirtualFileRef]]] =
@ -4900,14 +4908,14 @@ trait BuildExtra extends BuildCommon with DefExtra {
config: Configuration,
mainClass: String,
baseArguments: String*
): Vector[Setting[_]] = {
): Vector[Setting[?]] = {
Vector(
scoped := Def
.input(_ => Def.spaceDelimited())
.flatMapTask { result =>
initScoped(
scoped.scopedKey,
ClassLoaders.runner mapReferenced Project.mapScope(_.rescope(config)),
ClassLoaders.runner.mapReferenced(Project.mapScope(_.rescope(config))),
).zipWith(Def.task {
((config / fullClasspath).value, streams.value, fileConverter.value, result)
}) { (rTask, t) =>
@ -4928,11 +4936,11 @@ trait BuildExtra extends BuildCommon with DefExtra {
config: Configuration,
mainClass: String,
arguments: String*
): Vector[Setting[_]] =
): Vector[Setting[?]] =
Vector(
scoped := initScoped(
scoped.scopedKey,
ClassLoaders.runner mapReferenced Project.mapScope(_.rescope(config)),
ClassLoaders.runner.mapReferenced(Project.mapScope(_.rescope(config))),
).zipWith(Def.task { ((config / fullClasspath).value, streams.value, fileConverter.value) }) {
case (rTask, t) =>
(t, rTask).mapN { case ((cp, s, converter), r) =>
@ -4942,21 +4950,21 @@ trait BuildExtra extends BuildCommon with DefExtra {
}.value
) ++ inTask(scoped)((config / forkOptions) := forkOptionsTask.value)
def initScoped[T](sk: ScopedKey[_], i: Initialize[T]): Initialize[T] =
def initScoped[T](sk: ScopedKey[?], i: Initialize[T]): Initialize[T] =
initScope(fillTaskAxis(sk.scope, sk.key), i)
def initScope[T](s: Scope, i: Initialize[T]): Initialize[T] =
i mapReferenced Project.mapScope(Scope.replaceThis(s))
i.mapReferenced(Project.mapScope(Scope.replaceThis(s)))
/**
* Disables post-compilation hook for determining tests for tab-completion (such as for 'test-only').
* This is useful for reducing Test/compile time when not running test.
*/
def noTestCompletion(config: Configuration = Test): Setting[_] =
def noTestCompletion(config: Configuration = Test): Setting[?] =
inConfig(config)(Seq(definedTests := detectTests.value)).head
def filterKeys(ss: Seq[Setting[_]], transitive: Boolean = false)(
f: ScopedKey[_] => Boolean
): Seq[Setting[_]] =
def filterKeys(ss: Seq[Setting[?]], transitive: Boolean = false)(
f: ScopedKey[?] => Boolean
): Seq[Setting[?]] =
ss filter (s => f(s.key) && (!transitive || s.dependencies.forall(f)))
implicit def sbtStateToUpperStateOps(s: State): UpperStateOps =
@ -4989,7 +4997,7 @@ trait BuildCommon {
extension (s: Seq[HashedVirtualFileRef])
/** Converts the `Seq[HashedVirtualFileRef]` to a Classpath, which is an alias for `Seq[Attributed[HashedVirtualFileRef]]`. */
def classpath: Classpath = Attributed blankSeq s
def classpath: Classpath = Attributed.blankSeq(s)
def overrideConfigs(cs: Configuration*)(
configurations: Seq[Configuration]
@ -5006,10 +5014,10 @@ trait BuildCommon {
}
// these are intended for use in input tasks for creating parsers
def getFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State): Option[T] =
def getFromContext[T](task: TaskKey[T], context: ScopedKey[?], s: State): Option[T] =
SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[_], s: State)(implicit
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[?], s: State)(implicit
f: JsonFormat[T]
): Option[T] =
SessionVar.load(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
@ -5018,7 +5026,7 @@ trait BuildCommon {
def loadForParser[P, T](task: TaskKey[T])(
f: (State, Option[T]) => Parser[P]
)(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
loadForParserI(task)(Def value f)(format)
loadForParserI(task)(Def.value(f))(format)
def loadForParserI[P, T](task: TaskKey[T])(
init: Initialize[(State, Option[T]) => Parser[P]]
)(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
@ -5029,7 +5037,7 @@ trait BuildCommon {
def getForParser[P, T](
task: TaskKey[T]
)(init: (State, Option[T]) => Parser[P]): Initialize[State => Parser[P]] =
getForParserI(task)(Def value init)
getForParserI(task)(Def.value(init))
def getForParserI[P, T](
task: TaskKey[T]
)(init: Initialize[(State, Option[T]) => Parser[P]]): Initialize[State => Parser[P]] =
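
Several hunks in the file above also drop the trailing underscore when a method is used where a function value is expected, e.g. `Configurations.internalMap`, `(Artifact.artifactName)`, and `exs.partition(GlobFilter(ex).accept)`; Scala 3 eta-expands such references automatically. A standalone sketch with a hypothetical predicate:

object MethodRefDemo:
  // Hypothetical predicate standing in for GlobFilter(ex).accept.
  def acceptsScala(name: String): Boolean = name.endsWith(".scala")

  @main def runMethodRefDemo(): Unit =
    val files = Seq("Build.scala", "readme.md", "Keys.scala")
    val (matching, rest) = files.partition(acceptsScala)  // no `acceptsScala _` needed in Scala 3
    println(matching)  // List(Build.scala, Keys.scala)
    println(rest)      // List(readme.md)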

View File

@ -333,7 +333,7 @@ object EvaluateTask {
): T =
(extracted.currentRef / key).get(structure.data).getOrElse(default)
def injectSettings: Seq[Setting[_]] = Seq(
def injectSettings: Seq[Setting[?]] = Seq(
Global / state ::= dummyState,
Global / streamsManager ::= Def.dummyStreamsManager,
Global / executionRoots ::= dummyRoots,
@ -392,12 +392,12 @@ object EvaluateTask {
}
}
def logIncResult(result: Result[_], state: State, streams: Streams) = result match {
def logIncResult(result: Result[?], state: State, streams: Streams) = result match {
case Result.Inc(i) => logIncomplete(i, state, streams); case _ => ()
}
def logIncomplete(result: Incomplete, state: State, streams: Streams): Unit = {
val all = Incomplete linearize result
val all = Incomplete.linearize(result)
val keyed =
all collect { case Incomplete(Some(key: ScopedKey[_]), _, msg, _, ex) =>
(key, msg, ex)
@ -426,10 +426,10 @@ object EvaluateTask {
private def contextDisplay(state: State, highlight: Boolean) =
Project.showContextKey(state, if (highlight) Some(RED) else None)
def suppressedMessage(key: ScopedKey[_])(implicit display: Show[ScopedKey[_]]): String =
def suppressedMessage(key: ScopedKey[?])(implicit display: Show[ScopedKey[?]]): String =
"Stack trace suppressed. Run 'last %s' for the full log.".format(display.show(key))
def getStreams(key: ScopedKey[_], streams: Streams): TaskStreams =
def getStreams(key: ScopedKey[?], streams: Streams): TaskStreams =
streams(ScopedKey(Project.fillTaskAxis(key).scope, Keys.streams.key))
def withStreams[T](structure: BuildStructure, state: State)(f: Streams => T): T = {
@ -456,7 +456,7 @@ object EvaluateTask {
def nodeView(
state: State,
streams: Streams,
roots: Seq[ScopedKey[_]],
roots: Seq[ScopedKey[?]],
dummies: DummyTaskMap = DummyTaskMap(Nil)
): NodeView =
Transform(
@ -568,7 +568,7 @@ object EvaluateTask {
state: State,
streams: Streams
): Unit =
for (referenced <- (Global / Previous.references) get Project.structure(state).data)
for (referenced <- (Global / Previous.references).get(Project.structure(state).data))
Previous.complete(referenced, results, streams)
def applyResults[T](
@ -582,7 +582,7 @@ object EvaluateTask {
Function.chain(
results.toTypedSeq flatMap {
case results.TPair(_, Result.Value(KeyValue(_, st: StateTransform))) => Some(st.transform)
case results.TPair(Task(info, _), Result.Value(v)) => info.post(v) get transformState
case results.TPair(Task(info, _), Result.Value(v)) => info.post(v).get(transformState)
case _ => Nil
}
)
@ -647,7 +647,7 @@ object EvaluateTask {
}
// if the return type Seq[Setting[_]] is not explicitly given, scalac hangs
val injectStreams: ScopedKey[_] => Seq[Setting[_]] = scoped =>
val injectStreams: ScopedKey[?] => Seq[Setting[?]] = scoped =>
if (scoped.key == streams.key) {
Seq(scoped.scope / streams := {
(streamsManager.map { mgr =>

View File

@ -45,12 +45,12 @@ class ExecuteProgressAdapter(ep: ExecuteProgress) extends ExecuteProgress2 {
override def afterCommand(cmd: String, result: Either[Throwable, State]): Unit = {}
override def initial(): Unit = ep.initial()
override def afterRegistered(
task: TaskId[_],
allDeps: Iterable[TaskId[_]],
pendingDeps: Iterable[TaskId[_]]
task: TaskId[?],
allDeps: Iterable[TaskId[?]],
pendingDeps: Iterable[TaskId[?]]
): Unit = ep.afterRegistered(task, allDeps, pendingDeps)
override def afterReady(task: TaskId[_]): Unit = ep.afterReady(task)
override def beforeWork(task: TaskId[_]): Unit = ep.beforeWork(task)
override def afterReady(task: TaskId[?]): Unit = ep.afterReady(task)
override def beforeWork(task: TaskId[?]): Unit = ep.beforeWork(task)
override def afterWork[A](task: TaskId[A], result: Either[TaskId[A], Result[A]]): Unit =
ep.afterWork(task, result)
override def afterCompleted[A](task: TaskId[A], result: Result[A]): Unit =
@ -68,12 +68,12 @@ object ExecuteProgress2 {
xs.foreach(_.afterCommand(cmd, result))
override def initial(): Unit = xs.foreach(_.initial())
override def afterRegistered(
task: TaskId[_],
allDeps: Iterable[TaskId[_]],
pendingDeps: Iterable[TaskId[_]]
task: TaskId[?],
allDeps: Iterable[TaskId[?]],
pendingDeps: Iterable[TaskId[?]]
): Unit = xs.foreach(_.afterRegistered(task, allDeps, pendingDeps))
override def afterReady(task: TaskId[_]): Unit = xs.foreach(_.afterReady(task))
override def beforeWork(task: TaskId[_]): Unit = xs.foreach(_.beforeWork(task))
override def afterReady(task: TaskId[?]): Unit = xs.foreach(_.afterReady(task))
override def beforeWork(task: TaskId[?]): Unit = xs.foreach(_.beforeWork(task))
override def afterWork[A](task: TaskId[A], result: Either[TaskId[A], Result[A]]): Unit =
xs.foreach(_.afterWork(task, result))
override def afterCompleted[A](task: TaskId[A], result: Result[A]): Unit =

View File

@ -23,7 +23,7 @@ final case class Extracted(
structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef
)(implicit val showKey: Show[ScopedKey[_]]) {
)(implicit val showKey: Show[ScopedKey[?]]) {
def rootProject = structure.rootProject
lazy val currentUnit = structure units currentRef.build
lazy val currentProject = currentUnit defined currentRef.project
@ -113,16 +113,16 @@ final case class Extracted(
show = Aggregation.defaultShow(state, false),
)
private def resolve[K <: Scoped.ScopingSetting[K] with Scoped](key: K): K =
private def resolve[K <: Scoped.ScopingSetting[K] & Scoped](key: K): K =
Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope) / key
private def getOrError[T](scope: Scope, key: AttributeKey[_], value: Option[T])(implicit
display: Show[ScopedKey[_]]
private def getOrError[T](scope: Scope, key: AttributeKey[?], value: Option[T])(implicit
display: Show[ScopedKey[?]]
): T =
value getOrElse sys.error(display.show(ScopedKey(scope, key)) + " is undefined.")
private def getOrError[T](scope: Scope, key: AttributeKey[T])(implicit
display: Show[ScopedKey[_]]
display: Show[ScopedKey[?]]
): T =
getOrError(scope, key, structure.data.get(scope, key))(display)
@ -130,24 +130,24 @@ final case class Extracted(
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",
"1.2.0"
)
def append(settings: Seq[Setting[_]], state: State): State =
def append(settings: Seq[Setting[?]], state: State): State =
appendWithoutSession(settings, state)
/** Appends the given settings to all the build state settings, including session settings. */
def appendWithSession(settings: Seq[Setting[_]], state: State): State =
def appendWithSession(settings: Seq[Setting[?]], state: State): State =
appendImpl(settings, state, session.mergeSettings)
/**
* Appends the given settings to the original build state settings, discarding any settings
* appended to the session in the process.
*/
def appendWithoutSession(settings: Seq[Setting[_]], state: State): State =
def appendWithoutSession(settings: Seq[Setting[?]], state: State): State =
appendImpl(settings, state, session.original)
private def appendImpl(
settings: Seq[Setting[_]],
settings: Seq[Setting[?]],
state: State,
sessionSettings: Seq[Setting[_]],
sessionSettings: Seq[Setting[?]],
): State = {
val appendSettings =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
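
The Extracted change above rewrites the type bound `Scoped.ScopingSetting[K] with Scoped` as `Scoped.ScopingSetting[K] & Scoped`, Scala 3's intersection-type syntax. A standalone sketch with hypothetical traits:

object IntersectionDemo:
  trait HasId:
    def id: Int
  trait HasName:
    def name: String

  // Accepts any value that is both HasId and HasName.
  def describe[K <: HasId & HasName](k: K): String = s"${k.id}: ${k.name}"

  @main def runIntersectionDemo(): Unit =
    case class User(id: Int, name: String) extends HasId, HasName
    println(describe(User(1, "sbt-client")))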

View File

@ -62,7 +62,7 @@ object Keys {
val showTiming = settingKey[Boolean]("If true, the command success message includes the completion time.").withRank(CSetting)
val timingFormat = settingKey[java.text.DateFormat]("The format used for displaying the completion time.").withRank(CSetting)
@deprecated("", "1.4.0")
val extraLoggers = settingKey[ScopedKey[_] => Seq[XAppender]]("A function that provides additional loggers for a given setting.").withRank(DSetting)
val extraLoggers = settingKey[ScopedKey[?] => Seq[XAppender]]("A function that provides additional loggers for a given setting.").withRank(DSetting)
val extraAppenders = settingKey[AppenderSupplier]("A function that provides additional loggers for a given setting.").withRank(DSetting)
val useLog4J = settingKey[Boolean]("Toggles whether or not to use log4j for sbt internal loggers.").withRank(Invisible)
val logManager = settingKey[LogManager]("The log manager, which creates Loggers for different contexts.").withRank(DSetting)
@ -644,7 +644,7 @@ object Keys {
@cacheLevel(include = Array.empty)
val streams = taskKey[TaskStreams]("Provides streams for logging and persisting data.").withRank(DTask)
val taskDefinitionKey = Def.taskDefinitionKey
val (executionRoots, dummyRoots) = Def.dummy[Seq[ScopedKey[_]]]("executionRoots", "The list of root tasks for this task execution. Roots are the top-level tasks that were directly requested to be run.")
val (executionRoots, dummyRoots) = Def.dummy[Seq[ScopedKey[?]]]("executionRoots", "The list of root tasks for this task execution. Roots are the top-level tasks that were directly requested to be run.")
val state = Def.stateKey
val streamsManager = Def.streamsManagerKey
// wrapper to work around SI-2915
@ -670,8 +670,8 @@ object Keys {
val lintUnused = inputKey[Unit]("Check for keys unused by other settings and tasks.")
val lintIncludeFilter = settingKey[String => Boolean]("Filters key names that should be included in the lint check.")
val lintExcludeFilter = settingKey[String => Boolean]("Filters key names that should be excluded in the lint check.")
val excludeLintKeys = settingKey[Set[Def.KeyedInitialize[_]]]("Keys excluded from lintUnused task")
val includeLintKeys = settingKey[Set[Def.KeyedInitialize[_]]]("Task keys that are included into lintUnused task")
val excludeLintKeys = settingKey[Set[Def.KeyedInitialize[?]]]("Keys excluded from lintUnused task")
val includeLintKeys = settingKey[Set[Def.KeyedInitialize[?]]]("Task keys that are included into lintUnused task")
val lintUnusedKeysOnLoad = settingKey[Boolean]("Toggles whether or not to check for unused keys during startup")
val useScalaReplJLine = settingKey[Boolean]("Toggles whether or not to use sbt's forked jline in the scala repl. Enabling this flag may break the thin client in the scala console.").withRank(KeyRanks.Invisible)
@ -692,8 +692,8 @@ object Keys {
val triggeredBy = Def.triggeredBy
val runBefore = Def.runBefore
type Streams = std.Streams[ScopedKey[_]]
type TaskStreams = std.TaskStreams[ScopedKey[_]]
type Streams = std.Streams[ScopedKey[?]]
type TaskStreams = std.TaskStreams[ScopedKey[?]]
}
// format: on

Some files were not shown because too many files have changed in this diff.