Fix or mute warnings

This commit is contained in:
Adrien Piquerez 2024-03-05 10:39:00 +01:00
parent 5d0a0b7356
commit c15a4031a9
108 changed files with 266 additions and 287 deletions

View File

@@ -19,7 +19,7 @@ import java.net.URLClassLoader
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path, Paths, StandardOpenOption }
import java.security.MessageDigest
import scala.collection.JavaConverters.*
import scala.jdk.CollectionConverters.*
import scala.quoted.*
import sbt.io.Hash
@@ -255,13 +255,13 @@ class Eval(
private[this] def getGeneratedFiles(moduleName: String): Seq[Path] =
backingDir match
case Some(dir) =>
asScala(
Files
.list(dir)
.filter(!Files.isDirectory(_))
.filter(_.getFileName.toString.contains(moduleName))
.iterator
).toList
Files
.list(dir)
.filter(!Files.isDirectory(_))
.filter(_.getFileName.toString.contains(moduleName))
.iterator
.asScala
.toList
case None => Nil
private[this] def makeModuleName(hash: String): String = "$Wrap" + hash.take(10)

View File

@@ -173,24 +173,15 @@ private[sbt] object EvaluateConfigurations {
val allGeneratedFiles: Seq[Path] = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader => {
val projects = {
val compositeProjects = definitions.values(loader).collect { case p: CompositeProject =>
p
}
val compositeProjects = definitions
.values(loader)
.collect { case p: CompositeProject => p }
// todo: resolveBase?
CompositeProject.expand(compositeProjects) // .map(resolveBase(file.getParentFile, _))
}
val (settingsRaw, manipulationsRaw) =
dslEntries map (_.result apply loader) partition {
case DslEntry.ProjectSettings(_) => true
case _ => false
}
val settings = settingsRaw flatMap {
case DslEntry.ProjectSettings(settings) => settings
case _ => Nil
}
val manipulations = manipulationsRaw map { case DslEntry.ProjectManipulation(f) =>
f
}
val loadedDslEntries = dslEntries.map(_.result.apply(loader))
val settings = loadedDslEntries.collect { case DslEntry.ProjectSettings(s) => s }.flatten
val manipulations = loadedDslEntries.collect { case DslEntry.ProjectManipulation(f) => f }
// TODO -get project manipulations.
new LoadedSbtFile(
settings,

View File

@@ -54,7 +54,7 @@ private[sbt] object SbtParser:
""".stripMargin
private final val defaultClasspath =
sbt.io.Path.makeString(sbt.io.IO.classLocationPath[Product].toFile :: Nil)
sbt.io.Path.makeString(sbt.io.IO.classLocationPath(classOf[Product]).toFile :: Nil)
def isIdentifier(ident: String): Boolean =
val code = s"val $ident = 0; val ${ident}${ident} = $ident"

View File

@@ -19,6 +19,7 @@ import sbt.util.{
}
import xsbti.VirtualFile
import Types.Id
import scala.annotation.nowarn
/**
* Implementation of a macro that provides a direct syntax for applicative functors and monads. It
@@ -266,7 +267,7 @@ trait Cont:
val expr = input.term.asExprOf[F[a]]
typed[F[A1]](
'{
$applicativeExpr.map[a, A1]($expr.asInstanceOf[F[a]])($lambda)
$applicativeExpr.map[a @nowarn, A1]($expr.asInstanceOf[F[a @nowarn]])($lambda)
}.asTerm
).asExprOf[F[A1]]
eitherTree match

View File

@@ -8,8 +8,9 @@ import scala.collection.mutable
import sbt.util.cacheLevel
import sbt.util.CacheLevelTag
trait ContextUtil[C <: Quotes & scala.Singleton](val qctx: C, val valStart: Int):
trait ContextUtil[C <: Quotes & scala.Singleton](val valStart: Int):
import qctx.reflect.*
val qctx: C
given qctx.type = qctx
private var counter: Int = valStart - 1

View File

@@ -18,7 +18,7 @@ import scala.quoted.*
* code matched using `appTransformer`, which is a generic function with a single type param and a
* single term param like `X.wrapInit[A](...)`.
*/
trait Convert[C <: Quotes & Singleton](override val qctx: C) extends ContextUtil[C]:
trait Convert[C <: Quotes & Singleton] extends ContextUtil[C]:
import qctx.reflect.*
def convert[A: Type](nme: String, in: Term): Converted

View File

@@ -27,8 +27,8 @@ object ConvertTestMacro:
convert1.transformWrappers(expr.asTerm, substitute, Symbol.spliceOwner).asExprOf[Boolean]
class InputInitConvert[C <: Quotes & scala.Singleton](override val qctx: C)
extends Convert[C](qctx)
with ContextUtil[C](qctx, 0):
extends Convert[C]
with ContextUtil[C](0):
// with TupleBuilder[C](qctx)
// with TupleNBuilder[C](qctx):
import qctx.reflect.*

View File

@@ -60,22 +60,21 @@ sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSo
* the part of the path already written by the user.
*/
class FileExamples(base: File, prefix: String = "") extends ExampleSource {
override def apply(): Stream[String] = files(base).map(_ substring prefix.length)
override def apply(): LazyList[String] = files(base).map(_.substring(prefix.length))
override def withAddedPrefix(addedPrefix: String): FileExamples =
new FileExamples(base, prefix + addedPrefix)
protected def files(directory: File): Stream[String] = {
val childPaths = IO.listFiles(directory).toStream
val prefixedDirectChildPaths = childPaths map { IO.relativize(base, _).get } filter {
_ startsWith prefix
}
val dirsToRecurseInto = childPaths filter { _.isDirectory } map {
IO.relativize(base, _).get
} filter {
dirStartsWithPrefix
}
prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir)))
protected def files(directory: File): LazyList[String] = {
val childPaths = LazyList(IO.listFiles(directory)*)
val prefixedDirectChildPaths = childPaths
.map(IO.relativize(base, _).get)
.filter(_.startsWith(prefix))
val dirsToRecurseInto = childPaths
.filter(_.isDirectory)
.map(IO.relativize(base, _).get)
.filter(dirStartsWithPrefix)
prefixedDirectChildPaths ++ dirsToRecurseInto.flatMap(dir => files(new File(base, dir)))
}
private def dirStartsWithPrefix(relativizedPath: String): Boolean =

View File

@@ -76,7 +76,7 @@ object HistoryCommands {
val lines = h.lines.toArray
command.foreach(lines(lines.length - 1) = _)
h.path foreach { h =>
IO.writeLines(h, lines)
IO.writeLines(h, lines.toSeq)
}
command.toList.some
}

View File

@@ -11,7 +11,7 @@ package complete
import jline.console.ConsoleReader
import jline.console.completer.{ Completer, CompletionHandler }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
@@ -57,7 +57,7 @@ object JLineCompletion {
// always provides dummy completions so that the custom completion handler gets called
// (ConsoleReader doesn't call the handler if there aren't any completions)
// the custom handler will then throw away the candidates and call the custom function
private[this] final object DummyCompletor extends Completer {
private[this] object DummyCompletor extends Completer {
override def complete(
buffer: String,
cursor: Int,

View File

@@ -230,7 +230,7 @@ object Parser extends ParserMain:
case Some(av) => success(f(av))
case None =>
a match {
case m: MapParser[_, A] => m.map(f)
case m: MapParser[?, ?] => m.map(f)
case _ => new MapParser(a, f)
}
}

View File

@@ -21,7 +21,7 @@ import org.jline.terminal.impl.{ AbstractTerminal, DumbTerminal }
import org.jline.terminal.impl.jansi.JansiSupportImpl
import org.jline.terminal.impl.jansi.win.JansiWinSysTerminal
import org.jline.utils.OSUtils
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.util.Try
import java.util.concurrent.LinkedBlockingQueue

View File

@@ -20,7 +20,7 @@ import sbt.internal.util.ConsoleAppender.{
}
import scala.collection.mutable.ArrayBuffer
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
private[sbt] final class ProgressState(
val progressLines: AtomicReference[Seq[String]],
@@ -96,7 +96,7 @@ private[sbt] final class ProgressState(
): Unit = {
if (hasProgress) {
val canClearPrompt = currentLineBytes.get.isEmpty
addBytes(terminal, bytes)
addBytes(terminal, bytes.toSeq)
val toWrite = new ArrayBuffer[Byte]
terminal.prompt match {
case a: Prompt.AskUser if a.render().nonEmpty && canClearPrompt => toWrite ++= cleanPrompt

View File

@@ -8,7 +8,7 @@
package sbt.internal.util
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
private[sbt] sealed trait Prompt {
def mkPrompt: () => String

View File

@@ -146,7 +146,7 @@ object InterfaceUtil {
val get2: A2 = a2
override def toString: String = s"ConcreteT2($a1, $a2)"
override def equals(o: Any): Boolean = o match {
case o: ConcreteT2[A1, A2] =>
case o: ConcreteT2[?, ?] =>
this.get1 == o.get1 &&
this.get2 == o.get2
case _ => false

View File

@@ -11,7 +11,7 @@ import sbt.internal.util._
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
/**
* Provides a context for generating loggers during task evaluation. The logger context can be

View File

@@ -34,7 +34,7 @@ object Relation {
make(forward filter { case (a, bs) => bs.nonEmpty }, reverse)
}
def merge[A, B](rels: Traversable[Relation[A, B]]): Relation[A, B] =
def merge[A, B](rels: Iterable[Relation[A, B]]): Relation[A, B] =
rels.foldLeft(Relation.empty[A, B])(_ ++ _)
private[sbt] def remove[X, Y](map: M[X, Y], from: X, to: Y): M[X, Y] =
@@ -48,7 +48,7 @@ object Relation {
private[sbt] def combine[X, Y](a: M[X, Y], b: M[X, Y]): M[X, Y] =
b.foldLeft(a)((map, mapping) => add(map, mapping._1, mapping._2))
private[sbt] def add[X, Y](map: M[X, Y], from: X, to: Traversable[Y]): M[X, Y] =
private[sbt] def add[X, Y](map: M[X, Y], from: X, to: Iterable[Y]): M[X, Y] =
map.updated(from, get(map, from) ++ to)
private[sbt] def get[X, Y](map: M[X, Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y])
@@ -83,19 +83,19 @@ trait Relation[A, B] {
def +(a: A, b: B): Relation[A, B]
/** Includes in the relation `(a, b)` for all `b` in `bs`. */
def +(a: A, bs: Traversable[B]): Relation[A, B]
def +(a: A, bs: Iterable[B]): Relation[A, B]
/** Returns the union of the relation `r` with this relation. */
def ++(r: Relation[A, B]): Relation[A, B]
/** Includes the given pairs in this relation. */
def ++(rs: Traversable[(A, B)]): Relation[A, B]
def ++(rs: Iterable[(A, B)]): Relation[A, B]
/** Removes all elements `(_1, _2)` for all `_1` in `_1s` from this relation. */
def --(_1s: Traversable[A]): Relation[A, B]
def --(_1s: Iterable[A]): Relation[A, B]
/** Removes all `pairs` from this relation. */
def --(pairs: TraversableOnce[(A, B)]): Relation[A, B]
def --(pairs: IterableOnce[(A, B)]): Relation[A, B]
/** Removes all `relations` from this relation. */
def --(relations: Relation[A, B]): Relation[A, B]
@@ -107,10 +107,10 @@ trait Relation[A, B] {
def -(pair: (A, B)): Relation[A, B]
/** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */
def _1s: collection.Set[A]
def _1s: Set[A]
/** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */
def _2s: collection.Set[B]
def _2s: Set[B]
/** Returns the number of pairs in this relation */
def size: Int
@@ -131,7 +131,7 @@ trait Relation[A, B] {
def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]]
/** Returns all pairs in this relation. */
def all: Traversable[(A, B)]
def all: Iterable[(A, B)]
/**
* Represents this relation as a `Map` from a `_1` to the set of `_2`s such that `(_1, _2)` is in
@@ -168,22 +168,22 @@ private final class MRelation[A, B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]])
def size = (fwd.valuesIterator map (_.size)).sum
def all: Traversable[(A, B)] =
fwd.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }.toTraversable
def all: Iterable[(A, B)] =
fwd.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }.to(Iterable)
def +(pair: (A, B)) = this + (pair._1, Set(pair._2))
def +(from: A, to: B) = this + (from, to :: Nil)
def +(from: A, to: Traversable[B]) =
def +(from: A, to: Iterable[B]) =
if (to.isEmpty) this
else new MRelation(add(fwd, from, to), to.foldLeft(rev)((map, t) => add(map, t, from :: Nil)))
def ++(rs: Traversable[(A, B)]) = rs.foldLeft(this: Relation[A, B]) { _ + _ }
def ++(rs: Iterable[(A, B)]) = rs.foldLeft(this: Relation[A, B]) { _ + _ }
def ++(other: Relation[A, B]) =
new MRelation[A, B](combine(fwd, other.forwardMap), combine(rev, other.reverseMap))
def --(ts: Traversable[A]): Relation[A, B] = ts.foldLeft(this: Relation[A, B]) { _ - _ }
def --(pairs: TraversableOnce[(A, B)]): Relation[A, B] =
pairs.foldLeft(this: Relation[A, B])(_ - _)
def --(ts: Iterable[A]): Relation[A, B] = ts.foldLeft(this: Relation[A, B]) { _ - _ }
def --(pairs: IterableOnce[(A, B)]): Relation[A, B] =
pairs.iterator.foldLeft(this: Relation[A, B])(_ - _)
def --(relations: Relation[A, B]): Relation[A, B] = --(relations.all)
def -(pair: (A, B)): Relation[A, B] =
@@ -205,14 +205,14 @@ private final class MRelation[A, B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]])
}
def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]] =
(all.groupBy(discriminator) mapValues { Relation.empty[A, B] ++ _ }).toMap
all.groupBy(discriminator).view.mapValues { Relation.empty[A, B] ++ _ }.toMap
def contains(a: A, b: B): Boolean = forward(a)(b)
override def equals(other: Any) = other match {
// We assume that the forward and reverse maps are consistent, so we only use the forward map
// for equality. Note that key -> Empty is semantically the same as key not existing.
case o: MRelation[A, B] =>
case o: MRelation[?, ?] =>
forwardMap.filterNot(_._2.isEmpty) == o.forwardMap.filterNot(_._2.isEmpty)
case _ => false
}

View File

@@ -28,7 +28,7 @@ object ScriptedRunnerImpl {
val context = LoggerContext()
val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, handlersProvider)
val logger = newLogger(context)
val allTests = get(tests, resourceBaseDirectory, logger) flatMap {
val allTests = get(tests.toSeq, resourceBaseDirectory, logger) flatMap {
case ScriptedTest(group, name) =>
runner.scriptedTest(group, name, logger, context)
}
@@ -197,7 +197,8 @@ final case class ScriptedTest(group: String, name: String) {
}
object ListTests {
def list(directory: File, filter: java.io.FileFilter) = wrapNull(directory.listFiles(filter))
def list(directory: File, filter: java.io.FileFilter): Seq[File] =
wrapNull(directory.listFiles(filter)).toSeq
}
import ListTests._
final class ListTests(baseDirectory: File, accept: ScriptedTest => Boolean, log: Logger) {

View File

@@ -157,8 +157,8 @@ private[sbt] object ForkTests {
acceptorThread.start()
val cpFiles = classpath.map(converter.toPath).map(_.toFile())
val fullCp = cpFiles ++ Seq(
IO.classLocationPath[ForkMain].toFile,
IO.classLocationPath[Framework].toFile
IO.classLocationPath(classOf[ForkMain]).toFile,
IO.classLocationPath(classOf[Framework]).toFile,
)
val options = Seq(
"-classpath",
@@ -220,10 +220,11 @@ private final class React(
case t: Throwable =>
log.trace(t); react()
case Array(group: String, tEvents: Array[Event]) =>
val events = tEvents.toSeq
listeners.foreach(_ startGroup group)
val event = TestEvent(tEvents)
val event = TestEvent(events)
listeners.foreach(_ testEvent event)
val suiteResult = SuiteResult(tEvents)
val suiteResult = SuiteResult(events)
results += group -> suiteResult
listeners.foreach(_.endGroup(group, suiteResult.result))
react()

View File

@@ -10,7 +10,7 @@ package sbt
import java.io.File
import java.time.OffsetDateTime
import java.util.jar.{ Attributes, Manifest }
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import sbt.io.IO
import sjsonnew.{
@@ -122,7 +122,7 @@ object Pkg:
(in: Vector[(HashedVirtualFileRef, String)] :*: VirtualFileRef :*: Seq[PackageOption] :*:
LNil) => Configuration(in.head, in.tail.head, in.tail.tail.head),
)
given JsonFormat[Configuration] = summon[JsonFormat[Configuration]]
given JsonFormat[Configuration] = isolistFormat
end Configuration
/**
@@ -158,12 +158,11 @@ object Pkg:
val main = manifest.getMainAttributes
for option <- conf.options do
option match
case PackageOption.JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest); ()
case PackageOption.JarManifest(mergeManifest) => mergeManifests(manifest, mergeManifest)
case PackageOption.MainClass(mainClassName) =>
main.put(Attributes.Name.MAIN_CLASS, mainClassName); ()
case PackageOption.ManifestAttributes(attributes @ _*) => main.asScala ++= attributes; ()
main.put(Attributes.Name.MAIN_CLASS, mainClassName)
case PackageOption.ManifestAttributes(attributes @ _*) => main.asScala ++= attributes
case PackageOption.FixedTimestamp(value) => ()
case _ => log.warn("Ignored unknown package option " + option)
setVersion(main)
manifest

View File

@@ -46,19 +46,19 @@ object Sync {
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
outStyle: FileInfo.Style = FileInfo.exists
): Traversable[(File, File)] => Relation[File, File] =
): Iterable[(File, File)] => Relation[File, File] =
sync(store, inStyle)
def sync(
store: CacheStore,
fileConverter: FileConverter
): Traversable[(File, File)] => Relation[File, File] =
): Iterable[(File, File)] => Relation[File, File] =
sync(store, FileInfo.lastModified, fileConverter)
def sync(
store: CacheStore,
inStyle: FileInfo.Style = FileInfo.lastModified,
): Traversable[(File, File)] => Relation[File, File] =
): Iterable[(File, File)] => Relation[File, File] =
sync(store, inStyle, MappedFileConverter.empty)
/** this function ensures that the latest files in /src are also in /target, so that they are synchronised */
@@ -66,7 +66,7 @@ object Sync {
store: CacheStore,
inStyle: FileInfo.Style,
fileConverter: FileConverter
): Traversable[(File, File)] => Relation[File, File] =
): Iterable[(File, File)] => Relation[File, File] =
mappings => {
val relation = Relation.empty ++ mappings
noDuplicateTargets(relation)

View File

@@ -415,10 +415,8 @@ object Tests {
tasks.join.map(_.foldLeft(Map.empty[String, SuiteResult]) { case (sum, e) =>
val merged = sum.toSeq ++ e.toSeq
val grouped = merged.groupBy(_._1)
grouped
.mapValues(_.map(_._2).foldLeft(SuiteResult.Empty) { case (resultSum, result) =>
resultSum + result
})
grouped.view
.mapValues(_.map(_._2).foldLeft(SuiteResult.Empty)(_ + _))
.toMap
})
}

View File

@@ -491,12 +491,10 @@ object BasicCommands {
def runAlias(s: State, args: Option[(String, Option[Option[String]])]): State =
args match {
case None =>
printAliases(s); s
case Some(x ~ None) if !x.isEmpty =>
printAlias(s, x.trim); s
case Some(x ~ None) if !x.isEmpty => printAlias(s, x.trim); s
case Some(name ~ Some(None)) => removeAlias(s, name.trim)
case Some(name ~ Some(Some(value))) => addAlias(s, name.trim, value.trim)
case _ => printAliases(s); s
}
def addAlias(s: State, name: String, value: String): State =
if (Command validID name) {

View File

@@ -293,6 +293,6 @@ object Help {
}
trait CommandDefinitions extends (State => State) {
def commands: Seq[Command] = ReflectUtilities.allVals[Command](this).values.toSeq
def commands: Seq[Command] = ReflectUtilities.allValsC(this, classOf[Command]).values.toSeq
def apply(s: State): State = s ++ commands
}

View File

@@ -43,8 +43,8 @@ object ApplicationID {
delegate.name,
delegate.version,
delegate.mainClass,
delegate.mainComponents,
delegate.mainComponents.toSeq,
delegate.crossVersionedValue,
delegate.classpathExtra
delegate.classpathExtra.toSeq
)
}

View File

@@ -233,10 +233,10 @@ object State {
final class Return(val result: xsbti.MainResult) extends Next
/** Indicates that global logging should be rotated. */
final object ClearGlobalLog extends Next
object ClearGlobalLog extends Next
/** Indicates that the previous log file should be preserved instead of discarded. */
final object KeepLastLog extends Next
object KeepLastLog extends Next
/**
* Provides a list of recently executed commands. The commands are stored as processed instead of as entered by the user.
@@ -424,7 +424,7 @@ object State {
.sameElements(rest.map(_.toURI.toURL)) =>
cache.cachedCustomClassloader(
jars.toList,
() => new UncloseableURLLoader(jars, fullScalaLoader)
() => new UncloseableURLLoader(jars.toSeq, fullScalaLoader)
)
()
case _ =>

View File

@@ -19,6 +19,7 @@ import sbt.io._
import scala.concurrent.duration._
import scala.util.Properties
import scala.annotation.nowarn
@deprecated("Watched is no longer used to implement continuous execution", "1.3.0")
trait Watched {
@@ -52,6 +53,8 @@ trait Watched {
object Watched {
type WatchSource = Source
@nowarn
def terminateWatch(key: Int): Boolean = Watched.isEnter(key)
private def waitMessage(project: String): String =
@@ -82,6 +85,7 @@ object Watched {
}
@nowarn
private[sbt] val newWatchService: () => WatchService =
(() => createWatchService()).label("Watched.newWatchService")
def createWatchService(pollDelay: FiniteDuration): WatchService = {

View File

@@ -16,7 +16,7 @@ import sbt.internal.util.Terminal
import sbt.protocol.EventMessage
import sbt.util.Level
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
/**
* A command channel represents an IO device such as network socket or human

View File

@@ -23,7 +23,7 @@ import xsbti.ScalaProvider
import xsbti.compile.{ ClasspathOptions, ScalaInstance }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.util.control.NonFatal
private object ClassLoaderCache {
@@ -227,7 +227,6 @@ private[sbt] class ClassLoaderCache(
case null =>
case classLoader => close(classLoader)
}
case (_, _) =>
}
delegate.clear()
}

View File

@@ -24,6 +24,7 @@ import Util._
import sbt.util.Show
import xsbti.{ HashedVirtualFileRef, VirtualFile }
import sjsonnew.JsonFormat
import scala.reflect.ClassTag
/** A concrete settings system that uses `sbt.Scope` for the scope type. */
object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
@@ -396,7 +397,7 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
*/
def unit(a: Any): Unit = ()
private[sbt] def dummy[A: Manifest](name: String, description: String): (TaskKey[A], Task[A]) =
private[sbt] def dummy[A: ClassTag](name: String, description: String): (TaskKey[A], Task[A]) =
(TaskKey[A](name, description, DTask), dummyTask(name))
private[sbt] def dummyTask[T](name: String): Task[T] = {
@@ -420,11 +421,8 @@ object Def extends Init[Scope] with TaskMacroExtra with InitializeImplicits:
private[sbt] val (stateKey: TaskKey[State], dummyState: Task[State]) =
dummy[State]("state", "Current build state.")
private[sbt] val (
streamsManagerKey: TaskKey[std.Streams[ScopedKey[_]]],
dummyStreamsManager: Task[std.Streams[ScopedKey[_]]]
) =
Def.dummy[std.Streams[ScopedKey[_]]](
private[sbt] val (streamsManagerKey, dummyStreamsManager) =
Def.dummy[std.Streams[ScopedKey[?]]](
"streams-manager",
"Streams manager, which provides streams for different contexts."
)

View File

@@ -17,6 +17,7 @@ import sbt.util.StampedFormat
import sjsonnew.JsonFormat
import scala.util.control.NonFatal
import scala.annotation.nowarn
/**
* Reads the previous value of tasks on-demand. The read values are cached so that they are only read once per task execution.
@@ -123,10 +124,12 @@ object Previous {
// We first collect all of the successful tasks and write their scoped key into a map
// along with their values.
val successfulTaskResults = (for
results.TPair(task: Task[?], Result.Value(v)) <- results.toTypedSeq
key <- task.info.attributes.get(Def.taskDefinitionKey).asInstanceOf[Option[AnyTaskKey]]
yield key -> v).toMap
val successfulTaskResults = (
for
case results.TPair(task: Task[?], Result.Value(v)) <- results.toTypedSeq
key <- task.info.attributes.get(Def.taskDefinitionKey).asInstanceOf[Option[AnyTaskKey]]
yield key -> v
).toMap
// We then traverse the successful results and look up all of the referenced values for
// each of these tasks. This can be a many to one relationship if multiple tasks refer
// the previous value of another task. For each reference we find, we check if the task has
@@ -168,8 +171,8 @@ object Previous {
.zip(Def.validated(skey, selfRefOk = true))
.zip(Global / references)
.zip(Def.resolvedScoped)
inputs { case (((prevTask, resolved), refs), inTask: ScopedKey[Task[_]] @unchecked) =>
val key = Key(resolved, inTask)
inputs { case (((prevTask, resolved), refs), inTask) =>
val key = Key(resolved, inTask.asInstanceOf[ScopedKey[Task[Any]]])
refs.recordReference(key, format) // always evaluated on project load
prevTask.map(_.get(key)) // evaluated if this task is evaluated
}

View File

@@ -18,6 +18,7 @@ import sbt.Def.{ Initialize, ScopedKey, Setting, setting }
import std.TaskMacro
import std.TaskExtra.{ task => mktask, _ }
import scala.reflect.{ ClassTag, ManifestFactory }
import scala.annotation.nowarn
/** An abstraction on top of Settings for build configuration and task definition. */
sealed trait Scoped extends Equals:
@@ -79,7 +80,7 @@ sealed abstract class SettingKey[A1]
inline def settingMacro[A](inline a: A): Initialize[A] =
${ std.SettingMacro.settingMacroImpl[A]('a) }
final inline def :=(inline v: A1): Setting[A1] =
final inline def :=(inline v: A1): Setting[A1 @nowarn] =
${ TaskMacro.settingAssignMacroImpl('this, 'v) }
final inline def +=[A2](inline v: A2)(using Append.Value[A1, A2]): Setting[A1] =

View File

@@ -15,8 +15,8 @@ import sbt.util.Applicative
import scala.quoted.*
class InputInitConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =
@@ -33,8 +33,8 @@ end InputInitConvert
/** Converts an input `Term` of type `Parser[A]` or `State => Parser[A]` into a `Term` of type `State => Parser[A]`. */
class ParserConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =
@@ -51,8 +51,8 @@ end ParserConvert
/** Convert instance for plain `Task`s not within the settings system. */
class TaskConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =
if nme == InputWrapper.WrapTaskName then Converted.success(in)
@@ -67,8 +67,8 @@ end TaskConvert
* a `Term` of type `Initialize[Task[A]]`.
*/
class FullConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =
@@ -103,8 +103,8 @@ end FullConvert
* into a `Term` of type `Initialize[State => Parser[A]]`.
*/
class InitParserConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =

View File

@@ -21,8 +21,8 @@ import scala.quoted.*
import sbt.internal.util.complete.Parser
class InitializeConvert[C <: Quotes & scala.Singleton](override val qctx: C, valStart: Int)
extends Convert[C](qctx)
with ContextUtil[C](qctx, valStart):
extends Convert[C]
with ContextUtil[C](valStart):
import qctx.reflect.*
override def convert[A: Type](nme: String, in: Term): Converted =

View File

@@ -5,6 +5,7 @@
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.std
import sbt.SettingKey
@@ -16,7 +17,6 @@ import sbt.internal.util.Terminal
import scala.io.AnsiColor
import scala.reflect.macros.blackbox
/*
abstract class BaseTaskLinterDSL extends LinterDSL {
def isDynamicTask: Boolean
def convert: Convert

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
/*
object AppendSpec {
val onLoad = SettingKey[State => State]("onLoad")

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.test
/*
import org.scalacheck.{ Test => _, _ }, Arbitrary.arbitrary, Gen._
import java.io.File

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
/*
import org.scalatest.flatspec.AnyFlatSpec
import sbt.internal.util.{ AttributeKey, AttributeMap }
import sbt.io.syntax.file

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.test
/*
import org.scalacheck._, Prop._, util.Pretty
import sbt.internal.util.AttributeKey

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.test
/*
import org.scalacheck.{ Test => _, _ }, Prop._
import sbt.SlashSyntax

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.test
/*
import java.io.File
import sjsonnew._, BasicJsonProtocol._
import sbt.Def.{ Setting, inputKey, settingKey, taskKey }

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.std
/*
import org.scalatest.{ TestData, fixture, funsuite }
import sbt.std.TestUtil._

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.std
/*
class TaskPosSpec {
// Starting sbt 1.4.0, Def.task can have task value lookups inside
// if branches since tasks with single if-expressions are automatically

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.std
/*
import org.scalatest.TestData
import scala.tools.reflect.ToolBox

View File

@@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt.std.neg
/*
import scala.tools.reflect.ToolBoxError
import org.scalatest.{ TestData, fixture, funsuite }
import sbt.std.{ TaskLinterDSLFeedback, TestUtil }

View File

@@ -179,6 +179,7 @@ object Cross {
project(k).toSeq.flatMap(crossVersions(extracted, _).map(v => v -> k))
}
.groupBy(_._1)
.view
.mapValues(_.map(_._2).toSet)
val commandsByVersion = keysByVersion.toSeq
.flatMap { case (v, keys) =>
@@ -188,7 +189,7 @@ object Cross {
if (p == extracted.currentRef || !projects.contains(extracted.currentRef)) {
val parts =
project(k).map(_.project) ++ k.scope.config.toOption.map { case ConfigKey(n) =>
n.head.toUpper + n.tail
s"${n.head.toUpper}${n.tail}"
} ++ k.scope.task.toOption.map(_.label) ++ Some(k.key.label)
Some(v -> parts.mkString("", "/", fullArgs))
} else None
@@ -196,6 +197,7 @@ object Cross {
}
}
.groupBy(_._1)
.view
.mapValues(_.map(_._2))
.toSeq
.sortBy(_._1)

View File

@@ -899,7 +899,7 @@ object Defaults extends BuildCommon {
) ++
configGlobal ++ defaultCompileSettings ++ compileAnalysisSettings ++ Seq(
compileOutputs := {
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val c = fileConverter.value
val classFiles =
manipulateBytecode.value.analysis.readStamps.getAllProductStamps.keySet.asScala
@@ -1236,8 +1236,8 @@ object Defaults extends BuildCommon {
makeScalaInstance(
dummy.version,
dummy.libraryJars,
dummy.compilerJars,
dummy.allJars,
dummy.compilerJars.toSeq,
dummy.allJars.toSeq,
state.value,
scalaInstanceTopLoader.value,
)
@@ -1264,7 +1264,7 @@ object Defaults extends BuildCommon {
loadedTestFrameworks := {
val loader = testLoader.value
val log = streams.value.log
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x)).toIterable).toMap
testFrameworks.value.flatMap(f => f.create(loader, log).map(x => (f, x))).toMap
},
definedTests := detectTests.value,
definedTestNames := (definedTests map (_.map(
@@ -2449,7 +2449,7 @@ object Defaults extends BuildCommon {
}
val map = managedFileStampCache.value
val analysis = analysisResult.analysis
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
analysis.readStamps.getAllProductStamps.asScala.foreach { case (f: VirtualFileRef, s) =>
map.put(c.toPath(f), sbt.nio.FileStamp.fromZincStamp(s))
}
@ -4046,7 +4046,7 @@ object Classpaths {
includeDetails = includeDetails,
log = s.log
)
}
}: @nowarn
private[sbt] def dependencyPositionsTask: Initialize[Task[Map[ModuleID, SourcePosition]]] =
Def.task {
@ -4064,7 +4064,7 @@ object Classpaths {
(s.key.key == libraryDependencies.key) &&
(s.key.scope.project == Select(projRef))
}
Map(settings flatMap { case s: Setting[Seq[ModuleID]] @unchecked =>
Map(settings.asInstanceOf[Seq[Setting[Seq[ModuleID]]]].flatMap { s =>
s.init.evaluate(empty) map { _ -> s.pos }
}: _*)
} catch {
@ -4394,11 +4394,11 @@ object Classpaths {
if cond then
Def.task {
val converter = fileConverter.value
(scalaInstance.value.libraryJars: Seq[File])
scalaInstance.value.libraryJars.toSeq
.map(_.toPath)
.map(converter.toVirtualFile)
}
else Def.task { (Nil: Seq[HashedVirtualFileRef]) }
else Def.task { Seq.empty[HashedVirtualFileRef] }
}
import DependencyFilter._

View File

@ -41,6 +41,7 @@ import scala.concurrent.ExecutionContext
import scala.concurrent.duration.Duration
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import scala.util.boundary
/** This class is the entry point for sbt. */
final class xMain extends xsbti.AppMain:
@ -59,7 +60,7 @@ private[sbt] object xMain:
override def provider: AppProvider = config.provider()
}
private[sbt] def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = {
private[sbt] def run(configuration: xsbti.AppConfiguration): xsbti.MainResult = boundary {
try {
import BasicCommandStrings.{ DashDashClient, DashDashServer, runEarly }
import BasicCommands.early
@ -79,7 +80,7 @@ private[sbt] object xMain:
lazy val isServer = !userCommands.exists(c => isBsp(c) || isClient(c))
// keep this lazy to prevent project directory created prematurely
lazy val bootServerSocket = if (isServer) getSocketOrExit(configuration) match {
case (_, Some(e)) => return e
case (_, Some(e)) => boundary.break(e)
case (s, _) => s
}
else None
@ -570,7 +571,7 @@ object BuiltinCommands {
val app = s.configuration.provider
val classpath = app.mainClasspath ++ app.scalaProvider.jars
val result = Load
.mkEval(classpath.map(_.toPath()), s.baseDir, Nil)
.mkEval(classpath.map(_.toPath()).toSeq, s.baseDir, Nil)
.evalInfer(expression = arg, imports = EvalImports(Nil))
s.log.info(s"ans: ${result.tpe} = ${result.getValue(app.loader)}")
}
@ -1146,7 +1147,7 @@ object BuiltinCommands {
if (SysProp.allowRootDir) ()
else {
val baseDir = state.baseDir
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
// this should return / on Unix and C:\ for Windows.
val rootOpt = FileSystems.getDefault.getRootDirectories.asScala.toList.headOption
rootOpt foreach { root =>

View File

@ -373,7 +373,7 @@ trait ProjectExtra extends Scoped.Syntax:
private[this] def overlappingTargets(
targets: Seq[(ProjectRef, File)]
): Map[File, Seq[ProjectRef]] =
targets.groupBy(_._2).filter(_._2.size > 1).mapValues(_.map(_._1)).toMap
targets.groupBy(_._2).view.filter(_._2.size > 1).mapValues(_.map(_._1)).toMap
private[this] def allTargets(data: Settings[Scope]): Seq[(ProjectRef, File)] = {
import ScopeFilter._

View File

@ -367,7 +367,7 @@ object RemoteCache {
List((packageCache / remoteCacheArtifact).value)
},
pullRemoteCache := {
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val log = streams.value.log
val r = remoteCacheResolvers.value.head
val p = remoteCacheProjectId.value

View File

@ -136,7 +136,7 @@ object ScriptedPlugin extends AutoPlugin {
val p = f.getParentFile
(p.getParentFile.getName, p.getName)
}
val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet)
val pairMap = pairs.groupBy(_._1).view.mapValues(_.map(_._2).toSet)
val id = charClass(c => !c.isWhitespace && c != '/', "not whitespace and not '/'").+.string
val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/')

View File

@ -36,7 +36,7 @@ import sbt.librarymanagement.ivy.{
}
import sbt.ProjectExtra.transitiveInterDependencies
import sbt.ScopeFilter.Make._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
object CoursierInputsTasks {
private def coursierProject0(
@ -186,7 +186,7 @@ object CoursierInputsTasks {
CProject(
module,
v.getModuleRevisionId.getRevision,
deps,
deps.toSeq,
configurations,
Nil,
None,

View File

@ -340,7 +340,7 @@ object LMCoursier {
()
}
}
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
(ThisBuild / Keys.credentials).value foreach registerCredentials
(LocalRootProject / Keys.credentials).value foreach registerCredentials
Keys.credentials.value foreach registerCredentials

View File

@ -601,9 +601,9 @@ object Act {
sealed trait ParsedAxis[+T] {
final def isExplicit = this != Omitted
}
final object ParsedGlobal extends ParsedAxis[Nothing]
final object ParsedZero extends ParsedAxis[Nothing]
final object Omitted extends ParsedAxis[Nothing]
object ParsedGlobal extends ParsedAxis[Nothing]
object ParsedZero extends ParsedAxis[Nothing]
object Omitted extends ParsedAxis[Nothing]
final class ParsedValue[T](val value: T) extends ParsedAxis[T]
def value[T](t: Parser[T]): Parser[ParsedAxis[T]] = t map { v =>
new ParsedValue(v)

View File

@ -22,13 +22,13 @@ object AddSettings {
private[sbt] final class Sequence(val sequence: Seq[AddSettings]) extends AddSettings {
override def toString: String = s"Sequence($sequence)"
}
private[sbt] final object User extends AddSettings
private[sbt] object User extends AddSettings
private[sbt] final class AutoPlugins(val include: AutoPlugin => Boolean) extends AddSettings
private[sbt] final class DefaultSbtFiles(val include: VirtualFile => Boolean) extends AddSettings
// private[sbt] final class SbtFiles(val files: Seq[File]) extends AddSettings {
// override def toString: String = s"SbtFiles($files)"
// }
private[sbt] final object BuildScalaFiles extends AddSettings
private[sbt] object BuildScalaFiles extends AddSettings
/** Adds all settings from autoplugins. */
val autoPlugins: AddSettings =

View File

@ -79,7 +79,7 @@ object Aggregation {
val success = results match
case Result.Value(_) => true
case Result.Inc(_) => false
results.toEither.right.foreach { r =>
results.toEither.foreach { r =>
if (show.taskValues) printSettings(r, show.print)
}
if (show.success && !state.get(suppressShow).getOrElse(false))

View File

@ -20,7 +20,7 @@ import xsbti.HashedVirtualFileRef
trait BuildDef {
def projectDefinitions(@deprecated("unused", "") baseDirectory: File): Seq[Project] = projects
def projects: Seq[Project] =
CompositeProject.expand(ReflectUtilities.allVals[CompositeProject](this).values.toSeq)
CompositeProject.expand(ReflectUtilities.allValsC(this, classOf[CompositeProject]).values.toSeq)
// TODO: Should we grab the build core settings here or in a plugin?
def settings: Seq[Setting[_]] = Defaults.buildCore
def buildLoaders: Seq[BuildLoader.Components] = Nil

View File

@ -293,7 +293,7 @@ final class PartBuildUnit(
) extends BuildUnitBase {
def resolve(f: Project => ResolvedProject): LoadedBuildUnit =
new LoadedBuildUnit(unit, defined.mapValues(f).toMap, rootProjects, buildSettings)
new LoadedBuildUnit(unit, defined.view.mapValues(f).toMap, rootProjects, buildSettings)
def resolveRefs(f: ProjectReference => ProjectRef): LoadedBuildUnit = resolve(_ resolve f)
}
@ -371,7 +371,8 @@ object BuildStreams {
case _ => Nil
}
def showAMap(a: AttributeMap): String =
a.entries.toStream
a.entries
.to(LazyList)
.sortBy(_.key.label)
.flatMap {
// The Previous.scopedKeyAttribute is an implementation detail that allows us to get a

View File

@ -22,7 +22,7 @@ import sbt.librarymanagement.{ Configuration, TrackLevel }
import sbt.librarymanagement.Configurations.names
import sbt.std.TaskExtra._
import sbt.util._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import xsbti.{ HashedVirtualFileRef, VirtualFileRef }
import xsbti.compile.CompileAnalysis
@ -411,7 +411,7 @@ private[sbt] object ClasspathImpl {
depConfs: Seq[String],
default: String => Seq[String]
): String => Seq[String] =
union(confString.split(";") map parseSingleMapping(masterConfs, depConfs, default))
union(confString.split(";").map(parseSingleMapping(masterConfs, depConfs, default)).toSeq)
def parseSingleMapping(
masterConfs: Seq[String],

View File

@ -34,5 +34,4 @@ object CompileInputs2:
CompileInputs2(in.head, in.tail.head, in.tail.tail.head, in.tail.tail.tail.head)
}
)
given JsonFormat[CompileInputs2] = summon
end CompileInputs2

View File

@ -46,6 +46,7 @@ import scala.concurrent.duration.FiniteDuration.FiniteDurationIsOrdered
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
import scala.util.control.NonFatal
import scala.annotation.nowarn
/**
* Provides the implementation of the `~` command and `watch` task. The implementation is quite
@ -236,7 +237,6 @@ private[sbt] object Continuous extends DeprecatedContinuous {
throw new IllegalStateException(msg)
}
}
case _ => Nil: Seq[ScopedKey[_]]
}
}
impl(command)
@ -397,6 +397,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
}
}
@nowarn
private def getOnStart(
project: ProjectRef,
commands: Seq[String],

View File

@ -327,7 +327,7 @@ private[sbt] object CrossJava {
// We have a key, we're likely to be able to cross build this using the per project behaviour.
// Group all the projects by scala version
projVersions.groupBy(_._2).mapValues(_.map(_._1)).toSeq.flatMap {
projVersions.groupBy(_._2).view.mapValues(_.map(_._1)).toSeq.flatMap {
case (version, Seq(project)) =>
// If only one project for a version, issue it directly
Seq(s"$JavaSwitchCommand $verbose $version", s"$project/$aggCommand")

View File

@ -183,14 +183,12 @@ private[sbt] abstract class AbstractBackgroundJobService extends BackgroundJobSe
override def shutdown(): Unit = {
val deadline = 10.seconds.fromNow
while (jobSet.nonEmpty && !deadline.isOverdue) {
jobSet.headOption.foreach {
case handle: ThreadJobHandle @unchecked =>
if (handle.job.isRunning()) {
handle.job.shutdown()
handle.job.awaitTerminationTry(10.seconds)
}
jobSet = jobSet - handle
case _ => //
jobSet.headOption.foreach { case handle: ThreadJobHandle @unchecked =>
if (handle.job.isRunning()) {
handle.job.shutdown()
handle.job.awaitTerminationTry(10.seconds)
}
jobSet = jobSet - handle
}
}
pool.close()

View File

@ -12,7 +12,7 @@ import java.lang.management.ManagementFactory
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.atomic.AtomicReference
import scala.concurrent.duration._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.util.Try
import sbt.util.Logger
@ -61,7 +61,7 @@ class GCMonitor(logger: Logger) extends GCMonitorBase with AutoCloseable {
override protected def emitWarning(total: Long, over: Option[Long]): Unit = {
val totalSeconds = total / 1000.0
val amountMsg = over.fold(totalSeconds + " seconds") { d =>
val amountMsg = over.fold(s"$totalSeconds seconds") { d =>
"In the last " + (d / 1000.0).ceil.toInt + f" seconds, $totalSeconds (${total.toDouble / d * 100}%.1f%%)"
}
val msg = s"$amountMsg were spent in GC. " +

View File

@ -23,7 +23,7 @@ private[sbt] final class GroupedAutoPlugins(
private[sbt] object GroupedAutoPlugins {
private[sbt] def apply(units: Map[URI, LoadedBuildUnit]): GroupedAutoPlugins = {
val byBuild: Map[URI, Seq[AutoPlugin]] =
units.mapValues(unit => unit.projects.flatMap(_.autoPlugins).toSeq.distinct).toMap
units.view.mapValues(unit => unit.projects.flatMap(_.autoPlugins).toSeq.distinct).toMap
val all: Seq[AutoPlugin] = byBuild.values.toSeq.flatten.distinct
new GroupedAutoPlugins(all, byBuild)
}

View File

@ -15,7 +15,7 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference }
import sbt.io.IO
import sbt.util.Logger
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
/**
* This classloader doesn't load any classes. It is able to create a two layer bundled ClassLoader
@ -141,7 +141,7 @@ private[internal] class NativeLookup extends NativeLoader {
private[this] def findLibrary0(name: String): String = {
val mappedName = System.mapLibraryName(name)
val search = searchPaths.toStream flatMap relativeLibrary(mappedName)
val search = searchPaths.to(LazyList).flatMap(relativeLibrary(mappedName))
search.headOption.map(copy).orNull
}

View File

@ -21,8 +21,8 @@ import sbt.io.IO
import sbt.io.syntax._
import sbt.ProjectExtra.*
import sjsonnew.JsonFormat
import scala.compat.Platform.EOL
import scala.concurrent.duration.FiniteDuration
import scala.annotation.nowarn
private[sbt] object LibraryManagement {
implicit val linter: sbt.dsl.LinterLevel.Ignore.type = sbt.dsl.LinterLevel.Ignore
@ -92,7 +92,7 @@ private[sbt] object LibraryManagement {
|| assumedEvictionErrorLevel != Level.Error
) Nil
else evictionError.toAssumedLines)
if (errorLines.nonEmpty) sys.error((errorLines ++ extraLines).mkString(EOL))
if (errorLines.nonEmpty) sys.error((errorLines ++ extraLines).mkString(System.lineSeparator))
else {
if (evictionError.incompatibleEvictions.isEmpty) ()
else evictionError.lines.foreach(log.log(evictionLevel, _: String))
@ -244,6 +244,7 @@ private[sbt] object LibraryManagement {
* Resolves and optionally retrieves classified artifacts, such as javadocs and sources,
* for dependency definitions, transitively.
*/
@nowarn
def updateClassifiersTask: Def.Initialize[Task[UpdateReport]] =
TupleWrap[
(
@ -417,15 +418,17 @@ private[sbt] object LibraryManagement {
default = Map.empty[ModuleID, Vector[ConfigRef]]
)
val report = f(excludes)
val allExcludes: Map[ModuleID, Vector[ConfigRef]] = excludes ++ IvyActions
.extractExcludes(report)
.mapValues(cs => cs.map(c => ConfigRef(c)).toVector)
val allExcludes: Map[ModuleID, Vector[ConfigRef]] = excludes ++
IvyActions
.extractExcludes(report)
.view
.mapValues(cs => cs.map(c => ConfigRef(c)).toVector)
store.write(allExcludes)
IvyActions
.addExcluded(
report,
classifiers.toVector,
allExcludes.mapValues(_.map(_.name).toSet).toMap
allExcludes.view.mapValues(_.map(_.name).toSet).toMap
)
}
}

View File

@ -686,7 +686,7 @@ private[sbt] object Load {
val resolve = (_: Project).resolve(ref => Scope.resolveProjectRef(uri, rootProject, ref))
new LoadedBuildUnit(
unit.unit,
unit.defined.mapValues(resolve).toMap,
unit.defined.view.mapValues(resolve).toMap,
unit.rootProjects,
unit.buildSettings
)
@ -1441,11 +1441,9 @@ private[sbt] object Load {
// Load only the dependency classpath for the common plugin classloader
val loader = manager.loader
loader.add(
sbt.io.Path.toURLs(
data(dependencyClasspath)
.map(converter.toPath)
.map(_.toFile())
)
sbt.io.Path
.toURLs(data(dependencyClasspath).map(converter.toPath).map(_.toFile()))
.toSeq
)
loader
// Load the definition classpath separately to avoid conflicts, see #511.

View File

@ -261,11 +261,9 @@ object LogManager {
// val execId: Option[String] = execOpt flatMap { _.execId }
val log = context.logger(loggerName, channelName, None)
context.clearAppenders(loggerName)
val consoleOpt = consoleLocally(state, console) map {
case a: Appender =>
a.setTrace(screenTrace)
a
case a => a
val consoleOpt = consoleLocally(state, console).map { a =>
a.setTrace(screenTrace)
a
}
consoleOpt.foreach(a => context.addAppender(loggerName, a -> screenLevel))
context.addAppender(loggerName, relay -> backingLevel)

View File

@ -133,7 +133,7 @@ object PluginDiscovery:
loader: ClassLoader,
resourceName: String
): Seq[String] =
import collection.JavaConverters._
import scala.jdk.CollectionConverters.*
loader
.getResources(resourceName)
.asScala

View File

@ -172,7 +172,7 @@ private[sbt] object PluginsDebug {
def definesPlugin(p: ResolvedProject): Boolean = p.autoPlugins.contains(plugin)
def projectForRef(ref: ProjectRef): ResolvedProject = get(ref / Keys.thisProject)
val perBuild: Map[URI, Set[AutoPlugin]] =
structure.units.mapValues(unit => availableAutoPlugins(unit).toSet).toMap
structure.units.view.mapValues(unit => availableAutoPlugins(unit).toSet).toMap
val pluginsThisBuild = perBuild.getOrElse(currentRef.build, Set.empty).toList
lazy val context = Context(
currentProject.plugins,

View File

@ -229,7 +229,7 @@ object SessionSettings:
val (_, oldShifted, replace) = inFile.foldLeft((0, List[Setting[_]](), Seq[SessionSetting]())) {
case ((offs, olds, repl), s) =>
val RangePosition(_, r @ LineRange(start, end)) = s.pos
val RangePosition(_, r @ LineRange(start, end)) = s.pos: @unchecked
settings find (_._1.key == s.key) match {
case Some(ss @ (ns, newLines)) if !ns.init.dependencies.contains(ns.key) =>
val shifted = ns withPos RangePosition(

View File

@ -333,7 +333,7 @@ private[sbt] object SettingCompletions {
else if (showDescriptions) {
val withDescriptions = in map { case (id, key) => (id, description(key)) }
val padded = CommandUtil.aligned("", " ", withDescriptions)
(padded, in).zipped.map { case (line, (id, _)) =>
padded.zip(in).map { case (line, (id, _)) =>
Completion.tokenDisplay(append = appendString(id), display = line + "\n")
}
} else

View File

@ -13,7 +13,7 @@ import java.util.concurrent.{ RejectedExecutionException, TimeUnit }
import sbt.internal.util._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.concurrent.duration._
import java.util.concurrent.{ ConcurrentHashMap, Executors, TimeoutException }
import sbt.util.Logger

View File

@ -44,7 +44,7 @@ object VirtualFileValueCache {
)(f)
}
def make[A](stamp: VirtualFile => XStamp)(f: VirtualFile => A): VirtualFileValueCache[A] =
new VirtualFileValueCache0[A](stamp, f)
new VirtualFileValueCache0[A](stamp, f)(using Equiv.universal)
}
private[this] final class VirtualFileValueCache0[A](

View File

@ -91,12 +91,12 @@ private[sbt] object WatchTransitiveDependencies {
(extracted, compiledMap, st, rs)
}
.flatMapTask { case (extracted, compiledMap, st, rs) =>
st.currentCommand.map(_.commandLine) match
case Some(ShowTransitive(key)) =>
st.currentCommand.get.commandLine match
case ShowTransitive(key) =>
Parser.parse(key.trim, Act.scopedKeyParser(st)) match
case Right(scopedKey) => argumentsImpl(scopedKey, extracted, compiledMap)
case _ => argumentsImpl(rs, extracted, compiledMap)
case Some(_) => argumentsImpl(rs, extracted, compiledMap)
case _ => argumentsImpl(rs, extracted, compiledMap)
}
private[sbt] def transitiveDynamicInputs(args: Arguments): Seq[DynamicInput] = {

View File

@ -11,7 +11,7 @@ package graph
package backend
import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.reflect.Selectable.reflectiveSelectable
import sbt.librarymanagement.{ ModuleID, ModuleReport, ConfigurationReport }
object SbtUpdateReport {

View File

@ -11,7 +11,7 @@ package graph
import java.io.File
import sjsonnew._
import scala.collection.mutable.{ HashMap, MultiMap, Set }
import scala.collection.mutable
private[sbt] case class GraphModuleId(
organization: String,
@ -97,12 +97,12 @@ private[sbt] case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
def createMap(
bindingFor: ((GraphModuleId, GraphModuleId)) => (GraphModuleId, GraphModuleId)
): Map[GraphModuleId, Seq[Module]] = {
val m = new HashMap[GraphModuleId, Set[Module]] with MultiMap[GraphModuleId, Module]
val map = mutable.Map.empty[GraphModuleId, mutable.Set[Module]]
edges.foreach { entry =>
val (f, t) = bindingFor(entry)
module(t).foreach(m.addBinding(f, _))
module(t).foreach { m => map.getOrElseUpdate(f, mutable.Set.empty) += m }
}
m.toMap.mapValues(_.toSeq.sortBy(_.id.idString)).toMap.withDefaultValue(Nil)
map.view.mapValues(_.toSeq.sortBy(_.id.idString)).toMap.withDefaultValue(Nil)
}
def roots: Seq[Module] =

View File

@ -24,7 +24,7 @@ import sbt.Keys.{
}
import sbt.ProjectExtra.*
import sbt.librarymanagement.PublishConfiguration
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.xml.{ Node, PrefixedAttribute }
object IvyXml {
@ -135,6 +135,7 @@ object IvyXml {
val publications = project.publications
.groupBy { case (_, p) => p }
.view
.mapValues { _.map { case (cfg, _) => cfg } }
val publicationElems = publications.map { case (pub, configs) =>

View File

@ -56,7 +56,7 @@ object BspCompileTask {
targetId: BuildTargetIdentifier,
elapsedTimeMillis: Long
): CompileReport = {
val countBySeverity = problems.groupBy(_.severity()).mapValues(_.size)
val countBySeverity = problems.groupBy(_.severity).view.mapValues(_.size)
val warnings = countBySeverity.getOrElse(Severity.Warn, 0)
val errors = countBySeverity.getOrElse(Severity.Error, 0)
CompileReport(targetId, None, errors, warnings, Some(elapsedTimeMillis.toInt))
@ -79,15 +79,11 @@ case class BspCompileTask private (
}
private[sbt] def notifySuccess(result: CompileResult): Unit = {
import collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val endTimeMillis = System.currentTimeMillis()
val elapsedTimeMillis = endTimeMillis - startTimeMillis
val problems = result match {
case compileResult: CompileResult =>
val sourceInfos = compileResult.analysis().readSourceInfos().getAllSourceInfos.asScala
sourceInfos.values.flatMap(_.getReportedProblems).toSeq
case _ => Seq()
}
val sourceInfos = result.analysis().readSourceInfos().getAllSourceInfos.asScala
val problems = sourceInfos.values.flatMap(_.getReportedProblems).toSeq
val report = compileReport(problems, targetId, elapsedTimeMillis)
val params = TaskFinishParams(
id,

View File

@ -24,7 +24,7 @@ import xsbti.{
Position => XPosition
}
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.collection.mutable
sealed trait BuildServerReporter extends Reporter {

View File

@ -13,7 +13,7 @@ import java.net.URI
import java.nio.file._
import scala.annotation.{ nowarn, tailrec }
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.NameTransformer
import scala.util.matching.Regex

View File

@ -667,7 +667,7 @@ final class NetworkChannel(
new Terminal.WriteableInputStream(inputStream, name)
import sjsonnew.BasicJsonProtocol._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
private[this] val outputBuffer = new LinkedBlockingQueue[Byte]
private[this] val flushExecutor = Executors.newSingleThreadScheduledExecutor(r =>
new Thread(r, s"$name-output-buffer-timer-thread")
@ -799,13 +799,14 @@ final class NetworkChannel(
}
private[this] def waitForPending(f: TerminalPropertiesResponse => Boolean): Boolean = {
if (closed.get || !isAttached) false
withThread(
{
if (pending.get) pending.synchronized(pending.wait())
Option(properties.get).map(f).getOrElse(false)
},
false
)
else
withThread(
{
if (pending.get) pending.synchronized(pending.wait())
Option(properties.get).map(f).getOrElse(false)
},
false
)
}
private[this] val blockedThreads = ConcurrentHashMap.newKeySet[Thread]
override private[sbt] val progressState: ProgressState = new ProgressState(

View File

@ -26,7 +26,7 @@ object SettingQuery {
// Similar to Act.ParsedAxis / Act.projectRef / Act.resolveProject except you can't omit the project reference
sealed trait ParsedExplicitAxis[+T]
final object ParsedExplicitGlobal extends ParsedExplicitAxis[Nothing]
object ParsedExplicitGlobal extends ParsedExplicitAxis[Nothing]
final class ParsedExplicitValue[T](val value: T) extends ParsedExplicitAxis[T]
def explicitValue[T](t: Parser[T]): Parser[ParsedExplicitAxis[T]] = t map { v =>
new ParsedExplicitValue(v)

View File

@ -85,7 +85,7 @@ object VirtualTerminal {
queue
}
private[sbt] def cancelRequests(name: String): Unit = {
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
pendingTerminalCapabilities.asScala.foreach {
case (k @ (`name`, _), q) =>
pendingTerminalCapabilities.remove(k)

View File

@ -4,7 +4,7 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
import sbt.Def.{ ScopedKey, displayFull, displayMasked }
@ -16,7 +16,6 @@ import hedgehog._
import hedgehog.core.{ ShrinkLimit, SuccessCount }
import hedgehog.runner._
/*
/**
* Tests that the scoped key parser in Act can correctly parse a ScopedKey converted by Def.show*Key.
* This includes properly resolving omitted components.

View File

@ -4,7 +4,7 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
import java.net.URI
@ -18,7 +18,6 @@ import sbt.librarymanagement.Configuration
import hedgehog._
import hedgehog.runner._
/*
object ParserSpec extends Properties {
override def tests: List[Test] =
List(

View File

@ -4,10 +4,9 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
/*
import java.io._
import sbt.internal._

View File

@ -97,7 +97,7 @@ abstract class TestBuild {
// task axis of Scope is set to Zero and the value of the second map is the original task axis
val taskAxesMappings =
for ((scope, keys) <- data.data.toIterable; key <- keys.keys)
for ((scope, keys) <- data.data; key <- keys.keys)
yield (ScopedKey(scope.copy(task = Zero), key), scope.task): (
ScopedKey[_],
ScopeAxis[AttributeKey[_]]

View File

@ -4,7 +4,7 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package testpkg
import java.net.URI
@ -19,7 +19,6 @@ import hedgehog._
import hedgehog.runner._
import _root_.sbt.internal.util.complete.Parser
/*
object CompletionSpec extends Properties {
override def tests: List[Test] =
List(

View File

@ -32,8 +32,8 @@ object ForkTest extends Properties("Fork") {
lazy val genRelClasspath = nonEmptyListOf(path)
lazy val requiredEntries =
IO.classLocationPath[scala.Option[_]].toFile ::
IO.classLocationPath[sbt.exit.type].toFile ::
IO.classLocationPath(classOf[scala.Option[_]]).toFile ::
IO.classLocationPath(classOf[sbt.exit.type]).toFile ::
Nil
lazy val mainAndArgs =
"sbt.exit" ::

View File

@ -43,7 +43,7 @@ package object sbt
def file(s: String): File = new File(s)
def url(s: String): URL = new URL(s)
implicit def fileToRichFile(file: File): sbt.io.RichFile = new sbt.io.RichFile(file)
implicit def filesToFinder(cc: Traversable[File]): sbt.io.PathFinder =
implicit def filesToFinder(cc: Iterable[File]): sbt.io.PathFinder =
sbt.io.PathFinder.strict(cc)
/*
* Provides macro extension methods. Because the extension methods are all macros, no instance

View File

@ -1,5 +1,5 @@
import java.nio.file._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val copyTestResources = inputKey[Unit]("Copy the native libraries to the base directory")
val appendToLibraryPath = taskKey[Unit]("Append the base directory to the java.library.path system property")

View File

@ -1,7 +1,7 @@
package sbt
import java.nio.file._
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
object TestMain {
def main(args: Array[String]): Unit = {

View File

@ -3,7 +3,7 @@ ThisBuild / scalaVersion := "2.12.17"
import java.nio.file.Files
import java.nio.file.attribute.FileTime
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val rewriteIvy = inputKey[Unit]("Rewrite ivy directory")
ThisBuild / useCoursier := false

View File

@ -1,6 +1,6 @@
import java.nio.file.Files
import scala.collection.JavaConverters._
import scala.jdk.CollectionConverters.*
val foo = taskKey[Unit]("foo")
foo := {

View File

@ -4,13 +4,12 @@
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
/*
package sbt
import org.scalatest
import org.scalatest.{ TestData, fixture, funsuite }
/*
import scala.tools.reflect.{ FrontEnd, ToolBoxError }
class IllegalReferenceSpec extends funsuite.FixtureAnyFunSuite with fixture.TestDataFixture {

View File

@ -96,7 +96,7 @@ object RunFromSourceMain {
context
) map exit
catch {
case r: xsbti.FullReload => Some((baseDir, r.arguments()))
case r: xsbti.FullReload => Some((baseDir, r.arguments.toSeq))
case scala.util.control.NonFatal(e) =>
e.printStackTrace(); errorAndExit(e.toString)
}

View File

@ -536,7 +536,7 @@ class ScriptedRunner {
javaCommand = "java",
launchOpts,
prescripted,
RunFromSourceBased(scalaVersion, sbtVersion, classpath),
RunFromSourceBased(scalaVersion, sbtVersion, classpath.toSeq),
instances
)
@ -626,7 +626,7 @@ class ScriptedRunner {
instances
)
private def reportErrors(errors: GenSeq[String]): Unit =
private def reportErrors(errors: Seq[String]): Unit =
if (errors.nonEmpty) sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n")) else ()
def runAll(toRun: Seq[ScriptedTests.TestRunner]): Unit =
@ -696,12 +696,13 @@ private[sbt] final class ListTests(
def filter = DirectoryFilter -- HiddenFileFilter
def listTests: Seq[ScriptedTest] = {
IO.listFiles(baseDirectory, filter) flatMap { group =>
val groupName = group.getName
listTests(group).map(ScriptedTest(groupName, _))
}
}
def listTests: Seq[ScriptedTest] =
IO.listFiles(baseDirectory, filter)
.flatMap { group =>
val groupName = group.getName
listTests(group).map(ScriptedTest(groupName, _))
}
.toSeq
private[this] def listTests(group: File): Set[String] = {
val groupName = group.getName

View File

@ -28,7 +28,7 @@ object Transform:
final class TaskAndValue[T](val task: Task[T], val value: T)
def dummyMap(dummyMap: DummyTaskMap): [A] => TaskId[A] => Option[Task[A]] = {
val pmap = new DelegatingPMap[TaskId, Task](new collection.mutable.ListMap)
val pmap = new DelegatingPMap[TaskId, Task](new collection.mutable.HashMap)
def add[T](dummy: TaskAndValue[T]): Unit = {
pmap(dummy.task) = fromDummyStrict(dummy.task, dummy.value)
}

Some files were not shown because too many files have changed in this diff Show More