Remove implicit params; change to using clauses

Authored by xuwei-k on 2024-11-17 09:26:16 +09:00; committed by kenji yoshida
parent c5b7038f3a
commit d193c990d1
48 changed files with 132 additions and 131 deletions
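
The change is mechanical throughout the diff: Scala 2 implicit parameter lists become Scala 3 using clauses, and call sites that passed such an argument explicitly gain the using keyword. A minimal, self-contained sketch of the pattern, with invented names (Render, describeOld, describeNew) that are not part of sbt:

// Sketch only, not sbt code. It shows the two edits this commit makes everywhere:
// 1) declaration site: (implicit r: Render[A]) becomes (using r: Render[A])
// 2) call site: an explicitly supplied contextual argument needs the using keyword.
trait Render[A]:
  def render(a: A): String

object Render:
  given intRender: Render[Int] with
    def render(a: Int): String = a.toString

def describeOld[A](a: A)(implicit r: Render[A]): String = r.render(a) // Scala 2 style
def describeNew[A](a: A)(using r: Render[A]): String = r.render(a)    // Scala 3 style

@main def usingDemo(): Unit =
  println(describeNew(42))                         // resolved from the given in scope
  println(describeNew(42)(using Render.intRender)) // f(x)(ev) becomes f(x)(using ev)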

View File

@ -137,7 +137,7 @@ sealed trait RichParser[A] {
examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false)
/** Converts a Parser returning a Char sequence to a Parser returning a String. */
def string(implicit ev: A <:< Seq[Char]): Parser[String]
def string(using ev: A <:< Seq[Char]): Parser[String]
/**
* Produces a Parser that filters the original parser. If 'f' is not true when applied to the
@ -372,7 +372,7 @@ trait ParserMain {
Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples)
def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg)
def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString)
def string(using ev: A <:< Seq[Char]): Parser[String] = map(_.mkString)
def flatMap[B](f: A => Parser[B]) = bindParser(a, f)
}

View File

@ -58,7 +58,7 @@ class ManagedLogger(
final def warnEvent[A: JsonFormat: StringTypeTag](event: => A): Unit = logEvent(Level.Warn, event)
final def errorEvent[A: JsonFormat: StringTypeTag](event: => A): Unit =
logEvent(Level.Error, event)
def logEvent[A: JsonFormat](level: Level.Value, event: => A)(implicit
def logEvent[A: JsonFormat](level: Level.Value, event: => A)(using
tag: StringTypeTag[A]
): Unit = {
val v: A = event
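
In methods such as logEvent above, a context bound ([A: JsonFormat]) and a using clause now sit side by side; the commit rewrites only the explicit implicit parameter list and leaves context bounds untouched. A hypothetical sketch of that shape (Tag, Fmt, and logLike are invented names):

trait Tag[A]:
  def name: String

trait Fmt[A]:
  def fmt(a: A): String

given Tag[Int] with
  def name = "int"

given Fmt[Int] with
  def fmt(a: Int) = a.toString

// [A: Fmt] is a context bound; the second list is the former (implicit tag: ...) parameter.
def logLike[A: Fmt](value: A)(using tag: Tag[A]): String =
  s"[${tag.name}] ${summon[Fmt[A]].fmt(value)}"

@main def contextBoundDemo(): Unit = println(logLike(3)) // prints [int] 3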

View File

@ -84,11 +84,11 @@ object LogWriterTest extends Properties("Log Writer") {
implicit lazy val arbNewLine: Arbitrary[NewLine] = Arbitrary(genNewLine)
implicit lazy val arbLevel: Arbitrary[Level.Value] = Arbitrary(genLevel)
implicit def genLine(implicit logG: Gen[ToLog]): Gen[List[ToLog]] =
implicit def genLine(using logG: Gen[ToLog]): Gen[List[ToLog]] =
for (l <- listOf[ToLog](MaxSegments); last <- logG)
yield (addNewline(last) :: l.filter(!_.content.isEmpty)).reverse
implicit def genLog(implicit content: Arbitrary[String], byChar: Arbitrary[Boolean]): Gen[ToLog] =
implicit def genLog(using content: Arbitrary[String], byChar: Arbitrary[Boolean]): Gen[ToLog] =
for (c <- content.arbitrary; by <- byChar.arbitrary) yield {
assert(c != null)
new ToLog(removeNewlines(c), by)
@ -110,7 +110,7 @@ object LogWriterTest extends Properties("Log Writer") {
l.byCharacter
) // \n will be replaced by a random line terminator for all lines
def listOf[T](max: Int)(implicit content: Arbitrary[T]): Gen[List[T]] =
def listOf[T](max: Int)(using content: Arbitrary[T]): Gen[List[T]] =
Gen.choose(0, max) flatMap (sz => listOfN(sz, content.arbitrary))
}
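
The generator definitions above keep their implicit def / implicit lazy val form; only the parameter lists change, and Scala 3 accepts that mix. A hypothetical ScalaCheck sketch, assuming scalacheck is on the classpath (genPair is an invented name):

import org.scalacheck.{ Arbitrary, Gen }

// Scala 2 style definition keyword, Scala 3 style contextual parameters, as in genLine/genLog.
// Converting these definitions to given would be a separate, larger change.
implicit def genPair(using a: Arbitrary[Int], b: Arbitrary[String]): Gen[(Int, String)] =
  for (i <- a.arbitrary; s <- b.arbitrary) yield (i, s)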

View File

@ -14,7 +14,7 @@ final case class InterProjectRepository(projects: Seq[Project]) extends Reposito
module: Module,
version: String,
fetch: Repository.Fetch[F]
)(implicit
)(using
F: Monad[F]
): EitherT[F, String, (ArtifactSource, Project)] = {

View File

@ -138,7 +138,7 @@ final class TemporaryInMemoryRepository private (
module: Module,
version: String,
fetch: Repository.Fetch[F]
)(implicit
)(using
F: Monad[F]
): EitherT[F, String, (ArtifactSource, Project)] = {

View File

@ -100,6 +100,6 @@ object IvyRepoSpec extends BaseIvySpecification {
lazy val testIvy = {
val repoUrl = getClass.getResource("/test-ivy-repo")
Resolver.url("Test Repo", repoUrl)(Resolver.ivyStylePatterns)
Resolver.url("Test Repo", repoUrl)(using Resolver.ivyStylePatterns)
}
}

View File

@ -7,7 +7,7 @@ import verify.BasicTestSuite
class ResolverSpec extends BasicTestSuite {
test("Resolver.url") {
Resolver.url("Test Repo", new URI("http://example.com/").toURL)(Resolver.ivyStylePatterns)
Resolver.url("Test Repo", new URI("http://example.com/").toURL)(using Resolver.ivyStylePatterns)
()
}

View File

@ -31,7 +31,7 @@ final class Console(compiler: AnalyzingCompiler) {
cleanupCommands: String,
log: Logger
): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(log)
apply(classpath, options, initialCommands, cleanupCommands)(None, Nil)(using log)
def apply(
classpath: Seq[File],
@ -39,7 +39,7 @@ final class Console(compiler: AnalyzingCompiler) {
loader: ClassLoader,
initialCommands: String,
cleanupCommands: String
)(bindings: (String, Any)*)(implicit log: Logger): Try[Unit] =
)(bindings: (String, Any)*)(using log: Logger): Try[Unit] =
apply(classpath, options, initialCommands, cleanupCommands)(Some(loader), bindings)
def apply(
@ -47,7 +47,7 @@ final class Console(compiler: AnalyzingCompiler) {
options: Seq[String],
initialCommands: String,
cleanupCommands: String
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(using log: Logger): Try[Unit] = {
apply(classpath, options, initialCommands, cleanupCommands, Terminal.get)(loader, bindings)
}
def apply(
@ -56,7 +56,7 @@ final class Console(compiler: AnalyzingCompiler) {
initialCommands: String,
cleanupCommands: String,
terminal: Terminal
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(implicit log: Logger): Try[Unit] = {
)(loader: Option[ClassLoader], bindings: Seq[(String, Any)])(using log: Logger): Try[Unit] = {
def console0(): Unit =
try {
compiler.console(

View File

@ -72,7 +72,8 @@ object Sync {
noDuplicateTargets(relation)
val currentInfo = relation._1s.map(s => (s, inStyle(s))).toMap
val (previousRelation, previousInfo) = readInfoWrapped(store, fileConverter)(inStyle.format)
val (previousRelation, previousInfo) =
readInfoWrapped(store, fileConverter)(using inStyle.format)
val removeTargets = previousRelation._2s -- relation._2s
def outofdate(source: File, target: File): Boolean =
@ -89,7 +90,7 @@ object Sync {
IO.deleteIfEmpty(cleanDirs)
updates.all.foreach((copy).tupled)
writeInfoVirtual(store, relation, currentInfo, fileConverter)(inStyle.format)
writeInfoVirtual(store, relation, currentInfo, fileConverter)(using inStyle.format)
relation
}
@ -109,7 +110,7 @@ object Sync {
sys.error("Duplicate mappings:" + dups.mkString)
}
implicit def relationFormat[A, B](implicit
implicit def relationFormat[A, B](using
af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]
): JsonFormat[Relation[A, B]] =
@ -145,7 +146,7 @@ object Sync {
store: CacheStore,
relation: Relation[File, File],
info: Map[File, F]
)(implicit infoFormat: JsonFormat[F]): Unit =
)(using infoFormat: JsonFormat[F]): Unit =
given IsoString[File] = fileIsoString
import PathOnlyFormats.given
store.write((relation, info))
@ -155,7 +156,7 @@ object Sync {
relation: Relation[File, File],
info: Map[File, F],
fileConverter: FileConverter
)(implicit infoFormat: JsonFormat[F]): Unit = {
)(using infoFormat: JsonFormat[F]): Unit = {
val virtualRelation: Relation[VirtualFileRef, VirtualFileRef] =
Relation.switch(relation, (f: File) => fileConverter.toVirtualFile(f.toPath))
val virtualInfo: Map[VirtualFileRef, F] = info.map { case (file, fileInfo) =>
@ -176,10 +177,10 @@ object Sync {
type RelationInfo[F] = (Relation[File, File], Map[File, F])
type RelationInfoVirtual[F] = (Relation[VirtualFileRef, VirtualFileRef], Map[VirtualFileRef, F])
def readInfoWrapped[F <: FileInfo](store: CacheStore, fileConverter: FileConverter)(implicit
def readInfoWrapped[F <: FileInfo](store: CacheStore, fileConverter: FileConverter)(using
infoFormat: JsonFormat[F]
): RelationInfo[F] = {
convertFromVirtual(readInfoVirtual(store)(infoFormat), fileConverter)
convertFromVirtual(readInfoVirtual(store)(using infoFormat), fileConverter)
}
def convertFromVirtual[F <: FileInfo](
@ -195,9 +196,9 @@ object Sync {
def readInfo[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
)(using infoFormat: JsonFormat[F]): RelationInfo[F] =
try {
readUncaught[F](store)(infoFormat)
readUncaught[F](store)(using infoFormat)
} catch {
case _: IOException => (Relation.empty[File, File], Map.empty[File, F])
case _: ZipException => (Relation.empty[File, File], Map.empty[File, F])
@ -210,9 +211,9 @@ object Sync {
def readInfoVirtual[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfoVirtual[F] =
)(using infoFormat: JsonFormat[F]): RelationInfoVirtual[F] =
try {
readUncaughtVirtual[F](store)(infoFormat)
readUncaughtVirtual[F](store)(using infoFormat)
} catch {
case _: IOException =>
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])
@ -228,14 +229,14 @@ object Sync {
private def readUncaught[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfo[F] =
)(using infoFormat: JsonFormat[F]): RelationInfo[F] =
given IsoString[File] = fileIsoString
import PathOnlyFormats.given
store.read(default = (Relation.empty[File, File], Map.empty[File, F]))
private def readUncaughtVirtual[F <: FileInfo](
store: CacheStore
)(implicit infoFormat: JsonFormat[F]): RelationInfoVirtual[F] = {
)(using infoFormat: JsonFormat[F]): RelationInfoVirtual[F] = {
import sjsonnew.IsoString
implicit def virtualFileRefStringIso: IsoString[VirtualFileRef] =
IsoString.iso[VirtualFileRef](_.toString, VirtualFileRef.of(_))

View File

@ -40,7 +40,7 @@ class CacheIvyTest extends Properties("CacheIvy") {
private def testCache[T: JsonFormat, U](
f: (SingletonCache[T], CacheStore) => U
)(implicit cache: SingletonCache[T]): U = {
)(using cache: SingletonCache[T]): U = {
val store = new InMemoryStore(Converter)
f(cache, store)
}

View File

@ -223,7 +223,7 @@ object Def extends BuildSyntax with Init with InitializeImplicits:
s"Scope cannot be defined for dependency ${k.key.label} of ${definedSettingString(s)}"
)
override def intersect(s1: Scope, s2: Scope)(implicit
override def intersect(s1: Scope, s2: Scope)(using
delegates: Scope => Seq[Scope]
): Option[Scope] =
if (s2 == GlobalScope) Some(s1) // s1 is more specific

View File

@ -153,7 +153,7 @@ object Previous {
)
/** Public as a macro implementation detail. Do not call directly. */
def runtime[T](skey: TaskKey[T])(implicit format: JsonFormat[T]): Initialize[Task[Option[T]]] = {
def runtime[T](skey: TaskKey[T])(using format: JsonFormat[T]): Initialize[Task[Option[T]]] = {
type Inputs = (Task[Previous], ScopedKey[Task[T]], References)
val inputs = (Global / cache, Def.validated(skey, selfRefOk = true), Global / references)
Def.app[Inputs, Task[Option[T]]](inputs) { case (prevTask, resolved, refs) =>
@ -164,7 +164,7 @@ object Previous {
}
/** Public as a macro implementation detail. Do not call directly. */
def runtimeInEnclosingTask[T](skey: TaskKey[T])(implicit
def runtimeInEnclosingTask[T](skey: TaskKey[T])(using
format: JsonFormat[T]
): Initialize[Task[Option[T]]] = {
type Inputs = (Task[Previous], ScopedKey[Task[T]], References, ScopedKey[?])

View File

@ -100,7 +100,7 @@ object TaskMacro:
Expr.summon[JsonFormat[A1]] match
case Some(ev) =>
'{
InputWrapper.`wrapInitTask_\u2603\u2603`[Option[A1]](Previous.runtime[A1]($t)($ev))
InputWrapper.`wrapInitTask_\u2603\u2603`[Option[A1]](Previous.runtime[A1]($t)(using $ev))
}
case _ => report.errorAndAbort(s"JsonFormat[${Type.show[A1]}] missing")

View File

@ -2178,7 +2178,7 @@ object Defaults extends BuildCommon {
def consoleProjectTask =
Def.task {
ConsoleProject(state.value, (consoleProject / initialCommands).value)(streams.value.log)
ConsoleProject(state.value, (consoleProject / initialCommands).value)(using streams.value.log)
println()
}
@ -2201,7 +2201,7 @@ object Defaults extends BuildCommon {
val sc = (task / scalacOptions).value
val ic = (task / initialCommands).value
val cc = (task / cleanupCommands).value
(new Console(compiler))(cpFiles, sc, loader, ic, cc)()(s.log).get
(new Console(compiler))(cpFiles, sc, loader, ic, cc)()(using s.log).get
println()
}
@ -4512,10 +4512,10 @@ object Classpaths {
case "file" =>
// This hackery is to deal suitably with UNC paths on Windows. Once we can assume Java7, Paths should save us from this.
val file = IO.toFile(i.url)
Resolver.file(i.id, file)(patterns)
Resolver.file(i.id, file)(using patterns)
case _ =>
Resolver
.url(i.id, i.url)(patterns)
.url(i.id, i.url)(using patterns)
.withAllowInsecureProtocol(allowInsecureProtocol(i))
}
case p: xsbti.PredefinedRepository =>
@ -4805,7 +4805,7 @@ trait BuildCommon {
def getFromContext[T](task: TaskKey[T], context: ScopedKey[?], s: State): Option[T] =
SessionVar.get(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[?], s: State)(implicit
def loadFromContext[T](task: TaskKey[T], context: ScopedKey[?], s: State)(using
f: JsonFormat[T]
): Option[T] =
SessionVar.load(SessionVar.resolveContext(task.scopedKey, context.scope, s), s)
@ -4813,13 +4813,13 @@ trait BuildCommon {
// intended for use in constructing InputTasks
def loadForParser[P, T](task: TaskKey[T])(
f: (State, Option[T]) => Parser[P]
)(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
loadForParserI(task)(Def.value(f))(format)
)(using format: JsonFormat[T]): Initialize[State => Parser[P]] =
loadForParserI(task)(Def.value(f))(using format)
def loadForParserI[P, T](task: TaskKey[T])(
init: Initialize[(State, Option[T]) => Parser[P]]
)(implicit format: JsonFormat[T]): Initialize[State => Parser[P]] =
)(using format: JsonFormat[T]): Initialize[State => Parser[P]] =
Def.setting { (s: State) =>
init.value(s, loadFromContext(task, resolvedScoped.value, s)(format))
init.value(s, loadFromContext(task, resolvedScoped.value, s)(using format))
}
def getForParser[P, T](
@ -4834,8 +4834,8 @@ trait BuildCommon {
}
// these are for use for constructing Tasks
def loadPrevious[T](task: TaskKey[T])(implicit f: JsonFormat[T]): Initialize[Task[Option[T]]] =
Def.task { loadFromContext(task, resolvedScoped.value, state.value)(f) }
def loadPrevious[T](task: TaskKey[T])(using f: JsonFormat[T]): Initialize[Task[Option[T]]] =
Def.task { loadFromContext(task, resolvedScoped.value, state.value)(using f) }
def getPrevious[A](task: TaskKey[A]): Initialize[Task[Option[A]]] =
Def.task { getFromContext(task, resolvedScoped.value, state.value) }

View File

@ -426,7 +426,7 @@ object EvaluateTask {
private def contextDisplay(state: State, highlight: Boolean) =
Project.showContextKey(state, if (highlight) Some(RED) else None)
def suppressedMessage(key: ScopedKey[?])(implicit display: Show[ScopedKey[?]]): String =
def suppressedMessage(key: ScopedKey[?])(using display: Show[ScopedKey[?]]): String =
"Stack trace suppressed. Run 'last %s' for the full log.".format(display.show(key))
def getStreams(key: ScopedKey[?], streams: Streams): TaskStreams =

View File

@ -115,15 +115,15 @@ final case class Extracted(
private def resolve[K <: Scoped.ScopingSetting[K] & Scoped](key: K): K =
Scope.resolveScope(GlobalScope, currentRef.build, rootProject)(key.scope) / key
private def getOrError[T](key: ScopedKey[?], value: Option[T])(implicit
private def getOrError[T](key: ScopedKey[?], value: Option[T])(using
display: Show[ScopedKey[?]]
): T =
value.getOrElse(sys.error(display.show(key) + " is undefined."))
private def getOrError[T](key: ScopedKey[T])(implicit
private def getOrError[T](key: ScopedKey[T])(using
display: Show[ScopedKey[?]]
): T =
getOrError(key, structure.data.get(key))(display)
getOrError(key, structure.data.get(key))(using display)
@deprecated(
"This discards session settings. Migrate to appendWithSession or appendWithoutSession.",

View File

@ -587,7 +587,7 @@ object BuiltinCommands {
val loggerInject = LogManager.settingsLogger(s)
val withLogger = newSession.appendRaw(loggerInject :: Nil)
val show = Project.showContextKey2(newSession)
val newStructure = Load.reapply(withLogger.mergeSettings, structure)(show)
val newStructure = Load.reapply(withLogger.mergeSettings, structure)(using show)
Project.setProject(newSession, newStructure, s)
}

View File

@ -57,7 +57,7 @@ object Opts {
"sonatype-staging",
"https://oss.sonatype.org/service/local/staging/deploy/maven2"
)
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository")(
val mavenLocalFile = Resolver.file("Local Repository", userHome / ".m2" / "repository")(using
Resolver.defaultPatterns
)
val sbtSnapshots = Resolver.bintrayRepo("sbt", "maven-snapshots")

View File

@ -649,7 +649,7 @@ trait ProjectExtra extends Scoped.Syntax:
SessionVar.resolveContext(key, scoped.scope, state),
state,
value
)(f)
)(using f)
)
}
@ -705,7 +705,7 @@ trait ProjectExtra extends Scoped.Syntax:
new Constructor(p)
*/
implicit def classpathDependency[T](p: T)(implicit
implicit def classpathDependency[T](p: T)(using
ev: T => ProjectReference
): ClasspathDependency =
ClasspathDependency(ev(p), None)

View File

@ -47,7 +47,7 @@ sealed trait ProjectMatrix extends CompositeProject {
def dependsOn(deps: MatrixClasspathDep[ProjectMatrixReference]*): ProjectMatrix
/** Adds classpath dependencies on internal or external non-matrix projects. */
def dependsOn(deps: ClasspathDep[ProjectReference]*)(implicit
def dependsOn(deps: ClasspathDep[ProjectReference]*)(using
dummyImplicit: DummyImplicit
): ProjectMatrix
@ -60,7 +60,7 @@ sealed trait ProjectMatrix extends CompositeProject {
/**
* Allows non-matrix projects to be aggregated in a matrix project.
*/
def aggregate(refs: ProjectReference*)(implicit dummyImplicit: DummyImplicit): ProjectMatrix
def aggregate(refs: ProjectReference*)(using dummyImplicit: DummyImplicit): ProjectMatrix
/** Appends settings to the current settings sequence for this project. */
def settings(ss: Def.SettingsDefinition*): ProjectMatrix
@ -401,7 +401,7 @@ object ProjectMatrix {
override def aggregate(refs: ProjectMatrixReference*): ProjectMatrix =
copy(aggregate = (aggregate: Seq[ProjectMatrixReference]) ++ refs)
override def aggregate(refs: ProjectReference*)(implicit
override def aggregate(refs: ProjectReference*)(using
dummyImplicit: DummyImplicit
): ProjectMatrix =
copy(nonMatrixAggregate = (nonMatrixAggregate: Seq[ProjectReference]) ++ refs)
@ -409,7 +409,7 @@ object ProjectMatrix {
override def dependsOn(deps: MatrixClasspathDep[ProjectMatrixReference]*): ProjectMatrix =
copy(dependencies = dependencies ++ deps)
override def dependsOn(deps: ClasspathDep[ProjectReference]*)(implicit
override def dependsOn(deps: ClasspathDep[ProjectReference]*)(using
dummyImplicit: DummyImplicit
) =
copy(nonMatrixDependencies = nonMatrixDependencies ++ deps)
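
The (using dummyImplicit: DummyImplicit) clauses above preserve the usual overload-disambiguation trick: two vararg overloads that would otherwise erase to the same signature are told apart by the extra contextual parameter, which callers never supply by hand. A hypothetical sketch (Registry and add are invented names):

class Registry private (val entries: List[String]):
  def add(xs: Int*): Registry = new Registry(entries ++ xs.map(_.toString))
  // The DummyImplicit list only exists to give this overload a distinct erased signature.
  def add(xs: String*)(using DummyImplicit): Registry = new Registry(entries ++ xs)

object Registry:
  def apply(): Registry = new Registry(Nil)

@main def dummyImplicitDemo(): Unit =
  println(Registry().add(1, 2).add("a", "b").entries) // List(1, 2, a, b)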

View File

@ -29,14 +29,14 @@ object SessionVar {
}
def emptyMap = Map(IMap.empty)
def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit
def persistAndSet[T](key: ScopedKey[Task[T]], state: State, value: T)(using
f: JsonFormat[T]
): State = {
persist(key, state, value)(f)
persist(key, state, value)(using f)
set(key, state, value)
}
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(implicit f: JsonFormat[T]): Unit =
def persist[T](key: ScopedKey[Task[T]], state: State, value: T)(using f: JsonFormat[T]): Unit =
Project.structure(state).streams(state).use(key)(s => s.getOutput(DefaultDataID).write(value))
def clear(s: State): State = s.put(sessionVars, SessionVar.emptyMap)
@ -65,23 +65,23 @@ object SessionVar {
.definingKey(subScoped)
.getOrElse(subScoped)
def read[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
def read[T](key: ScopedKey[Task[T]], state: State)(using f: JsonFormat[T]): Option[T] =
Project.structure(state).streams(state).use(key) { s =>
try {
Some(s.getInput(key, DefaultDataID).read[T]())
} catch { case NonFatal(_) => None }
}
def load[T](key: ScopedKey[Task[T]], state: State)(implicit f: JsonFormat[T]): Option[T] =
get(key, state) orElse read(key, state)(f)
def load[T](key: ScopedKey[Task[T]], state: State)(using f: JsonFormat[T]): Option[T] =
get(key, state) orElse read(key, state)(using f)
def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)(implicit
def loadAndSet[T](key: ScopedKey[Task[T]], state: State, setIfUnset: Boolean = true)(using
f: JsonFormat[T]
): (State, Option[T]) =
get(key, state) match {
case s: Some[T] => (state, s)
case None =>
read(key, state)(f) match {
read(key, state)(using f) match {
case s @ Some(t) =>
val newState =
if (setIfUnset && get(key, state).isDefined) state else set(key, state, t)

View File

@ -119,7 +119,7 @@ object Act {
): Parser[ParsedKey] =
scopedKeyFull(index, current, defaultConfigs, keyMap, askProject = askProject).flatMap {
choices =>
select(choices, data)(showRelativeKey2(current))
select(choices, data)(using showRelativeKey2(current))
}
def scopedKeyFull(
@ -196,7 +196,7 @@ object Act {
key
)
def select(allKeys: Seq[Parser[ParsedKey]], data: Def.Settings)(implicit
def select(allKeys: Seq[Parser[ParsedKey]], data: Def.Settings)(using
show: Show[ScopedKey[?]]
): Parser[ParsedKey] =
seq(allKeys) flatMap { ss =>
@ -206,7 +206,7 @@ object Act {
selectFromValid(ss filter isValid(data), default)
}
def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(implicit
def selectFromValid(ss: Seq[ParsedKey], default: Parser[ParsedKey])(using
show: Show[ScopedKey[?]]
): Parser[ParsedKey] =
selectByTask(selectByConfig(ss)) match {
@ -231,7 +231,7 @@ object Act {
def noValidKeys = failure("No such key.")
def showAmbiguous(keys: Seq[ScopedKey[?]])(implicit show: Show[ScopedKey[?]]): String =
def showAmbiguous(keys: Seq[ScopedKey[?]])(using show: Show[ScopedKey[?]]): String =
keys.take(3).map(x => show.show(x)).mkString("", ", ", if (keys.size > 3) ", ..." else "")
def isValid(data: Def.Settings)(parsed: ParsedKey): Boolean = data.contains(parsed.key)

View File

@ -48,7 +48,7 @@ object Aggregation {
success = true
)
def printSettings(xs: Seq[KeyValue[?]], print: String => Unit)(implicit
def printSettings(xs: Seq[KeyValue[?]], print: String => Unit)(using
display: Show[ScopedKey[?]]
): Unit =
xs match {
@ -68,10 +68,10 @@ object Aggregation {
s: State,
ps: Values[Parser[Task[T]]],
show: ShowConfig
)(implicit display: Show[ScopedKey[?]]): Parser[() => State] =
)(using display: Show[ScopedKey[?]]): Parser[() => State] =
Command.applyEffect(seqParser(ps))(ts => runTasks(s, ts, DummyTaskMap(Nil), show))
private def showRun[A](complete: Complete[A], show: ShowConfig)(implicit
private def showRun[A](complete: Complete[A], show: ShowConfig)(using
display: Show[ScopedKey[?]]
): Unit =
import complete.*
@ -187,7 +187,7 @@ object Aggregation {
s: State,
inputs: Values[InputTask[I]],
show: ShowConfig
)(implicit display: Show[ScopedKey[?]]): Parser[() => State] = {
)(using display: Show[ScopedKey[?]]): Parser[() => State] = {
val parsers =
for (KeyValue(k, it) <- inputs)
yield it.parser(s).map(v => KeyValue(k, v))
@ -196,7 +196,7 @@ object Aggregation {
}
}
def evaluatingParser(s: State, show: ShowConfig)(keys: Seq[KeyValue[?]])(implicit
def evaluatingParser(s: State, show: ShowConfig)(keys: Seq[KeyValue[?]])(using
display: Show[ScopedKey[?]]
): Parser[() => State] = {

View File

@ -152,7 +152,7 @@ private[sbt] object Clean {
end ToSeqPath
extension [T](t: T) {
private def toSeqPath(implicit toSeqPath: ToSeqPath[T]): Seq[Path] = toSeqPath(t)
private def toSeqPath(using toSeqPath: ToSeqPath[T]): Seq[Path] = toSeqPath(t)
}
@nowarn
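
Extension methods get the same treatment: the contextual parameter of toSeqPath above is now declared with using. A hypothetical standalone sketch (ToPaths and asPaths are invented names):

import java.nio.file.{ Path, Paths }

trait ToPaths[T]:
  def apply(t: T): Seq[Path]

given ToPaths[String] with
  def apply(s: String): Seq[Path] = Seq(Paths.get(s))

// An extension method can pull in its type class via a using clause, as Clean.toSeqPath does.
extension [T](t: T)
  def asPaths(using tp: ToPaths[T]): Seq[Path] = tp(t)

@main def extensionDemo(): Unit = println("build.sbt".asPaths)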

View File

@ -18,7 +18,7 @@ import xsbti.compile.ClasspathOptionsUtil
object ConsoleProject {
def apply(state: State, extra: String, cleanupCommands: String = "", options: Seq[String] = Nil)(
implicit log: Logger
using log: Logger
): Unit = {
val extracted = Project.extract(state)
val cpImports = new Imports(extracted, state)

View File

@ -175,7 +175,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
scopedKey: ScopedKey[?],
compiledMap: CompiledMap,
dynamicInputs: mutable.Set[DynamicInput],
)(implicit extracted: Extracted, logger: Logger): Config = {
)(using extracted: Extracted, logger: Logger): Config = {
// Extract all of the globs that we will monitor during the continuous build.
val inputs = {
@ -247,7 +247,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
state: State,
commands: Seq[String],
dynamicInputs: mutable.Set[DynamicInput],
)(implicit extracted: Extracted, logger: Logger): Seq[Config] = {
)(using extracted: Extracted, logger: Logger): Seq[Config] = {
val commandKeys = commands.map(parseCommand(_, state))
val compiledMap = WatchTransitiveDependencies.compile(extracted.structure)
commandKeys.flatMap(_.map(getConfig(state, _, compiledMap, dynamicInputs)))
@ -317,7 +317,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
isCommand: Boolean,
commands: Seq[String],
fileStampCache: FileStamp.Cache
)(implicit
)(using
extracted: Extracted
): Callbacks = {
val project = extracted.currentRef
@ -442,7 +442,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
commands: Seq[String],
fileStampCache: FileStamp.Cache,
channel: String,
)(implicit extracted: Extracted): (Int => Option[(Watch.Event, Watch.Action)], () => Unit) = {
)(using extracted: Extracted): (Int => Option[(Watch.Event, Watch.Action)], () => Unit) = {
val trackMetaBuild = configs.forall(_.watchSettings.trackMetaBuild)
val buildGlobs =
if (trackMetaBuild) extracted.getOpt((checkBuildSources / fileInputs)).getOrElse(Nil)
@ -751,7 +751,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
state: State,
terminal: Terminal,
logger: Logger,
)(implicit extracted: Extracted): WatchExecutor => Option[Watch.Action] = {
)(using extracted: Extracted): WatchExecutor => Option[Watch.Action] = {
/*
* This parses the buffer until all possible actions are extracted. By draining the input
* to a state where it does not parse an action, we can wait until we receive new input
@ -989,7 +989,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
end Config
@nowarn
private def getStartMessage(key: ScopedKey[?])(implicit e: Extracted): StartMessage = Some {
private def getStartMessage(key: ScopedKey[?])(using e: Extracted): StartMessage = Some {
lazy val default = key.get(watchStartMessage).getOrElse(Watch.defaultStartWatch)
key.get(deprecatedWatchingMessage).map(Left(_)).getOrElse(Right(default))
}
@ -997,7 +997,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
@nowarn
private def getTriggerMessage(
key: ScopedKey[?]
)(implicit e: Extracted): TriggerMessage = {
)(using e: Extracted): TriggerMessage = {
lazy val default =
key.get(watchTriggeredMessage).getOrElse(Watch.defaultOnTriggerMessage)
key.get(deprecatedTriggeredMessage).map(Left(_)).getOrElse(Right(default))
@ -1049,7 +1049,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
* @return the optional value of the [[SettingKey]] if it is defined at the input
* [[ScopedKey]] instance's scope or task scope.
*/
private def get[T](settingKey: SettingKey[T])(implicit extracted: Extracted): Option[T] = {
private def get[T](settingKey: SettingKey[T])(using extracted: Extracted): Option[T] = {
lazy val taskScope = Project.fillTaskAxis(scopedKey).scope
scopedKey.scope match {
case scope if scope.task.toOption.isDefined =>
@ -1071,7 +1071,7 @@ private[sbt] object Continuous extends DeprecatedContinuous {
* @return the optional value of the [[SettingKey]] if it is defined at the input
* [[ScopedKey]] instance's scope or task scope.
*/
private def get[T](taskKey: TaskKey[T])(implicit extracted: Extracted): Option[TaskKey[T]] = {
private def get[T](taskKey: TaskKey[T])(using extracted: Extracted): Option[TaskKey[T]] = {
lazy val taskScope = Project.fillTaskAxis(scopedKey).scope
scopedKey.scope match {
case scope if scope.task.toOption.isDefined =>

View File

@ -64,7 +64,7 @@ private[sbt] abstract class BackgroundJob {
// called after stop or on spontaneous exit, closing the result
// removes the listener
def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable
def onStop(listener: () => Unit)(using ex: ExecutionContext): Closeable
// do we need this or is the spawning task good enough?
// def tags: SomeType
@ -428,7 +428,7 @@ private[sbt] class BackgroundThreadPool extends java.io.Closeable {
}
}
override def onStop(listener: () => Unit)(implicit ex: ExecutionContext): Closeable =
override def onStop(listener: () => Unit)(using ex: ExecutionContext): Closeable =
synchronized {
val result = new StopListener(listener, ex)
stopListeners += result

View File

@ -54,14 +54,14 @@ private[sbt] object InMemoryCacheStore {
private class CacheStoreImpl(path: Path, store: InMemoryCacheStore, cacheStore: CacheStore)
extends CacheStore {
override def delete(): Unit = cacheStore.delete()
override def read[T]()(implicit reader: JsonReader[T]): T = {
override def read[T]()(using reader: JsonReader[T]): T = {
val lastModified = IO.getModifiedTimeOrZero(path.toFile)
store.get[T](path) match {
case Some((value, `lastModified`)) => value
case _ => cacheStore.read[T]()
}
}
override def write[T](value: T)(implicit writer: JsonWriter[T]): Unit = {
override def write[T](value: T)(using writer: JsonWriter[T]): Unit = {
/*
* This may be inefficient if multiple threads are concurrently modifying the file.
* There is an assumption that there will be little to no concurrency at the file level

View File

@ -391,7 +391,7 @@ private[sbt] object Load {
}
// Reevaluates settings after modifying them. Does not recompile or reload any build components.
def reapply(newSettings: Seq[Setting[?]], structure: BuildStructure)(implicit
def reapply(newSettings: Seq[Setting[?]], structure: BuildStructure)(using
display: Show[ScopedKey[?]]
): BuildStructure = {
val transformed = finalTransforms(newSettings)
@ -419,7 +419,7 @@ private[sbt] object Load {
newSettings: Seq[Setting[?]],
structure: BuildStructure,
log: Logger
)(implicit display: Show[ScopedKey[?]]): BuildStructure = {
)(using display: Show[ScopedKey[?]]): BuildStructure = {
reapply(newSettings, structure)
}

View File

@ -58,7 +58,7 @@ object Output {
def grep(lines: Seq[String], patternString: String): Seq[String] =
lines.flatMap(showMatches(Pattern.compile(patternString)))
def flatLines(outputs: Values[Seq[String]])(f: Seq[String] => Seq[String])(implicit
def flatLines(outputs: Values[Seq[String]])(f: Seq[String] => Seq[String])(using
display: Show[ScopedKey[?]]
): Seq[String] = {
val single = outputs.size == 1

View File

@ -65,7 +65,7 @@ object PluginDiscovery:
/** Discovers the sbt-plugin-related top-level modules from the provided source `analysis`. */
def discoverSourceAll(analysis: CompileAnalysis): DiscoveredNames = {
def discover[T](implicit classTag: reflect.ClassTag[T]): Seq[String] =
def discover[T](using classTag: reflect.ClassTag[T]): Seq[String] =
sourceModuleNames(analysis, classTag.runtimeClass.getName)
new DiscoveredNames(discover[AutoPlugin], discover[BuildDef])
}

View File

@ -90,7 +90,7 @@ private[sbt] object SettingCompletions {
session: SessionSettings,
r: Relation[ScopedKey[?], ScopedKey[?]],
redefined: Seq[Setting[?]],
)(implicit show: Show[ScopedKey[?]]): SetResult = {
)(using show: Show[ScopedKey[?]]): SetResult = {
val redefinedKeys = redefined.map(_.key).toSet
val affectedKeys = redefinedKeys.flatMap(r.reverse)
def summary(verbose: Boolean): String = setSummary(redefinedKeys, affectedKeys, verbose)
@ -101,7 +101,7 @@ private[sbt] object SettingCompletions {
redefined: Set[ScopedKey[?]],
affected: Set[ScopedKey[?]],
verbose: Boolean,
)(implicit display: Show[ScopedKey[?]]): String = {
)(using display: Show[ScopedKey[?]]): String = {
val QuietLimit = 3
def strings(in: Set[ScopedKey[?]]): Seq[String] = in.toSeq.map(sk => display.show(sk)).sorted
def lines(in: Seq[String]): (String, Boolean) =

View File

@ -257,7 +257,7 @@ private[sbt] object Definition {
commandSource: CommandSource,
converter: FileConverter,
log: Logger,
)(implicit ec: ExecutionContext): Future[Unit] = Future {
)(using ec: ExecutionContext): Future[Unit] = Future {
val LspDefinitionLogHead = "lsp-definition"
val jsonDefinitionString = CompactPrinter(jsonDefinition)
log.debug(s"$LspDefinitionLogHead json request: $jsonDefinitionString")

View File

@ -68,7 +68,7 @@ private[sbt] object LanguageServerProtocol {
jsonRpcRespond(InitializeResult(serverCapabilities), Some(r.id))
case r: JsonRpcRequestMessage if r.method == "textDocument/definition" =>
val _ = Definition.lspDefinition(json(r), r.id, CommandSource(name), converter, log)(
val _ = Definition.lspDefinition(json(r), r.id, CommandSource(name), converter, log)(using
StandardMain.executionContext
)

View File

@ -73,7 +73,7 @@ object SettingQuery {
data: Def.Settings
): Parser[ParsedKey] =
scopedKeyFull(index, currentBuild, defaultConfigs, keyMap) flatMap { choices =>
Act.select(choices, data)(showBuildRelativeKey2(currentBuild))
Act.select(choices, data)(using showBuildRelativeKey2(currentBuild))
}
def scopedKey(

View File

@ -48,8 +48,8 @@ abstract class TestBuild {
cs <- Gen.list(alphaNumChar, MaxIDSizeGen)
} yield (c :: cs).mkString
def cGen = genConfigs(nonEmptyId map { _.capitalize }, MaxDepsGen, MaxConfigsGen)
def tGen = genTasks(kebabIdGen, MaxDepsGen, MaxTasksGen)
def cGen = genConfigs(using nonEmptyId map { _.capitalize }, MaxDepsGen, MaxConfigsGen)
def tGen = genTasks(using kebabIdGen, MaxDepsGen, MaxTasksGen)
class TestKeys(val env: Env, val scopes: Seq[Scope]) {
override def toString = env.toString + "\n" + scopes.mkString("Scopes:\n\t", "\n\t", "")
@ -312,7 +312,7 @@ abstract class TestBuild {
}
}
def genConfigs(implicit
def genConfigs(using
genName: Gen[String],
maxDeps: Range[Int],
count: Range[Int]
@ -323,7 +323,7 @@ abstract class TestBuild {
.withExtendsConfigs(deps.toVector)
)
def genTasks(implicit
def genTasks(using
genName: Gen[String],
maxDeps: Range[Int],
count: Range[Int]

View File

@ -190,7 +190,7 @@ class Run(private[sbt] val newLoader: Seq[NioPath] => ClassLoader, trapExit: Boo
/** This module is an interface to starting the scala interpreter or runner. */
object Run:
def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger)(implicit
def run(mainClass: String, classpath: Seq[NioPath], options: Seq[String], log: Logger)(using
runner: ScalaRun
) =
runner.run(mainClass, classpath, options, log)

View File

@ -242,7 +242,7 @@ trait TaskExtra extends TaskExtra0 {
IO.readLines(s.readText(key(in), sid))
}
}
implicit def processToTask(p: ProcessBuilder)(implicit streams: Task[TaskStreams[?]]): Task[Int] =
implicit def processToTask(p: ProcessBuilder)(using streams: Task[TaskStreams[?]]): Task[Int] =
streams map { s =>
val pio = TaskExtra.processIO(s)
(p run pio).exitValue()

View File

@ -17,7 +17,7 @@ trait BasicCacheImplicits extends HashedVirtualFileRefFormats { self: BasicJsonP
implicit def basicCache[I: JsonFormat, O: JsonFormat]: Cache[I, O] =
new BasicCache[I, O]()
def wrapIn[I, J](implicit f: I => J, g: J => I, jCache: SingletonCache[J]): SingletonCache[I] =
def wrapIn[I, J](using f: I => J, g: J => I, jCache: SingletonCache[J]): SingletonCache[I] =
new SingletonCache[I] {
override def read(from: Input): I = g(jCache.read(from))
override def write(to: Output, value: I) = jCache.write(to, f(value))

View File

@ -37,7 +37,7 @@ object Cache {
/**
* Materializes a cache.
*/
def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c
def cache[I, O](using c: Cache[I, O]): Cache[I, O] = c
/**
* Returns a function that represents a cache that inserts on miss.
@ -47,7 +47,7 @@ object Cache {
* @param default
* A function that computes a default value to insert on
*/
def cached[I, O](cacheFile: File)(default: I => O)(implicit cache: Cache[I, O]): I => O =
def cached[I, O](cacheFile: File)(default: I => O)(using cache: Cache[I, O]): I => O =
cached(CacheStore(cacheFile))(default)
/**
@ -58,7 +58,7 @@ object Cache {
* @param default
* A function that computes a default value to insert on
*/
def cached[I, O](store: CacheStore)(default: I => O)(implicit cache: Cache[I, O]): I => O =
def cached[I, O](store: CacheStore)(default: I => O)(using cache: Cache[I, O]): I => O =
key =>
cache(store)(key) match {
case Hit(value) =>

View File

@ -14,11 +14,11 @@ import sjsonnew.{ BasicJsonProtocol, Builder, deserializationError, JsonFormat,
object StampedFormat extends BasicJsonProtocol {
def apply[T](format: JsonFormat[T])(implicit mf: ClassTag[JsonFormat[T]]): JsonFormat[T] = {
def apply[T](format: JsonFormat[T])(using mf: ClassTag[JsonFormat[T]]): JsonFormat[T] = {
withStamp(stamp(format))(format)
}
def withStamp[T, S](stamp: S)(format: JsonFormat[T])(implicit
def withStamp[T, S](stamp: S)(format: JsonFormat[T])(using
formatStamp: JsonFormat[S],
equivStamp: Equiv[S]
): JsonFormat[T] =
@ -47,9 +47,9 @@ object StampedFormat extends BasicJsonProtocol {
}
}
private def stamp[T](format: JsonFormat[T])(implicit mf: ClassTag[JsonFormat[T]]): Int =
typeHash(mf)
private def stamp[T](format: JsonFormat[T])(using mf: ClassTag[JsonFormat[T]]): Int =
typeHash(using mf)
private def typeHash[T](implicit mf: ClassTag[T]) = mf.toString.hashCode
private def typeHash[T](using mf: ClassTag[T]) = mf.toString.hashCode
}

View File

@ -66,7 +66,7 @@ class CacheSpec extends AnyFlatSpec {
}
}
private def testCache[K, V](f: (Cache[K, V], CacheStore) => Unit)(implicit
private def testCache[K, V](f: (Cache[K, V], CacheStore) => Unit)(using
cache: Cache[K, V]
): Unit =
IO.withTemporaryDirectory { tmp =>

View File

@ -82,7 +82,7 @@ class SingletonCacheSpec extends AnyFlatSpec {
}
}
private def testCache[T](f: (SingletonCache[T], CacheStore) => Unit)(implicit
private def testCache[T](f: (SingletonCache[T], CacheStore) => Unit)(using
cache: SingletonCache[T]
): Unit =
IO.withTemporaryDirectory { tmp =>

View File

@ -67,7 +67,7 @@ class EvaluateSettings[I <: Init](
private val running = new AtomicInteger
private val cancel = new AtomicBoolean(false)
def run(implicit delegates: ScopeType => Seq[ScopeType]): Settings = {
def run(using delegates: ScopeType => Seq[ScopeType]): Settings = {
assert(running.get() == 0, "Already running")
startWork()
roots.foreach(_.registerIfNew())
@ -76,10 +76,10 @@ class EvaluateSettings[I <: Init](
cancel.set(true)
throw ex
}
getResults(delegates)
getResults(using delegates)
}
private def getResults(implicit delegates: ScopeType => Seq[ScopeType]) =
private def getResults(using delegates: ScopeType => Seq[ScopeType]) =
static.toTypedSeq.foldLeft(empty) { case (ss, static.TPair(key, node)) =>
if key.key.isLocal then ss
else ss.set(key, node.get)

View File

@ -172,7 +172,7 @@ trait Init:
private final val nextID = new java.util.concurrent.atomic.AtomicLong
private final def nextDefaultID(): Long = nextID.incrementAndGet()
def empty(implicit delegates: ScopeType => Seq[ScopeType]): Settings =
def empty(using delegates: ScopeType => Seq[ScopeType]): Settings =
Settings0(Set.empty, Set.empty, Map.empty, delegates)
def asTransform(s: Settings): [A] => ScopedKey[A] => A =
@ -210,7 +210,7 @@ trait Init:
val sMap: ScopedMap = grouped(derived)
// delegate references to undefined values according to 'delegates'
val dMap: ScopedMap =
if (actual) delegate(sMap)(delegates, display) else sMap
if (actual) delegate(sMap)(using delegates, display) else sMap
// merge Seq[Setting[_]] into Compiled
compile(dMap)
}
@ -277,10 +277,10 @@ trait Init:
if s.definitive then Vector(s)
else ss :+ s
def addLocal(init: Seq[Setting[?]])(implicit scopeLocal: ScopeLocal): Seq[Setting[?]] =
def addLocal(init: Seq[Setting[?]])(using scopeLocal: ScopeLocal): Seq[Setting[?]] =
Par(init).map(_.dependencies flatMap scopeLocal).toVector.flatten ++ init
def delegate(sMap: ScopedMap)(implicit
def delegate(sMap: ScopedMap)(using
delegates: ScopeType => Seq[ScopeType],
display: Show[ScopedKey[?]]
): ScopedMap = {
@ -329,7 +329,7 @@ trait Init:
val definedAt = skeys.find(sk => (selfRefOk || ref.key != sk) && (sMap.contains(sk)))
definedAt.toRight(Undefined(ref, k))
private def applyInits(ordered: Seq[Compiled[?]])(implicit
private def applyInits(ordered: Seq[Compiled[?]])(using
delegates: ScopeType => Seq[ScopeType]
): Settings =
val x =
@ -345,7 +345,7 @@ trait Init:
u: Undefined,
validKeys: Seq[ScopedKey[?]],
delegates: ScopeType => Seq[ScopeType]
)(implicit
)(using
display: Show[ScopedKey[?]]
): String =
val guessed = guessIntendedScope(validKeys, delegates, u.referencedKey)
@ -396,7 +396,7 @@ trait Init:
delegates: ScopeType => Seq[ScopeType],
keys: Seq[Undefined],
runtime: Boolean
)(implicit display: Show[ScopedKey[?]]): Uninitialized = {
)(using display: Show[ScopedKey[?]]): Uninitialized = {
assert(keys.nonEmpty)
val suffix = if (keys.length > 1) "s" else ""
val prefix = if (runtime) "Runtime reference" else "Reference"
@ -491,7 +491,7 @@ trait Init:
/**
* Intersects two scopes, returning the more specific one if they intersect, or None otherwise.
*/
private[sbt] def intersect(s1: ScopeType, s2: ScopeType)(implicit
private[sbt] def intersect(s1: ScopeType, s2: ScopeType)(using
delegates: ScopeType => Seq[ScopeType]
): Option[ScopeType] = intersectDelegates(s1, s2, mkDelegates(delegates))
@ -507,7 +507,7 @@ trait Init:
else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific
else None
private def deriveAndLocal(init: Seq[Setting[?]], delegates: ScopeType => Delegates)(implicit
private def deriveAndLocal(init: Seq[Setting[?]], delegates: ScopeType => Delegates)(using
scopeLocal: ScopeLocal
): Seq[Setting[?]] = {
import collection.mutable
@ -531,7 +531,7 @@ trait Init:
Util.separate[Setting[?], Derived, Setting[?]](init) {
case d: DerivedSetting[_] => Left(new Derived(d)); case s => Right(s)
}
val defs = addLocal(rawDefs)(scopeLocal)
val defs = addLocal(rawDefs)(using scopeLocal)
// group derived settings by the key they define
val derivsByDef = new mutable.HashMap[AttributeKey[?], Deriveds]
@ -609,7 +609,7 @@ trait Init:
// Take all the original defs and DerivedSettings along with locals, replace each DerivedSetting with the actual
// settings that were derived.
val allDefs = addLocal(init)(scopeLocal)
val allDefs = addLocal(init)(using scopeLocal)
allDefs.flatMap {
case d: DerivedSetting[_] => (derivedToStruct get d map (_.outputs)).toSeq.flatten
case s => s :: nil

View File

@ -18,13 +18,13 @@ trait OptJsonWriter0 {
implicit def fallback[A]: NoJsonWriter[A] = NoJsonWriter()
}
object OptJsonWriter extends OptJsonWriter0 {
implicit def lift[A](implicit z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
implicit def lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
trait StrictMode0 {
implicit def conflictingFallback1[A]: NoJsonWriter[A] = NoJsonWriter()
implicit def conflictingFallback2[A]: NoJsonWriter[A] = NoJsonWriter()
}
object StrictMode extends StrictMode0 {
implicit def lift[A](implicit z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
implicit def lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
}
}

View File

@ -406,7 +406,7 @@ class Difference(
apply(files, lastFilesInfo)(f)(_ => files)
}
def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = {
def apply[T](f: ChangeReport[File] => T)(using toFiles: T => Set[File]): T = {
val lastFilesInfo = cachedFilesInfo
apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles)
}

View File

@ -59,7 +59,7 @@ abstract class IvyBridgeProviderSpecification
targetDir: File,
log: Logger,
scalaVersion: String,
)(implicit td: TestData): File = {
)(using td: TestData): File = {
val zincVersion = td.configMap.get("sbt.zinc.version") match {
case Some(v: String) => v
case _ => throw new IllegalStateException("No zinc version specified")