remove more implicits; use `given` and `using`

This commit is contained in:
xuwei-k 2024-11-17 12:39:08 +09:00 committed by kenji yoshida
parent d193c990d1
commit f11d9d76f0
35 changed files with 81 additions and 80 deletions

View File

@ -14,20 +14,21 @@ import sjsonnew.{ JsonWriter => JW, JsonReader => JR, JsonFormat => JF, _ }
import sjsonnew.shaded.scalajson.ast.unsafe._
trait JValueFormats { self: sjsonnew.BasicJsonProtocol =>
implicit val JNullFormat: JF[JNull.type] = new JF[JNull.type] {
given JNullFormat: JF[JNull.type] = new JF[JNull.type] {
def write[J](x: JNull.type, b: Builder[J]) = b.writeNull()
def read[J](j: Option[J], u: Unbuilder[J]) = JNull
}
implicit val JBooleanFormat: JF[JBoolean] = projectFormat(_.get, (x: Boolean) => JBoolean(x))
implicit val JStringFormat: JF[JString] = projectFormat(_.value, (x: String) => JString(x))
given JBooleanFormat: JF[JBoolean] = projectFormat(_.get, (x: Boolean) => JBoolean(x))
given JStringFormat: JF[JString] = projectFormat(_.value, (x: String) => JString(x))
implicit val JNumberFormat: JF[JNumber] =
given JNumberFormat: JF[JNumber] =
projectFormat(x => BigDecimal(x.value), (x: BigDecimal) => JNumber(x.toString))
implicit val JArrayFormat: JF[JArray] = projectFormat[JArray, Array[JValue]](_.value, JArray(_))
given JArrayFormat(using JF[JValue]): JF[JArray] =
projectFormat[JArray, Array[JValue]](_.value, JArray(_))
implicit lazy val JObjectJsonWriter: JW[JObject] = new JW[JObject] {
given JObjectJsonWriter: JW[JObject] = new JW[JObject] {
def write[J](x: JObject, b: Builder[J]) = {
b.beginObject()
x.value foreach (jsonField => JValueFormat.addField(jsonField.field, jsonField.value, b))
@ -35,7 +36,7 @@ trait JValueFormats { self: sjsonnew.BasicJsonProtocol =>
}
}
implicit lazy val JValueJsonWriter: JW[JValue] = new JW[JValue] {
given JValueJsonWriter: JW[JValue] = new JW[JValue] {
def write[J](x: JValue, b: Builder[J]) = x match {
case x: JNull.type => JNullFormat.write(x, b)
case x: JBoolean => JBooleanFormat.write(x, b)
@ -47,7 +48,7 @@ trait JValueFormats { self: sjsonnew.BasicJsonProtocol =>
}
// This passes through JValue, or returns JNull instead of blowing up with unimplemented.
implicit lazy val JValueJsonReader: JR[JValue] = new JR[JValue] {
given JValueJsonReader: JR[JValue] = new JR[JValue] {
def read[J](j: Option[J], u: Unbuilder[J]) = j match {
case Some(x: JValue) => x
case Some(x) => sys.error(s"Uknown AST $x")
@ -55,6 +56,6 @@ trait JValueFormats { self: sjsonnew.BasicJsonProtocol =>
}
}
implicit lazy val JValueFormat: JF[JValue] =
given JValueFormat: JF[JValue] =
jsonFormat[JValue](JValueJsonReader, JValueJsonWriter)
}

View File

@ -13,7 +13,7 @@ import xsbti.Position
import java.util.Optional
trait PositionFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val PositionFormat: JsonFormat[Position] = new JsonFormat[Position] {
given PositionFormat: JsonFormat[Position] = new JsonFormat[Position] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Position = {
jsOpt match {
case Some(js) =>

View File

@ -13,7 +13,7 @@ import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
import java.util.Optional
trait ProblemFormats { self: SeverityFormats & PositionFormats & sjsonnew.BasicJsonProtocol =>
implicit lazy val ProblemFormat: JsonFormat[Problem] = new JsonFormat[Problem] {
given ProblemFormat: JsonFormat[Problem] = new JsonFormat[Problem] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Problem = {
jsOpt match {
case Some(js) =>

View File

@ -12,7 +12,7 @@ import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
import xsbti.Severity;
trait SeverityFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val SeverityFormat: JsonFormat[Severity] = new JsonFormat[Severity] {
given SeverityFormat: JsonFormat[Severity] = new JsonFormat[Severity] {
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Severity = {
jsOpt match {
case Some(js) =>

View File

@ -13,7 +13,7 @@ import sbt.util.ShowLines
import sbt.internal.util.SuccessEvent
trait SuccessEventShowLines {
implicit val sbtSuccessEventShowLines: ShowLines[SuccessEvent] =
given sbtSuccessEventShowLines: ShowLines[SuccessEvent] =
ShowLines[SuccessEvent]((e: SuccessEvent) => {
Vector(e.message)
})

View File

@ -13,7 +13,7 @@ import sbt.util.ShowLines
import sbt.internal.util.{ StackTrace, TraceEvent }
trait ThrowableShowLines {
implicit val sbtThrowableShowLines: ShowLines[Throwable] =
given sbtThrowableShowLines: ShowLines[Throwable] =
ShowLines[Throwable]((t: Throwable) => {
// 0 means enabled with default behavior. See StackTrace.scala.
val traceLevel = 0
@ -24,7 +24,7 @@ trait ThrowableShowLines {
object ThrowableShowLines extends ThrowableShowLines
trait TraceEventShowLines {
implicit val sbtTraceEventShowLines: ShowLines[TraceEvent] =
given sbtTraceEventShowLines: ShowLines[TraceEvent] =
ShowLines[TraceEvent]((t: TraceEvent) => {
ThrowableShowLines.sbtThrowableShowLines.showLines(t.message)
})

View File

@ -46,9 +46,9 @@ sealed abstract class LogExchange {
}
private[sbt] def initStringCodecs(): Unit = {
import sbt.internal.util.codec.SuccessEventShowLines._
import sbt.internal.util.codec.ThrowableShowLines._
import sbt.internal.util.codec.TraceEventShowLines._
import sbt.internal.util.codec.SuccessEventShowLines.given
import sbt.internal.util.codec.ThrowableShowLines.given
import sbt.internal.util.codec.TraceEventShowLines.given
registerStringCodec[Throwable]
registerStringCodec[TraceEvent]

View File

@ -94,13 +94,13 @@ object LogWriterTest extends Properties("Log Writer") {
new ToLog(removeNewlines(c), by)
}
implicit lazy val genNewLine: Gen[NewLine] =
given genNewLine: Gen[NewLine] =
for (str <- oneOf("\n", "\r", "\r\n")) yield new NewLine(str)
implicit lazy val genLevel: Gen[Level.Value] =
given genLevel: Gen[Level.Value] =
oneOf(Level.values.toSeq)
implicit lazy val genOutput: Gen[Output] =
given genOutput: Gen[Output] =
for (ls <- listOf[List[ToLog]](MaxLines); lv <- genLevel) yield new Output(ls, lv)
def removeNewlines(s: String) = s.replaceAll("""[\n\r]+""", "")

View File

@ -57,7 +57,7 @@ class ManagedLoggerSpec extends AnyFlatSpec with Matchers {
import sjsonnew.BasicJsonProtocol._
val log = newLogger("foo")
context.addAppender("foo", asyncStdout -> Level.Info)
implicit val intShow: ShowLines[Int] =
given ShowLines[Int] =
ShowLines((x: Int) => Vector(s"String representation of $x"))
log.registerStringCodec[Int]
log.infoEvent(1)
@ -67,7 +67,7 @@ class ManagedLoggerSpec extends AnyFlatSpec with Matchers {
import sjsonnew.BasicJsonProtocol._
val log = newLogger("foo")
context.addAppender("foo", asyncStdout -> Level.Info)
implicit val intArrayShow: ShowLines[Array[Int]] =
given ShowLines[Array[Int]] =
ShowLines((x: Array[Int]) => Vector(s"String representation of ${x.mkString}"))
log.registerStringCodec[Array[Int]]
log.infoEvent(Array(1, 2, 3))
@ -77,7 +77,7 @@ class ManagedLoggerSpec extends AnyFlatSpec with Matchers {
import sjsonnew.BasicJsonProtocol._
val log = newLogger("foo")
context.addAppender("foo", asyncStdout -> Level.Info)
implicit val intVectorShow: ShowLines[Vector[Vector[Int]]] =
given ShowLines[Vector[Vector[Int]]] =
ShowLines((xss: Vector[Vector[Int]]) => Vector(s"String representation of $xss"))
log.registerStringCodec[Vector[Vector[Int]]]
log.infoEvent(Vector(Vector(1, 2, 3)))

View File

@ -47,7 +47,7 @@ private[sbt] class ParallelResolveEngine(
new PrepareDownloadEvent(typed.asScala.toArray)
}
// Farm out the dependencies for parallel download
implicit val ec = ParallelResolveEngine.resolveExecutionContext
given ExecutionContext = ParallelResolveEngine.resolveExecutionContext
val allDownloadsFuture = Future.traverse(report.getDependencies.asScala) { case dep: IvyNode =>
Future {
if (

View File

@ -110,7 +110,7 @@ object Sync {
sys.error("Duplicate mappings:" + dups.mkString)
}
implicit def relationFormat[A, B](using
given relationFormat[A, B](using
af: JsonFormat[Map[A, Set[B]]],
bf: JsonFormat[Map[B, Set[A]]]
): JsonFormat[Relation[A, B]] =
@ -164,7 +164,7 @@ object Sync {
}
import sjsonnew.IsoString
implicit def virtualFileRefStringIso: IsoString[VirtualFileRef] =
given IsoString[VirtualFileRef] =
IsoString.iso[VirtualFileRef](_.toString, VirtualFileRef.of(_))
store.write(
(
@ -238,7 +238,7 @@ object Sync {
store: CacheStore
)(using infoFormat: JsonFormat[F]): RelationInfoVirtual[F] = {
import sjsonnew.IsoString
implicit def virtualFileRefStringIso: IsoString[VirtualFileRef] =
given IsoString[VirtualFileRef] =
IsoString.iso[VirtualFileRef](_.toString, VirtualFileRef.of(_))
store.read(default =
(Relation.empty[VirtualFileRef, VirtualFileRef], Map.empty[VirtualFileRef, F])

View File

@ -16,7 +16,7 @@ import sjsonnew.shaded.scalajson.ast.unsafe.JValue
class CacheIvyTest extends Properties("CacheIvy") {
import sbt.util.{ CacheStore, SingletonCache }
import SingletonCache._
import SingletonCache.given
import sjsonnew._
import sjsonnew.support.scalajson.unsafe.Converter

View File

@ -31,7 +31,7 @@ object Append:
trait Sequence[A1, -A2, A3] extends Value[A1, A3] with Values[A1, A2]
implicit def appendSeq[T, V <: T]: Sequence[Seq[T], Seq[V], V] =
given appendSeq[T, V <: T]: Sequence[Seq[T], Seq[V], V] =
new Sequence[Seq[T], Seq[V], V] {
def appendValues(a: Seq[T], b: Seq[V]): Seq[T] = a ++ (b: Seq[T])
def appendValue(a: Seq[T], b: V): Seq[T] = a :+ (b: T)
@ -42,13 +42,13 @@ object Append:
override def appendValue(a: Seq[A1], b: V): Seq[A1] = a :+ (b: A1)
@compileTimeOnly("This can be used in += only.")
implicit def appendTaskValueSeq[T, V <: T]: Value[Seq[Task[T]], Initialize[Task[V]]] =
given appendTaskValueSeq[T, V <: T]: Value[Seq[Task[T]], Initialize[Task[V]]] =
(_, _) => ??? // SAM conversion. This implementation is rewritten by sbt's macros too.
@compileTimeOnly("This can be used in += only.")
implicit def appendTaskKeySeq[T, V <: T]: Value[Seq[Task[T]], TaskKey[V]] = (_, _) => ??? // SAM
given appendTaskKeySeq[T, V <: T]: Value[Seq[Task[T]], TaskKey[V]] = (_, _) => ??? // SAM
implicit def appendList[T, V <: T]: Sequence[List[T], List[V], V] =
given appendList[T, V <: T]: Sequence[List[T], List[V], V] =
new Sequence[List[T], List[V], V] {
def appendValues(a: List[T], b: List[V]): List[T] = a ::: (b: List[T])
def appendValue(a: List[T], b: V): List[T] = a :+ (b: T)
@ -64,10 +64,10 @@ object Append:
override def appendValue(a: Vector[A1], b: V): Vector[A1] = a :+ (b: A1)
// psst... these are implemented with SAM conversions
implicit def appendString: Value[String, String] = _ + _
implicit def appendInt: Value[Int, Int] = _ + _
implicit def appendLong: Value[Long, Long] = _ + _
implicit def appendDouble: Value[Double, Double] = _ + _
given appendString: Value[String, String] = _ + _
given appendInt: Value[Int, Int] = _ + _
given appendLong: Value[Long, Long] = _ + _
given appendDouble: Value[Double, Double] = _ + _
given Sequence[Classpath, Seq[HashedVirtualFileRef], HashedVirtualFileRef] with
override def appendValues(a: Classpath, b: Seq[HashedVirtualFileRef]): Classpath =
@ -75,26 +75,26 @@ object Append:
override def appendValue(a: Classpath, b: HashedVirtualFileRef): Classpath =
a :+ Attributed.blank(b)
implicit def appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] =
given appendSet[T, V <: T]: Sequence[Set[T], Set[V], V] =
new Sequence[Set[T], Set[V], V] {
def appendValues(a: Set[T], b: Set[V]): Set[T] = a ++ (b.toSeq: Seq[T]).toSet
def appendValue(a: Set[T], b: V): Set[T] = a + (b: T)
}
implicit def appendMap[A, B, X <: A, Y <: B]: Sequence[Map[A, B], Map[X, Y], (X, Y)] =
given appendMap[A, B, X <: A, Y <: B]: Sequence[Map[A, B], Map[X, Y], (X, Y)] =
new Sequence[Map[A, B], Map[X, Y], (X, Y)] {
def appendValues(a: Map[A, B], b: Map[X, Y]): Map[A, B] =
(a.toSeq ++ (b.toSeq: Seq[(A, B)])).toMap
def appendValue(a: Map[A, B], b: (X, Y)): Map[A, B] = a + (b: (A, B))
}
implicit def appendOption[T]: Sequence[Seq[T], Option[T], Option[T]] =
given appendOption[T]: Sequence[Seq[T], Option[T], Option[T]] =
new Sequence[Seq[T], Option[T], Option[T]] {
def appendValue(a: Seq[T], b: Option[T]): Seq[T] = b.fold(a)(a :+ _)
def appendValues(a: Seq[T], b: Option[T]): Seq[T] = b.fold(a)(a :+ _)
}
implicit def appendSource: Sequence[Seq[Source], Seq[File], File] =
given appendSource: Sequence[Seq[Source], Seq[File], File] =
new Sequence[Seq[Source], Seq[File], File] {
def appendValue(a: Seq[Source], b: File): Seq[Source] = appendValues(a, Seq(b))
def appendValues(a: Seq[Source], b: Seq[File]): Seq[Source] =
@ -111,9 +111,9 @@ object Append:
}
// Implemented with SAM conversion short-hand
implicit def appendFunction[A, B]: Value[A => A, A => A] = _.andThen(_)
given appendFunction[A, B]: Value[A => A, A => A] = _.andThen(_)
implicit def appendSideEffectToFunc[A, B]: Value[A => B, () => Unit] = (f, sideEffect) => {
given appendSideEffectToFunc[A, B]: Value[A => B, () => Unit] = (f, sideEffect) => {
f.andThen { b =>
sideEffect()
b

View File

@ -35,12 +35,12 @@ object Remove {
def removeValue(a: Seq[T], b: Option[T]): Seq[T] = b.fold(a)(a filterNot _.==)
def removeValues(a: Seq[T], b: Option[T]): Seq[T] = b.fold(a)(a filterNot _.==)
}
implicit def removeSet[T, V <: T]: Sequence[Set[T], Set[V], V] =
given removeSet[T, V <: T]: Sequence[Set[T], Set[V], V] =
new Sequence[Set[T], Set[V], V] {
def removeValue(a: Set[T], b: V): Set[T] = a - b
def removeValues(a: Set[T], b: Set[V]): Set[T] = a diff (b.toSeq: Seq[T]).toSet
}
implicit def removeMap[A, B, X <: A]: Sequence[Map[A, B], Seq[X], X] =
given removeMap[A, B, X <: A]: Sequence[Map[A, B], Seq[X], X] =
new Sequence[Map[A, B], Seq[X], X] {
def removeValue(a: Map[A, B], b: X): Map[A, B] = a - b
def removeValues(a: Map[A, B], b: Seq[X]): Map[A, B] = a -- b

View File

@ -3411,7 +3411,7 @@ object Classpaths {
inTask(updateClassifiers)(
Seq(
classifiersModule := {
implicit val key = (m: ModuleID) => (m.organization, m.name, m.revision)
val key = (m: ModuleID) => (m.organization, m.name, m.revision)
val projectDeps = projectDependencies.value.iterator.map(key).toSet
val externalModules = update.value.allModules.filterNot(m => projectDeps contains key(m))
GetClassifiersModule(

View File

@ -22,7 +22,7 @@ final case class Extracted(
structure: BuildStructure,
session: SessionSettings,
currentRef: ProjectRef
)(implicit val showKey: Show[ScopedKey[?]]) {
)(using val showKey: Show[ScopedKey[?]]) {
def rootProject = structure.rootProject
lazy val currentUnit = structure units currentRef.build
lazy val currentProject = currentUnit defined currentRef.project

View File

@ -45,7 +45,7 @@ private[sbt] object PluginCross {
Command.arb(requireSession(switchParser), pluginSwitchHelp) {
case (state, (version, command)) =>
val x = Project.extract(state)
import x._
import x.{ *, given }
state.log.info(s"Setting `pluginCrossBuild / sbtVersion` to $version")
val add = List(GlobalScope / pluginCrossBuild / sbtVersion :== version) ++
List(scalaVersion := scalaVersionSetting.value) ++

View File

@ -241,7 +241,7 @@ trait ProjectExtra extends Scoped.Syntax:
Project.extract(Project.session(state), Project.structure(state))
private[sbt] def extract(se: SessionSettings, st: BuildStructure): Extracted =
Extracted(st, se, se.current)(Project.showContextKey2(se))
Extracted(st, se, se.current)(using Project.showContextKey2(se))
def getProjectForReference(ref: Reference, structure: BuildStructure): Option[ResolvedProject] =
ref match

View File

@ -85,7 +85,7 @@ object Inspect {
def keyOutput(s: State, option: Mode, sk: Def.ScopedKey[?]): String = {
val extracted = Project.extract(s)
import extracted._
import extracted.{ *, given }
option match {
case Details(actual) => Project.details(extracted.structure, actual, sk)
case DependencyTreeMode =>

View File

@ -47,7 +47,7 @@ object IvyConsole {
val (eval, structure) = Load.defaultLoad(state, base, state.log)
val session = Load.initialSession(structure, eval)
val extracted = Project.extract(session, structure)
import extracted._
import extracted.{ *, given }
val depSettings: Seq[Setting[?]] = Seq(
libraryDependencies ++= managed.reverse,

View File

@ -49,7 +49,7 @@ object Script {
val session = Load.initialSession(structure, eval)
val extracted = Project.extract(session, structure)
val vf = structure.converter.toVirtualFile(script.toPath())
import extracted.*
import extracted.{ *, given }
val embeddedSettings = blocks(script).flatMap { block =>
evaluate(eval(), vf, block.lines, currentUnit.imports, block.offset + 1)(currentLoader)

View File

@ -40,7 +40,7 @@ private[sbt] object SettingCompletions {
* The settings injected by this method cannot be later persisted by the `session save` command.
*/
def setAll(extracted: Extracted, settings: Seq[Setting[?]]): SetResult = {
import extracted._
import extracted.{ *, given }
val r = Project.relation(extracted.structure, true)
val allDefs = Def
.flattenLocals(
@ -74,7 +74,7 @@ private[sbt] object SettingCompletions {
* appended to the current settings.
*/
def setThis(extracted: Extracted, settings: Seq[Def.Setting[?]], arg: String): SetResult = {
import extracted._
import extracted.{ *, given }
val append =
Load.transformSettings(Load.projectScope(currentRef), currentRef.build, rootProject, settings)
val newSession = session.appendSettings(append map (a => (a, arg.split('\n').toList)))

View File

@ -53,7 +53,7 @@ object VirtualFileValueCache {
private final class VirtualFileValueCache0[A](
getStamp: VirtualFile => XStamp,
make: VirtualFile => A
)(implicit
)(using
equiv: Equiv[XStamp]
) extends VirtualFileValueCache[A] {
private val backing = new ConcurrentHashMap[VirtualFile, VirtualFileCache]

View File

@ -46,7 +46,7 @@ import xsbti.VirtualFileRef
import java.util.concurrent.atomic.AtomicReference
object BuildServerProtocol {
import sbt.internal.bsp.codec.JsonProtocol._
import sbt.internal.bsp.codec.JsonProtocol.given
private val capabilities = BuildServerCapabilities(
CompileProvider(BuildServerConnection.languages),

View File

@ -118,7 +118,7 @@ object SettingQuery {
key: Def.ScopedKey[A]
): Either[String, JValue] =
getSettingValue(structure, key) flatMap (value =>
getJsonWriter(key.key) map { implicit jw: JsonWriter[A] =>
getJsonWriter(key.key) map { case given JsonWriter[A] =>
toJson(value)
}
)

View File

@ -23,10 +23,10 @@ object TagsTest extends Properties("Tags") {
def size: Gen[Size] =
for (i <- Arbitrary.arbitrary[Int] if i != Int.MinValue) yield Size(math.abs(i))
implicit def aTagMap: Arbitrary[Map[Tag, Int]] = Arbitrary(tagMap)
implicit def aTagAndFrequency: Arbitrary[(Tag, Int)] = Arbitrary(tagAndFrequency)
implicit def aTag: Arbitrary[Tag] = Arbitrary(tag)
implicit def aSize: Arbitrary[Size] = Arbitrary(size)
given aTagMap: Arbitrary[Map[Tag, Int]] = Arbitrary(tagMap)
given aTagAndFrequency: Arbitrary[(Tag, Int)] = Arbitrary(tagAndFrequency)
given aTag: Arbitrary[Tag] = Arbitrary(tag)
given aSize: Arbitrary[Size] = Arbitrary(size)
property("exclusive allows all groups without the exclusive tag") = forAll {
(tm: TagMap, tag: Tag) =>

View File

@ -41,7 +41,7 @@ object RunFromSourceMain {
case Some(home) => Vector(s"-Dsbt.ivy.home=$home")
case _ => Vector()
}) ++ fo0.runJVMOptions)
implicit val runner = new ForkRun(fo)
val runner = new ForkRun(fo)
val options =
Vector(workingDirectory.toString, scalaVersion, sbtVersion, cp.mkString(pathSeparator))
val context = LoggerContext()

View File

@ -14,7 +14,7 @@ import xsbti.VirtualFileRef
trait BasicCacheImplicits extends HashedVirtualFileRefFormats { self: BasicJsonProtocol =>
implicit def basicCache[I: JsonFormat, O: JsonFormat]: Cache[I, O] =
given basicCache[I: JsonFormat, O: JsonFormat]: Cache[I, O] =
new BasicCache[I, O]()
def wrapIn[I, J](using f: I => J, g: J => I, jCache: SingletonCache[J]): SingletonCache[I] =

View File

@ -64,7 +64,7 @@ class DirectoryStoreFactory[J](base: File) extends CacheStoreFactory {
IO.createDirectory(base)
@deprecated("Use constructor without converter", "1.4")
def this(base: File, converter: sjsonnew.SupportConverter[J])(implicit e: sjsonnew.IsoString[J]) =
def this(base: File, converter: sjsonnew.SupportConverter[J])(using e: sjsonnew.IsoString[J]) =
this(base)
def make(identifier: String): CacheStore = new FileBasedStore(base / identifier)
@ -78,7 +78,7 @@ class FileBasedStore[J](file: File) extends CacheStore {
IO.touch(file, setModified = false)
@deprecated("Use constructor without converter", "1.4")
def this(file: File, converter: sjsonnew.SupportConverter[J])(implicit e: sjsonnew.IsoString[J]) =
def this(file: File, converter: sjsonnew.SupportConverter[J])(using e: sjsonnew.IsoString[J]) =
this(file)
def read[T: JsonReader]() =

View File

@ -64,7 +64,7 @@ final case class FilesInfo[F <: FileInfo] private[sbt] (files: Set[F])
object FilesInfo {
def empty[F <: FileInfo]: FilesInfo[F] = FilesInfo(Set.empty[F])
implicit def format[F <: FileInfo: JsonFormat]: JsonFormat[FilesInfo[F]] =
given format[F <: FileInfo: JsonFormat]: JsonFormat[FilesInfo[F]] =
projectFormat(_.files, (fs: Set[F]) => FilesInfo(fs))
def full: FileInfo.Style = FileInfo.full
@ -101,8 +101,8 @@ object FileInfo {
sealed trait Style {
type F <: FileInfo
implicit def format: JsonFormat[F]
implicit def formats: JsonFormat[FilesInfo[F]] =
given format: JsonFormat[F]
given formats: JsonFormat[FilesInfo[F]] =
projectFormat(_.files, (fs: Set[F]) => FilesInfo(fs))
def apply(file: File): F

View File

@ -30,7 +30,7 @@ trait SingletonCache[A] {
object SingletonCache {
implicit def basicSingletonCache[A: JsonFormat]: SingletonCache[A] =
given basicSingletonCache[A: JsonFormat]: SingletonCache[A] =
new SingletonCache[A] {
override def read(from: Input): A = from.read[A]()
override def write(to: Output, value: A) = to.write(value)

View File

@ -11,7 +11,7 @@ package sbt.util
import sbt.io.IO
import sbt.io.syntax._
import CacheImplicits._
import CacheImplicits.given
import org.scalatest.flatspec.AnyFlatSpec

View File

@ -15,16 +15,16 @@ final case class NoJsonWriter[A]() extends OptJsonWriter[A]
final case class SomeJsonWriter[A](value: JsonWriter[A]) extends OptJsonWriter[A]
trait OptJsonWriter0 {
implicit def fallback[A]: NoJsonWriter[A] = NoJsonWriter()
given fallback[A]: NoJsonWriter[A] = NoJsonWriter()
}
object OptJsonWriter extends OptJsonWriter0 {
implicit def lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
given lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
trait StrictMode0 {
implicit def conflictingFallback1[A]: NoJsonWriter[A] = NoJsonWriter()
implicit def conflictingFallback2[A]: NoJsonWriter[A] = NoJsonWriter()
given conflictingFallback1[A]: NoJsonWriter[A] = NoJsonWriter()
given conflictingFallback2[A]: NoJsonWriter[A] = NoJsonWriter()
}
object StrictMode extends StrictMode0 {
implicit def lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
given lift[A](using z: JsonWriter[A]): SomeJsonWriter[A] = SomeJsonWriter(z)
}
}

View File

@ -317,7 +317,7 @@ trait Tracked {
}
class Timestamp(val store: CacheStore, useStartTime: Boolean)(implicit format: JsonFormat[Long])
class Timestamp(val store: CacheStore, useStartTime: Boolean)(using format: JsonFormat[Long])
extends Tracked {
def clean() = store.delete()

View File

@ -23,25 +23,25 @@ class ZincComponentCompilerSpec extends IvyBridgeProviderSpecification {
val logger = ConsoleLogger()
it should "compile the bridge for Scala 2.10.5 and 2.10.6" in { implicit td =>
it should "compile the bridge for Scala 2.10.5 and 2.10.6" in { case given FixtureParam =>
if (isJava8) {
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2105) should exist)
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2106) should exist)
} else ()
}
it should "compile the bridge for Scala 2.11.8 and 2.11.11" in { implicit td =>
it should "compile the bridge for Scala 2.11.8 and 2.11.11" in { case given FixtureParam =>
if (isJava8) {
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala2118) should exist)
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala21111) should exist)
} else ()
}
it should "compile the bridge for Scala 2.12.20" in { implicit td =>
it should "compile the bridge for Scala 2.12.20" in { case given FixtureParam =>
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala21220) should exist)
}
it should "compile the bridge for Scala 2.13.11" in { implicit td =>
it should "compile the bridge for Scala 2.13.11" in { case given FixtureParam =>
IO.withTemporaryDirectory(t => getCompilerBridge(t, logger, scala21311) should exist)
}
}