Merge branch 'develop' into ipcsocket-upgrade

This commit is contained in:
Ethan Atkins 2020-11-25 08:25:11 -08:00 committed by GitHub
commit d5ddf0f8ce
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 94 additions and 78 deletions

View File

@ -3411,11 +3411,10 @@ object Classpaths {
val s = streams.value
val cacheDirectory = crossTarget.value / cacheLabel / updateCacheName.value
import CacheStoreFactory.jvalueIsoString
val cacheStoreFactory: CacheStoreFactory = {
val factory =
state.value.get(Keys.cacheStoreFactoryFactory).getOrElse(InMemoryCacheStore.factory(0))
factory(cacheDirectory.toPath, Converter)
factory(cacheDirectory.toPath)
}
val isRoot = executionRoots.value contains resolvedScoped.value

View File

@ -20,8 +20,6 @@ import sbt.io.syntax._
import sbt.internal.util.{ AttributeEntry, AttributeKey, AttributeMap, Attributed, Settings }
import sbt.internal.util.Attributed.data
import sbt.util.Logger
import sjsonnew.SupportConverter
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
final class BuildStructure(
val units: Map[URI, LoadedBuildUnit],
@ -311,20 +309,14 @@ object BuildStreams {
root: URI,
data: Settings[Scope]
): State => Streams = s => {
implicit val isoString: sjsonnew.IsoString[JValue] =
sjsonnew.IsoString.iso(
sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
)
(s get Keys.stateStreams) getOrElse {
std.Streams(
path(units, root, data),
displayFull,
LogManager.construct(data, s),
sjsonnew.support.scalajson.unsafe.Converter, {
path(units, root, data)(_),
displayFull: ScopedKey[_] => String,
LogManager.construct(data, s), {
val factory =
s.get(Keys.cacheStoreFactoryFactory).getOrElse(InMemoryCacheStore.factory(0))
(file, converter: SupportConverter[JValue]) => factory(file.toPath, converter)
(file: File) => factory(file.toPath)
}
)
}

View File

@ -16,7 +16,7 @@ import java.util.concurrent.atomic.AtomicReference
import com.github.benmanes.caffeine.cache.{ Cache, Caffeine, Weigher }
import sbt.io.IO
import sbt.util.{ CacheStore, CacheStoreFactory, DirectoryStoreFactory }
import sjsonnew.{ IsoString, JsonReader, JsonWriter, SupportConverter }
import sjsonnew.{ JsonReader, JsonWriter }
private[sbt] object InMemoryCacheStore {
private[this] class InMemoryCacheStore(maxSize: Long) extends AutoCloseable {
@ -83,28 +83,26 @@ private[sbt] object InMemoryCacheStore {
cacheStore.close()
}
}
private[this] def factory[J: IsoString](
private[this] def factory(
store: InMemoryCacheStore,
path: Path,
converter: SupportConverter[J]
path: Path
): CacheStoreFactory = {
val delegate = new DirectoryStoreFactory(path.toFile, converter)
val delegate = new DirectoryStoreFactory(path.toFile)
new CacheStoreFactory {
override def make(identifier: String): CacheStore =
new CacheStoreImpl(path.resolve(identifier), store, delegate.make(identifier))
override def sub(identifier: String): CacheStoreFactory =
factory(store, path.resolve(identifier), converter)
factory(store, path.resolve(identifier))
}
}
private[sbt] trait CacheStoreFactoryFactory extends AutoCloseable {
def apply[J: IsoString](path: Path, supportConverter: SupportConverter[J]): CacheStoreFactory
def apply(path: Path): CacheStoreFactory
}
private[this] class CacheStoreFactoryFactoryImpl(size: Long) extends CacheStoreFactoryFactory {
private[this] val storeRef = new AtomicReference[InMemoryCacheStore]
override def close(): Unit = Option(storeRef.get).foreach(_.close())
def apply[J: IsoString](
def apply(
path: Path,
supportConverter: SupportConverter[J]
): CacheStoreFactory = {
val store = storeRef.get match {
case null =>
@ -119,14 +117,13 @@ private[sbt] object InMemoryCacheStore {
}
case s => s
}
factory(store, path, supportConverter)
factory(store, path)
}
}
private[this] object DirectoryFactory extends CacheStoreFactoryFactory {
override def apply[J: IsoString](
override def apply(
path: Path,
supportConverter: SupportConverter[J]
): CacheStoreFactory = new DirectoryStoreFactory(path.toFile, supportConverter)
): CacheStoreFactory = new DirectoryStoreFactory(path.toFile)
override def close(): Unit = {}
}
def factory(size: Long): CacheStoreFactoryFactory =

View File

@ -307,17 +307,11 @@ private[sbt] object LibraryManagement {
def withExcludes(out: File, classifiers: Seq[String], lock: xsbti.GlobalLock)(
f: Map[ModuleID, Vector[ConfigRef]] => UpdateReport
): UpdateReport = {
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import sbt.librarymanagement.LibraryManagementCodec._
import sbt.util.FileBasedStore
implicit val isoString: sjsonnew.IsoString[JValue] =
sjsonnew.IsoString.iso(
sjsonnew.support.scalajson.unsafe.CompactPrinter.apply,
sjsonnew.support.scalajson.unsafe.Parser.parseUnsafe
)
val exclName = "exclude_classifiers"
val file = out / exclName
val store = new FileBasedStore(file, sjsonnew.support.scalajson.unsafe.Converter)
val store = new FileBasedStore(file)
lock(
out / (exclName + ".lock"),
new Callable[UpdateReport] {

View File

@ -13,11 +13,10 @@ import java.util.concurrent.ConcurrentHashMap
import sbt.internal.io.DeferredWriter
import sbt.internal.util.ManagedLogger
import sbt.internal.util.Util.{ nil }
import sbt.internal.util.Util.nil
import sbt.io.IO
import sbt.io.syntax._
import sbt.util._
import sjsonnew.{ IsoString, SupportConverter }
// no longer specific to Tasks, so 'TaskStreams' should be renamed
/**
@ -120,25 +119,39 @@ object Streams {
synchronized { streams.values.foreach(_.close()); streams.clear() }
}
def apply[Key, J: IsoString](
@deprecated("Use constructor without converter", "1.4")
def apply[Key, J: sjsonnew.IsoString](
taskDirectory: Key => File,
name: Key => String,
mkLogger: (Key, PrintWriter) => ManagedLogger,
converter: sjsonnew.SupportConverter[J],
): Streams[Key] = apply[Key](taskDirectory, name, mkLogger)
@deprecated("Use constructor without converter", "1.4")
private[sbt] def apply[Key, J: sjsonnew.IsoString](
taskDirectory: Key => File,
name: Key => String,
mkLogger: (Key, PrintWriter) => ManagedLogger,
converter: sjsonnew.SupportConverter[J],
mkFactory: (File, sjsonnew.SupportConverter[J]) => CacheStoreFactory
): Streams[Key] = apply[Key](taskDirectory, name, mkLogger, mkFactory(_, converter))
def apply[Key](
taskDirectory: Key => File,
name: Key => String,
mkLogger: (Key, PrintWriter) => ManagedLogger,
converter: SupportConverter[J]
): Streams[Key] =
apply(
taskDirectory,
name,
mkLogger,
converter,
(file, s: SupportConverter[J]) => new DirectoryStoreFactory[J](file, s)
file => new DirectoryStoreFactory(file)
)
private[sbt] def apply[Key, J: IsoString](
private[sbt] def apply[Key](
taskDirectory: Key => File,
name: Key => String,
mkLogger: (Key, PrintWriter) => ManagedLogger,
converter: SupportConverter[J],
mkFactory: (File, SupportConverter[J]) => CacheStoreFactory
mkFactory: File => CacheStoreFactory
): Streams[Key] = new Streams[Key] {
def apply(a: Key): ManagedStreams[Key] = new ManagedStreams[Key] {
@ -146,10 +159,10 @@ object Streams {
private[this] var closed = false
def getInput(a: Key, sid: String = default): Input =
make(a, sid)(f => new PlainInput(new FileInputStream(f), converter))
make(a, sid)(f => new FileInput(f))
def getOutput(sid: String = default): Output =
make(a, sid)(f => new PlainOutput(new FileOutputStream(f), converter))
make(a, sid)(f => new FileOutput(f))
def readText(a: Key, sid: String = default): BufferedReader =
make(a, sid)(
@ -180,7 +193,7 @@ object Streams {
dir
}
lazy val cacheStoreFactory: CacheStoreFactory = mkFactory(cacheDirectory, converter)
lazy val cacheStoreFactory: CacheStoreFactory = mkFactory(cacheDirectory)
def log(sid: String): ManagedLogger = mkLogger(a, text(sid))

View File

@ -8,11 +8,12 @@
package sbt.util
import java.io.{ File, InputStream, OutputStream }
import sbt.io.syntax.fileToRichFile
import sbt.io.{ IO, Using }
import sjsonnew.{ IsoString, JsonReader, JsonWriter, SupportConverter }
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
import sbt.io.IO
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Parser }
import sjsonnew.{ IsoString, JsonReader, JsonWriter, SupportConverter }
/** A `CacheStore` is used by the caching infrastructure to persist cached information. */
abstract class CacheStore extends Input with Output {
@ -23,6 +24,7 @@ abstract class CacheStore extends Input with Output {
}
object CacheStore {
@deprecated("Create your own IsoString[JValue]", "1.4")
implicit lazy val jvalueIsoString: IsoString[JValue] =
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
@ -30,7 +32,7 @@ object CacheStore {
def apply(cacheFile: File): CacheStore = file(cacheFile)
/** Returns file-based CacheStore using standard JSON converter. */
def file(cacheFile: File): CacheStore = new FileBasedStore[JValue](cacheFile, Converter)
def file(cacheFile: File): CacheStore = new FileBasedStore(cacheFile)
}
/** Factory that can make new stores. */
@ -47,6 +49,7 @@ abstract class CacheStoreFactory {
}
object CacheStoreFactory {
@deprecated("Create your own IsoString[JValue]", "1.4")
implicit lazy val jvalueIsoString: IsoString[JValue] =
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
@ -54,31 +57,36 @@ object CacheStoreFactory {
def apply(base: File): CacheStoreFactory = directory(base)
/** Returns directory-based CacheStoreFactory using standard JSON converter. */
def directory(base: File): CacheStoreFactory = new DirectoryStoreFactory[JValue](base, Converter)
def directory(base: File): CacheStoreFactory = new DirectoryStoreFactory(base)
}
/** A factory that creates new stores persisted in `base`. */
class DirectoryStoreFactory[J: IsoString](base: File, converter: SupportConverter[J])
extends CacheStoreFactory {
class DirectoryStoreFactory[J](base: File) extends CacheStoreFactory {
IO.createDirectory(base)
def make(identifier: String): CacheStore = new FileBasedStore(base / identifier, converter)
@deprecated("Use constructor without converter", "1.4")
def this(base: File, converter: sjsonnew.SupportConverter[J])(implicit e: sjsonnew.IsoString[J]) =
this(base)
def make(identifier: String): CacheStore = new FileBasedStore(base / identifier)
def sub(identifier: String): CacheStoreFactory =
new DirectoryStoreFactory(base / identifier, converter)
new DirectoryStoreFactory(base / identifier)
}
/** A `CacheStore` that persists information in `file`. */
class FileBasedStore[J: IsoString](file: File, converter: SupportConverter[J]) extends CacheStore {
class FileBasedStore[J](file: File) extends CacheStore {
IO.touch(file, setModified = false)
@deprecated("Use constructor without converter", "1.4")
def this(file: File, converter: sjsonnew.SupportConverter[J])(implicit e: sjsonnew.IsoString[J]) =
this(file)
def read[T: JsonReader]() =
Using.fileInputStream(file)(stream => new PlainInput(stream, converter).read())
new FileInput(file).read()
def write[T: JsonWriter](value: T) =
Using.fileOutputStream(append = false)(file) { stream =>
new PlainOutput(stream, converter).write(value)
}
new FileOutput(file).write(value)
def delete() = IO.delete(file)
def close() = ()

View File

@ -7,7 +7,8 @@
package sbt.util
import java.io.{ Closeable, InputStream }
import java.io.{ Closeable, File, InputStream }
import scala.util.control.NonFatal
import sjsonnew.{ IsoString, JsonReader, SupportConverter }
import sbt.io.{ IO, Using }
@ -44,3 +45,14 @@ class PlainInput[J: IsoString](input: InputStream, converter: SupportConverter[J
def close() = input.close()
}
/** An `Input` that reads a value of type `T` from `file`, parsing its
 * contents as JSON with the default scalajson `Converter`.
 * Counterpart of `FileOutput`.
 */
class FileInput(file: File) extends Input {
  override def read[T: JsonReader](): T = {
    // Parse the file into a JSON AST, then decode via the JsonReader instance.
    // NOTE(review): both `.get` calls throw if the file is missing or
    // malformed — presumably callers treat that as a cache miss; confirm.
    val parsed = sjsonnew.support.scalajson.unsafe.Parser.parseFromFile(file).get
    sjsonnew.support.scalajson.unsafe.Converter.fromJson(parsed).get
  }
  // No resources are held between calls, so there is nothing to release.
  def close() = ()
}

View File

@ -7,7 +7,8 @@
package sbt.util
import java.io.{ Closeable, OutputStream }
import java.io.{ Closeable, File, OutputStream }
import sjsonnew.{ IsoString, JsonWriter, SupportConverter }
import sbt.io.Using
@ -31,3 +32,16 @@ class PlainOutput[J: IsoString](output: OutputStream, converter: SupportConverte
def close() = output.close()
}
/** An `Output` that writes a value of type `T` to `file` as compact JSON
 * using the default scalajson `Converter`.
 * Counterpart of `FileInput`.
 */
class FileOutput(file: File) extends Output {
  override def write[T: JsonWriter](value: T): Unit = {
    // Encode the value to a JSON AST first; `.get` throws on encoding failure.
    val json = sjsonnew.support.scalajson.unsafe.Converter.toJson(value).get
    // Truncate and rewrite the file; `Using` closes the stream afterwards.
    Using.fileOutputStream(append = false)(file) { os =>
      val writer = new java.io.PrintWriter(os)
      sjsonnew.support.scalajson.unsafe.CompactPrinter.print(json, writer)
      // PrintWriter buffers internally — flush before the enclosing stream is
      // closed so the serialized payload actually reaches the file.
      writer.flush()
    }
  }
  // Each write opens and closes its own stream; no persistent resources.
  def close() = ()
}

View File

@ -12,17 +12,10 @@ import sbt.io.syntax._
import CacheImplicits._
import sjsonnew.IsoString
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import org.scalatest.FlatSpec
class CacheSpec extends FlatSpec {
implicit val isoString: IsoString[JValue] =
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
"A cache" should "NOT throw an exception if read without being written previously" in {
testCache[String, Int] {
case (cache, store) =>
@ -80,7 +73,7 @@ class CacheSpec extends FlatSpec {
implicit cache: Cache[K, V]
): Unit =
IO.withTemporaryDirectory { tmp =>
val store = new FileBasedStore(tmp / "cache-store", Converter)
val store = new FileBasedStore(tmp / "cache-store")
f(cache, store)
}

View File

@ -12,10 +12,7 @@ import sbt.io.syntax._
import CacheImplicits._
import sjsonnew.{ Builder, deserializationError, IsoString, JsonFormat, Unbuilder }
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
import sjsonnew.{ Builder, deserializationError, JsonFormat, Unbuilder }
import org.scalatest.FlatSpec
class SingletonCacheSpec extends FlatSpec {
@ -49,9 +46,6 @@ class SingletonCacheSpec extends FlatSpec {
}
}
implicit val isoString: IsoString[JValue] =
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
"A singleton cache" should "throw an exception if read without being written previously" in {
testCache[Int] {
case (cache, store) =>
@ -95,7 +89,7 @@ class SingletonCacheSpec extends FlatSpec {
implicit cache: SingletonCache[T]
): Unit =
IO.withTemporaryDirectory { tmp =>
val store = new FileBasedStore(tmp / "cache-store", Converter)
val store = new FileBasedStore(tmp / "cache-store")
f(cache, store)
}