mirror of https://github.com/sbt/sbt.git
Merge pull request #62 from dwijnand/fix-file-info
Fix stackoverflow in implicit FilesInfo JsonFormat
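
The overflow comes from the definition removed below, `implicit def format[F <: FileInfo]: JsonFormat[FilesInfo[F]] = implicitly` on the `FilesInfo` companion: implicit search satisfies `implicitly` with that very definition, so the first lookup of a `JsonFormat[FilesInfo[F]]` recurses until the stack blows. The diff instead derives the format on `FileInfo.Style` as `implicit def formats: JsonFormat[FilesInfo[F]] = project(_.files, (fs: Set[F]) => FilesInfo(fs))` and has callers such as `Difference` pass `style.formats` explicitly. Below is a minimal, self-contained sketch of the same failure mode and of the fix, using a hypothetical `Format` type class rather than the sjson-new API:

// Hypothetical Format type class, not the sjson-new API; names are illustrative only.
trait Format[A] { def describe: String }

object Formats {
  implicit val intFormat: Format[Int] = new Format[Int] { def describe = "Int" }

  // An element format lifts to a Set format.
  implicit def setFormat[A](implicit fa: Format[A]): Format[Set[A]] =
    new Format[Set[A]] { def describe = s"Set[${fa.describe}]" }
}

final case class Wrapper[A](files: Set[A])

object Broken {
  // Implicit search satisfies `implicitly` with this very definition, so the
  // method simply calls itself: StackOverflowError on first use.
  implicit def wrapperFormat[A]: Format[Wrapper[A]] = implicitly
}

object Fixed {
  import Formats._
  // Derive the wrapper format from the Set format instead, as the new
  // `FileInfo.Style#formats` does via sjson-new's `project`; no self-reference.
  implicit def wrapperFormat[A: Format]: Format[Wrapper[A]] =
    new Format[Wrapper[A]] { def describe = implicitly[Format[Set[A]]].describe }
}

With `import Formats._` in scope, `Fixed.wrapperFormat[Int].describe` yields "Set[Int]", while calling `Broken.wrapperFormat[Int]` dies with a StackOverflowError.
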
commit 6527382a4e
@@ -1,23 +1,17 @@
package sbt.internal.util

import java.io.{ File, InputStream, OutputStream }
import sbt.io.syntax.fileToRichFile
import sbt.io.{ IO, Using }
import sjsonnew.{ IsoString, JsonReader, JsonWriter, SupportConverter }

import java.io.{ File, InputStream, OutputStream }

import sbt.io.{ IO, Using }
import sbt.io.syntax.fileToRichFile

/**
 * A `CacheStore` is used by the caching infrastructure to persist cached information.
 */
/** A `CacheStore` is used by the caching infrastructure to persist cached information. */
trait CacheStore extends Input with Output {
  /** Delete the persisted information. */
  def delete(): Unit
}

/**
 * Factory that can derive new stores.
 */
/** Factory that can derive new stores. */
trait CacheStoreFactory {
  /** Create a new store. */
  def derive(identifier: String): CacheStore
@@ -26,74 +20,32 @@ trait CacheStoreFactory {
  def sub(identifier: String): CacheStoreFactory
}

/**
 * A factory that creates new stores persisted in `base`.
 */
/** A factory that creates new stores persisted in `base`. */
class DirectoryStoreFactory[J: IsoString](base: File, converter: SupportConverter[J]) extends CacheStoreFactory {

  IO.createDirectory(base)

  override def derive(identifier: String): CacheStore =
    new FileBasedStore(base / identifier, converter)
  def derive(identifier: String): CacheStore = new FileBasedStore(base / identifier, converter)

  override def sub(identifier: String): CacheStoreFactory =
    new DirectoryStoreFactory(base / identifier, converter)
  def sub(identifier: String): CacheStoreFactory = new DirectoryStoreFactory(base / identifier, converter)
}

/**
 * A `CacheStore` that persists information in `file`.
 */
/** A `CacheStore` that persists information in `file`. */
class FileBasedStore[J: IsoString](file: File, converter: SupportConverter[J]) extends CacheStore {

  IO.touch(file, setModified = false)

  override def delete(): Unit =
    IO.delete(file)
  def read[T: JsonReader]() = Using.fileInputStream(file)(stream => new PlainInput(stream, converter).read())

  override def read[T: JsonReader](): T =
    Using.fileInputStream(file) { stream =>
      val input = new PlainInput(stream, converter)
      input.read()
    }

  override def read[T: JsonReader](default: => T): T =
    try read[T]()
    catch { case _: Exception => default }

  override def write[T: JsonWriter](value: T): Unit =
    Using.fileOutputStream(append = false)(file) { stream =>
      val output = new PlainOutput(stream, converter)
      output.write(value)
    }

  override def close(): Unit = ()
  def write[T: JsonWriter](value: T) =
    Using.fileOutputStream(append = false)(file)(stream => new PlainOutput(stream, converter).write(value))

  def delete() = IO.delete(file)
  def close() = ()
}

/**
 * A store that reads from `inputStream` and writes to `outputStream
 */
/** A store that reads from `inputStream` and writes to `outputStream`. */
class StreamBasedStore[J: IsoString](inputStream: InputStream, outputStream: OutputStream, converter: SupportConverter[J]) extends CacheStore {

  override def delete(): Unit = ()

  override def read[T: JsonReader](): T = {
    val input = new PlainInput(inputStream, converter)
    input.read()
  }

  override def read[T: JsonReader](default: => T): T =
    try read[T]()
    catch { case _: Exception => default }

  override def write[T: JsonWriter](value: T): Unit = {
    val output = new PlainOutput(outputStream, converter)
    output.write(value)
  }

  override def close(): Unit = {
    inputStream.close()
    outputStream.close()
  }

}
  def read[T: JsonReader]() = new PlainInput(inputStream, converter).read()
  def write[T: JsonWriter](value: T) = new PlainOutput(outputStream, converter).write(value)
  def delete() = ()
  def close() = { inputStream.close(); outputStream.close() }
}
@@ -3,115 +3,102 @@
 */
package sbt.internal.util

import sbt.io.Hash

import java.io.File
import sjsonnew.{ Builder, deserializationError, JsonFormat, Unbuilder }
import scala.util.control.NonFatal
import sbt.io.Hash
import sjsonnew.{ Builder, JsonFormat, Unbuilder, deserializationError }
import CacheImplicits._

sealed trait FileInfo {
  def file: File
}

sealed trait HashFileInfo extends FileInfo {
  def hash: List[Byte]
}

sealed trait ModifiedFileInfo extends FileInfo {
  def lastModified: Long
}

sealed trait PlainFileInfo extends FileInfo {
  def exists: Boolean
}
sealed trait FileInfo { def file: File }
sealed trait HashFileInfo extends FileInfo { def hash: List[Byte] }
sealed trait ModifiedFileInfo extends FileInfo { def lastModified: Long }
sealed trait PlainFileInfo extends FileInfo { def exists: Boolean }

sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo

private final case class PlainFile(file: File, exists: Boolean) extends PlainFileInfo
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
private final case class FileHash(file: File, hash: List[Byte]) extends HashFileInfo
private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo

object FileInfo {
final case class FilesInfo[F <: FileInfo] private (files: Set[F])
object FilesInfo {
  def empty[F <: FileInfo]: FilesInfo[F] = FilesInfo(Set.empty[F])
}

object FileInfo {
  sealed trait Style {
    type F <: FileInfo
    implicit val format: JsonFormat[F]

    implicit def format: JsonFormat[F]
    implicit def formats: JsonFormat[FilesInfo[F]] = project(_.files, (fs: Set[F]) => FilesInfo(fs))

    def apply(file: File): F

    def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files map apply)

    def unapply(info: F): File = info.file

    def unapply(infos: FilesInfo[F]): Set[File] = infos.files map (_.file)
  }

  object full extends Style {
    override type F = HashModifiedFileInfo
    type F = HashModifiedFileInfo

    override implicit val format: JsonFormat[HashModifiedFileInfo] = new JsonFormat[HashModifiedFileInfo] {
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): HashModifiedFileInfo =
        jsOpt match {
          case Some(js) =>
            unbuilder.beginObject(js)
            val file = unbuilder.readField[File]("file")
            val hash = unbuilder.readField[List[Byte]]("hash")
            val lastModified = unbuilder.readField[Long]("lastModified")
            unbuilder.endObject()
            FileHashModified(file, hash, lastModified)
          case None =>
            deserializationError("Expected JsObject but found None")
        }

      override def write[J](obj: HashModifiedFileInfo, builder: Builder[J]): Unit = {
    implicit val format: JsonFormat[HashModifiedFileInfo] = new JsonFormat[HashModifiedFileInfo] {
      def write[J](obj: HashModifiedFileInfo, builder: Builder[J]) = {
        builder.beginObject()
        builder.addField("file", obj.file)
        builder.addField("hash", obj.hash)
        builder.addField("lastModified", obj.lastModified)
        builder.endObject()
      }

      def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val file = unbuilder.readField[File]("file")
          val hash = unbuilder.readField[List[Byte]]("hash")
          val lastModified = unbuilder.readField[Long]("lastModified")
          unbuilder.endObject()
          FileHashModified(file, hash, lastModified)
        case None => deserializationError("Expected JsObject but found None")
      }
    }

    override implicit def apply(file: File): HashModifiedFileInfo =
    implicit def apply(file: File): HashModifiedFileInfo =
      FileHashModified(file.getAbsoluteFile, Hash(file).toList, file.lastModified)
  }

  object hash extends Style {
    override type F = HashFileInfo
    type F = HashFileInfo

    override implicit val format: JsonFormat[HashFileInfo] = new JsonFormat[HashFileInfo] {
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): HashFileInfo =
        jsOpt match {
          case Some(js) =>
            unbuilder.beginObject(js)
            val file = unbuilder.readField[File]("file")
            val hash = unbuilder.readField[List[Byte]]("hash")
            unbuilder.endObject()
            FileHash(file, hash)
          case None =>
            deserializationError("Expected JsObject but found None")
        }

      override def write[J](obj: HashFileInfo, builder: Builder[J]): Unit = {
    implicit val format: JsonFormat[HashFileInfo] = new JsonFormat[HashFileInfo] {
      def write[J](obj: HashFileInfo, builder: Builder[J]) = {
        builder.beginObject()
        builder.addField("file", obj.file)
        builder.addField("hash", obj.hash)
        builder.endObject()
      }

      def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val file = unbuilder.readField[File]("file")
          val hash = unbuilder.readField[List[Byte]]("hash")
          unbuilder.endObject()
          FileHash(file, hash)
        case None => deserializationError("Expected JsObject but found None")
      }
    }

    override implicit def apply(file: File): HashFileInfo =
      FileHash(file.getAbsoluteFile, computeHash(file))
    implicit def apply(file: File): HashFileInfo = FileHash(file.getAbsoluteFile, computeHash(file))

    private def computeHash(file: File): List[Byte] =
      try Hash(file).toList
      catch { case _: Exception => Nil }
    private def computeHash(file: File): List[Byte] = try Hash(file).toList catch { case NonFatal(_) => Nil }
  }

  object lastModified extends Style {
    override type F = ModifiedFileInfo
    type F = ModifiedFileInfo

    override implicit val format: JsonFormat[ModifiedFileInfo] = new JsonFormat[ModifiedFileInfo] {
    implicit val format: JsonFormat[ModifiedFileInfo] = new JsonFormat[ModifiedFileInfo] {
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): ModifiedFileInfo =
        jsOpt match {
          case Some(js) =>
@@ -132,43 +119,34 @@ object FileInfo {
      }
    }

    override implicit def apply(file: File): ModifiedFileInfo =
      FileModified(file.getAbsoluteFile, file.lastModified)
    implicit def apply(file: File): ModifiedFileInfo = FileModified(file.getAbsoluteFile, file.lastModified)
  }

  object exists extends Style {
    override type F = PlainFileInfo
    type F = PlainFileInfo

    override implicit val format: JsonFormat[PlainFileInfo] = new JsonFormat[PlainFileInfo] {
      override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): PlainFileInfo =
        jsOpt match {
          case Some(js) =>
            unbuilder.beginObject(js)
            val file = unbuilder.readField[File]("file")
            val exists = unbuilder.readField[Boolean]("exists")
            unbuilder.endObject()
            PlainFile(file, exists)
          case None =>
            deserializationError("Expected JsObject but found None")
        }

      override def write[J](obj: PlainFileInfo, builder: Builder[J]): Unit = {
    implicit val format: JsonFormat[PlainFileInfo] = new JsonFormat[PlainFileInfo] {
      def write[J](obj: PlainFileInfo, builder: Builder[J]): Unit = {
        builder.beginObject()
        builder.addField("file", obj.file)
        builder.addField("exists", obj.exists)
        builder.endObject()
      }

      def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
        case Some(js) =>
          unbuilder.beginObject(js)
          val file = unbuilder.readField[File]("file")
          val exists = unbuilder.readField[Boolean]("exists")
          unbuilder.endObject()
          PlainFile(file, exists)
        case None => deserializationError("Expected JsObject but found None")
      }
    }

    override implicit def apply(file: File): PlainFileInfo = {
    implicit def apply(file: File): PlainFileInfo = {
      val abs = file.getAbsoluteFile
      PlainFile(abs, abs.exists)
    }
  }
}

final case class FilesInfo[F <: FileInfo] private (files: Set[F])
object FilesInfo {
  implicit def format[F <: FileInfo]: JsonFormat[FilesInfo[F]] = implicitly
  def empty[F <: FileInfo]: FilesInfo[F] = FilesInfo(Set.empty[F])
}
@@ -1,20 +1,18 @@
package sbt.internal.util

import sbt.io.{ IO, Using }

import java.io.{ Closeable, InputStream }

import scala.util.{ Failure, Success }

import scala.util.control.NonFatal
import sjsonnew.{ IsoString, JsonReader, SupportConverter }
import sbt.io.{ IO, Using }

trait Input extends Closeable {
  def read[T: JsonReader](): T
  def read[T: JsonReader](default: => T): T
  def read[T: JsonReader](default: => T): T = try read[T]() catch { case NonFatal(_) => default }
}

class PlainInput[J: IsoString](input: InputStream, converter: SupportConverter[J]) extends Input {
  val isoFormat: IsoString[J] = implicitly

  private def readFully(): String = {
    Using.streamReader(input, IO.utf8) { reader =>
      val builder = new StringBuilder()
@@ -28,18 +26,7 @@ class PlainInput[J: IsoString](input: InputStream, converter: SupportConverter[J
    }
  }

  override def read[T: JsonReader](): T = {
    val string = readFully()
    val json = isoFormat.from(string)
    converter.fromJson(json) match {
      case Success(value) => value
      case Failure(ex) => throw ex
    }
  }
  def read[T: JsonReader]() = converter.fromJson(isoFormat.from(readFully())).get

  override def read[T: JsonReader](default: => T): T =
    try read[T]()
    catch { case _: Exception => default }

  override def close(): Unit = input.close()
  def close() = input.close()
}
@@ -1,12 +1,8 @@
package sbt.internal.util

import sbt.io.Using

import java.io.{ Closeable, OutputStream }

import scala.util.{ Failure, Success }

import sjsonnew.{ IsoString, JsonWriter, SupportConverter }
import sbt.io.Using

trait Output extends Closeable {
  def write[T: JsonWriter](value: T): Unit
@@ -14,19 +10,16 @@ trait Output extends Closeable {

class PlainOutput[J: IsoString](output: OutputStream, converter: SupportConverter[J]) extends Output {
  val isoFormat: IsoString[J] = implicitly
  override def write[T: JsonWriter](value: T): Unit = {
    converter.toJson(value) match {
      case Success(js) =>
        val asString = isoFormat.to(js)
        Using.bufferedOutputStream(output) { writer =>
          val out = new java.io.PrintWriter(writer)
          out.print(asString)
          out.flush()
        }
      case Failure(ex) =>
        throw ex

  def write[T: JsonWriter](value: T) = {
    val js = converter.toJson(value).get
    val asString = isoFormat.to(js)
    Using.bufferedOutputStream(output) { writer =>
      val out = new java.io.PrintWriter(writer)
      out.print(asString)
      out.flush()
    }
  }

  override def close(): Unit = output.close()
  def close() = output.close()
}
@@ -0,0 +1,23 @@
package sbt.internal.util

import scala.json.ast.unsafe._
import sjsonnew._, support.scalajson.unsafe._

class FileInfoSpec extends UnitSpec {
  val file = new java.io.File(".").getAbsoluteFile
  val fileInfo: ModifiedFileInfo = FileModified(file, file.lastModified())
  val filesInfo = FilesInfo(Set(fileInfo))

  it should "round trip" in assertRoundTrip(filesInfo)(FileInfo.lastModified.formats, FileInfo.lastModified.formats)

  def assertRoundTrip[A: JsonWriter: JsonReader](x: A) = {
    val jsonString: String = toJsonString(x)
    val jValue: JValue = Parser.parseUnsafe(jsonString)
    val y: A = Converter.fromJson[A](jValue).get
    assert(x === y)
  }

  def assertJsonString[A: JsonWriter](x: A, s: String) = assert(toJsonString(x) === s)

  def toJsonString[A: JsonWriter](x: A): String = CompactPrinter(Converter.toJson(x).get)
}
@@ -131,7 +131,7 @@ class Difference(val store: CacheStore, val style: FileInfo.Style, val defineCle
  }
  private def clearCache() = store.delete()

  private def cachedFilesInfo = store.read(default = FilesInfo.empty[style.F]).files //(style.formats).files
  private def cachedFilesInfo = store.read(default = FilesInfo.empty[style.F])(style.formats).files
  private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file)

  def apply[T](files: Set[File])(f: ChangeReport[File] => T): T =
@@ -164,7 +164,7 @@ class Difference(val store: CacheStore, val style: FileInfo.Style, val defineCle
    val result = f(report)
    val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo

    store.write(info)
    store.write(info)(style.formats)

    result
  }
@@ -2,11 +2,11 @@ import sbt._
import Keys._

object Dependencies {
  lazy val scala210 = "2.10.6"
  lazy val scala211 = "2.11.8"
  lazy val scala212 = "2.12.0-M4"
  val scala210 = "2.10.6"
  val scala211 = "2.11.8"
  val scala212 = "2.12.0"

  private lazy val sbtIO = "org.scala-sbt" %% "io" % "1.0.0-M6"
  private val sbtIO = "org.scala-sbt" %% "io" % "1.0.0-M7"

  def getSbtModulePath(key: String, name: String) = {
    val localProps = new java.util.Properties()
@@ -26,17 +26,17 @@ object Dependencies {

  def addSbtIO(p: Project): Project = addSbtModule(p, sbtIoPath, "io", sbtIO)

  lazy val jline = "jline" % "jline" % "2.13"
  val jline = "jline" % "jline" % "2.13"

  lazy val scalaCompiler = Def.setting { "org.scala-lang" % "scala-compiler" % scalaVersion.value }
  lazy val scalaReflect = Def.setting { "org.scala-lang" % "scala-reflect" % scalaVersion.value }
  val scalaCompiler = Def.setting { "org.scala-lang" % "scala-compiler" % scalaVersion.value }
  val scalaReflect = Def.setting { "org.scala-lang" % "scala-reflect" % scalaVersion.value }

  val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.13.1"
  val scalatest = "org.scalatest" %% "scalatest" % "2.2.6"

  lazy val parserCombinator211 = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.4"
  val parserCombinator211 = "org.scala-lang.modules" %% "scala-parser-combinators" % "1.0.4"

  lazy val sjsonnewVersion = "0.5.1"
  lazy val sjsonnew = "com.eed3si9n" %% "sjson-new-core" % sjsonnewVersion
  lazy val sjsonnewScalaJson = "com.eed3si9n" %% "sjson-new-scalajson" % sjsonnewVersion
  val sjsonnewVersion = "0.5.1"
  val sjsonnew = "com.eed3si9n" %% "sjson-new-core" % sjsonnewVersion
  val sjsonnewScalaJson = "com.eed3si9n" %% "sjson-new-scalajson" % sjsonnewVersion
}
@@ -1 +1 @@
addSbtPlugin("com.eed3si9n" % "sbt-doge" % "0.1.3")
addSbtPlugin("com.eed3si9n" % "sbt-doge" % "0.1.5")