mirror of https://github.com/sbt/sbt.git
Initial xsbt commit
commit f83d59b8cc
@@ -0,0 +1,58 @@
package xsbt

import sbinary.{CollectionTypes, Format, JavaFormats}
import java.io.File

trait Cache[I,O]
{
	def apply(file: File)(i: I): Either[O, O => Unit]
}
trait SBinaryFormats extends CollectionTypes with JavaFormats with NotNull
{
	//TODO: add basic types minus FileFormat
}
object Cache extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits
{
	def cache[I,O](implicit c: Cache[I,O]): Cache[I,O] = c
	def outputCache[O](implicit c: OutputCache[O]): OutputCache[O] = c
	def inputCache[O](implicit c: InputCache[O]): InputCache[O] = c

	def wrapInputCache[I,DI](implicit convert: I => DI, base: InputCache[DI]): InputCache[I] =
		new WrappedInputCache(convert, base)
	def wrapOutputCache[O,DO](implicit convert: O => DO, reverse: DO => O, base: OutputCache[DO]): OutputCache[O] =
		new WrappedOutputCache[O,DO](convert, reverse, base)

	/* Note: Task[O] { type Input = I } is written out because ITask[I,O] did not work (type could not be inferred properly) with a task
	* with an HList input.*/
	def apply[I,O](task: Task[O] { type Input = I }, file: File)(implicit cache: Cache[I,O]): Task[O] { type Input = I } =
		task match { case m: M[I,O,_] =>
			new M[I,O,Result[O]](None)(m.dependencies)(m.extract)(computeWithCache(m, cache, file))
		}
	private def computeWithCache[I,O](m: M[I,O,_], cache: Cache[I,O], file: File)(in: I): Result[O] =
		cache(file)(in) match
		{
			case Left(value) => Value(value)
			case Right(store) => NewTask(m.map { out => store(out); out })
		}
}
trait BasicCacheImplicits extends NotNull
{
	implicit def basicInputCache[I](implicit format: Format[I], equiv: Equiv[I]): InputCache[I] =
		new BasicInputCache(format, equiv)
	implicit def basicOutputCache[O](implicit format: Format[O]): OutputCache[O] =
		new BasicOutputCache(format)

	implicit def ioCache[I,O](implicit input: InputCache[I], output: OutputCache[O]): Cache[I,O] =
		new SeparatedCache(input, output)
	implicit def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b }
}
trait HListCacheImplicits extends HLists
{
	implicit def hConsInputCache[H,T<:HList](implicit headCache: InputCache[H], tailCache: InputCache[T]): InputCache[HCons[H,T]] =
		new HConsInputCache(headCache, tailCache)
	implicit lazy val hNilInputCache: InputCache[HNil] = new HNilInputCache

	implicit def hConsOutputCache[H,T<:HList](implicit headCache: OutputCache[H], tailCache: OutputCache[T]): OutputCache[HCons[H,T]] =
		new HConsOutputCache(headCache, tailCache)
	implicit lazy val hNilOutputCache: OutputCache[HNil] = new HNilOutputCache
}
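The object Cache above is mostly implicit plumbing: basicInputCache/basicOutputCache build caches from sbinary Formats, ioCache pairs them into a Cache[I,O], and Cache.apply rewires a Task to consult such a cache. A minimal sketch of how that wiring is intended to resolve for plain values (illustrative only, not part of the commit; it assumes the sketch sits in package xsbt and that sbinary's DefaultProtocol supplies Format[Int] and Format[String]):

object CacheWiringExample
{
	import java.io.File
	import sbinary.DefaultProtocol._
	import Cache._

	// basicInputCache + defaultEquiv yield InputCache[Int], basicOutputCache yields
	// OutputCache[String], and ioCache combines them into the requested Cache[Int, String].
	val intToString: Cache[Int, String] = cache[Int, String]

	def describe(cacheFile: File, n: Int): String =
		intToString(cacheFile)(n) match
		{
			case Left(cached) => cached    // stored input matched; reuse the stored output
			case Right(store) =>           // stale or missing; recompute and persist
				val fresh = "value " + n
				store(fresh)
				fresh
		}
}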
@@ -0,0 +1,74 @@
package xsbt

import java.io.{File, IOException}
import sbinary.{DefaultProtocol, Format}
import DefaultProtocol._
import Function.tupled

sealed trait FileInfo extends NotNull
{
	val file: File
}
sealed trait HashFileInfo extends FileInfo
{
	val hash: List[Byte]
}
sealed trait ModifiedFileInfo extends FileInfo
{
	val lastModified: Long
}
sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo

private final case class FileHash(file: File, hash: List[Byte]) extends HashFileInfo
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo

object FileInfo
{
	sealed trait Style[F <: FileInfo] extends NotNull
	{
		implicit def apply(file: File): F
		implicit def unapply(info: F): File = info.file
		implicit val format: Format[F]
		import Cache._
		implicit def infoInputCache: InputCache[File] = wrapInputCache[File,F]
		implicit def infoOutputCache: OutputCache[File] = wrapOutputCache[File,F]
	}
	object full extends Style[HashModifiedFileInfo]
	{
		implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified)
		def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified)
		implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), tupled(make _))
	}
	object hash extends Style[HashFileInfo]
	{
		implicit def apply(file: File): HashFileInfo = make(file, computeHash(file).toList)
		def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash)
		implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), tupled(make _))
		private def computeHash(file: File) = try { Hash(file) } catch { case e: Exception => Nil }
	}
	object lastModified extends Style[ModifiedFileInfo]
	{
		implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified)
		def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified)
		implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), tupled(make _))
	}
}

final case class FilesInfo[F <: FileInfo] private(files: Set[F]) extends NotNull
object FilesInfo
{
	sealed trait Style[F <: FileInfo] extends NotNull
	{
		implicit def apply(files: Iterable[File]): FilesInfo[F]
		implicit val format: Format[FilesInfo[F]]
	}
	private final class BasicStyle[F <: FileInfo](fileStyle: FileInfo.Style[F])(implicit infoFormat: Format[F]) extends Style[F]
	{
		implicit def apply(files: Iterable[File]) = FilesInfo( (Set() ++ files.map(_.getAbsoluteFile)).map(fileStyle.apply) )
		implicit val format: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs))
	}
	lazy val full: Style[HashModifiedFileInfo] = new BasicStyle(FileInfo.full)(FileInfo.full.format)
	lazy val hash: Style[HashFileInfo] = new BasicStyle(FileInfo.hash)(FileInfo.hash.format)
	lazy val lastModified: Style[ModifiedFileInfo] = new BasicStyle(FileInfo.lastModified)(FileInfo.lastModified.format)
}
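The three styles above differ only in what they record about a file, and importing a style's members makes plain Files usable as cache inputs/outputs. An illustrative sketch (not part of this commit; the path is hypothetical and the code is assumed to sit in package xsbt):

object FileInfoExample
{
	import java.io.File
	val src = new File("project/build.properties")

	val byStamp: ModifiedFileInfo    = FileInfo.lastModified(src) // path + last-modified time
	val byHash: HashFileInfo         = FileInfo.hash(src)         // path + content hash
	val byBoth: HashModifiedFileInfo = FileInfo.full(src)         // path + hash + last-modified time

	// Bringing a style's members into scope supplies InputCache[File]/OutputCache[File]
	// (via Cache.wrapInputCache/wrapOutputCache), so Files can participate in a cache directly.
	import FileInfo.hash._
	val fileInput: InputCache[File] = infoInputCache
}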
@@ -0,0 +1,44 @@
package xsbt

import java.io.{InputStream,OutputStream}
import metascala.HLists.{HCons,HList,HNil}

class HNilInputCache extends NoInputCache[HNil]
class HConsInputCache[H,T <: HList](val headCache: InputCache[H], val tailCache: InputCache[T]) extends InputCache[HCons[H,T]]
{
	def uptodate(in: HCons[H,T])(cacheStream: InputStream) =
	{
		lazy val headResult = headCache.uptodate(in.head)(cacheStream)
		lazy val tailResult = tailCache.uptodate(in.tail)(cacheStream)
		new CacheResult
		{
			lazy val uptodate = headResult.uptodate && tailResult.uptodate
			def update(outputStream: OutputStream) =
			{
				headResult.update(outputStream)
				tailResult.update(outputStream)
			}
		}
	}
	def force(in: HCons[H,T])(cacheStream: OutputStream) =
	{
		headCache.force(in.head)(cacheStream)
		tailCache.force(in.tail)(cacheStream)
	}
}

class HNilOutputCache extends NoOutputCache[HNil](HNil)
class HConsOutputCache[H,T <: HList](val headCache: OutputCache[H], val tailCache: OutputCache[T]) extends OutputCache[HCons[H,T]]
{
	def loadCached(cacheStream: InputStream) =
	{
		val head = headCache.loadCached(cacheStream)
		val tail = tailCache.loadCached(cacheStream)
		HCons(head, tail)
	}
	def update(out: HCons[H,T])(cacheStream: OutputStream)
	{
		headCache.update(out.head)(cacheStream)
		tailCache.update(out.tail)(cacheStream)
	}
}
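Element caches compose structurally, mirroring the shape of the HList itself; the HListCacheImplicits in Cache.scala perform exactly this composition implicitly. A small explicit sketch (illustrative only, not part of the commit; assumed to sit in package xsbt):

object HListCacheExample
{
	import metascala.HLists.{HCons, HNil}

	// An InputCache for a two-element HList, built by hand from caches for the element types.
	def pairCache[A, B](a: InputCache[A], b: InputCache[B]): InputCache[HCons[A, HCons[B, HNil]]] =
		new HConsInputCache(a, new HConsInputCache(b, new HNilInputCache))
}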
@@ -0,0 +1,19 @@
package xsbt

import java.io.{InputStream,OutputStream}

class NoInputCache[T] extends InputCache[T]
{
	def uptodate(in: T)(cacheStream: InputStream) =
		new CacheResult
		{
			def uptodate = true
			def update(outputStream: OutputStream) {}
		}
	def force(in: T)(outputStream: OutputStream) {}
}
class NoOutputCache[O](create: => O) extends OutputCache[O]
{
	def loadCached(cacheStream: InputStream) = create
	def update(out: O)(cacheStream: OutputStream) {}
}
@@ -0,0 +1,80 @@
package xsbt

import sbinary.Format
import sbinary.JavaIO._
import java.io.{File, InputStream, OutputStream}

trait CacheResult
{
	def uptodate: Boolean
	def update(stream: OutputStream): Unit
}
trait InputCache[I] extends NotNull
{
	def uptodate(in: I)(cacheStream: InputStream): CacheResult
	def force(in: I)(cacheStream: OutputStream): Unit
}
trait OutputCache[O] extends NotNull
{
	def loadCached(cacheStream: InputStream): O
	def update(out: O)(cacheStream: OutputStream): Unit
}
class SeparatedCache[I,O](input: InputCache[I], output: OutputCache[O]) extends Cache[I,O]
{
	def apply(file: File)(in: I) =
		try { applyImpl(file, in) }
		catch { case _: Exception => Right(update(file)(in)) }
	protected def applyImpl(file: File, in: I) =
	{
		OpenResource.fileInputStream(file) { stream =>
			val cache = input.uptodate(in)(stream)
			lazy val doUpdate = (result: O) =>
			{
				OpenResource.fileOutputStream(false)(file) { stream =>
					cache.update(stream)
					output.update(result)(stream)
				}
			}
			if(cache.uptodate)
				try { Left(output.loadCached(stream)) }
				catch { case _: Exception => Right(doUpdate) }
			else
				Right(doUpdate)
		}
	}
	protected def update(file: File)(in: I)(out: O)
	{
		OpenResource.fileOutputStream(false)(file) { stream =>
			input.force(in)(stream)
			output.update(out)(stream)
		}
	}
}
class BasicOutputCache[O](val format: Format[O]) extends OutputCache[O]
{
	def loadCached(cacheStream: InputStream): O = format.reads(cacheStream)
	def update(out: O)(cacheStream: OutputStream): Unit = format.writes(cacheStream, out)
}
class BasicInputCache[I](val format: Format[I], val equiv: Equiv[I]) extends InputCache[I]
{
	def uptodate(in: I)(cacheStream: InputStream) =
	{
		val loaded = format.reads(cacheStream)
		new CacheResult
		{
			val uptodate = equiv.equiv(in, loaded)
			def update(outputStream: OutputStream) = force(in)(outputStream)
		}
	}
	def force(in: I)(outputStream: OutputStream) = format.writes(outputStream, in)
}
class WrappedInputCache[I,DI](val convert: I => DI, val base: InputCache[DI]) extends InputCache[I]
{
	def uptodate(in: I)(cacheStream: InputStream) = base.uptodate(convert(in))(cacheStream)
	def force(in: I)(outputStream: OutputStream) = base.force(convert(in))(outputStream)
}
class WrappedOutputCache[O,DO](val convert: O => DO, val reverse: DO => O, val base: OutputCache[DO]) extends OutputCache[O]
{
	def loadCached(cacheStream: InputStream): O = reverse(base.loadCached(cacheStream))
	def update(out: O)(cacheStream: OutputStream): Unit = base.update(convert(out))(cacheStream)
}
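A SeparatedCache stores the serialized input followed by the serialized output in one file: apply answers Left(storedOutput) when the stored input still matches, Right(store) otherwise, and calling store(newOutput) rewrites both sections. A sketch of driving it by hand (illustrative only, not part of the commit; Formats are taken implicitly at the call site, for example from sbinary's DefaultProtocol, and the code is assumed to sit in package xsbt):

object SeparatedCacheExample
{
	import java.io.File
	import sbinary.Format

	def cachedLength(cacheFile: File, input: String)(implicit inF: Format[String], outF: Format[Int]): Int =
	{
		val c = new SeparatedCache(
			new BasicInputCache(inF, Cache.defaultEquiv[String]),
			new BasicOutputCache(outF))
		c(cacheFile)(input) match
		{
			case Left(stored) => stored    // input unchanged since the last run
			case Right(store) =>           // first run, changed input, or unreadable cache
				val computed = input.length
				store(computed)
				computed
		}
	}
}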
Binary file not shown.
@@ -0,0 +1,21 @@
package xsbt

import java.io.File

object CacheTest// extends Properties("Cache test")
{
	import Task._
	import Cache._
	import FileInfo.hash._
	def checkFormattable(file: File)
	{
		val createTask = Task { new File("test") }
		val lengthTask = createTask map { f => println("File length: " + f.length); f.length }
		val cached = Cache(lengthTask, new File("/tmp/length-cache"))

		val cTask = (createTask :: cached :: TNil) map { case (file :: len :: HNil) => println("File: " + file + " length: " + len); len :: file :: HNil }
		val cachedC = Cache(cTask, new File("/tmp/c-cache"))

		TaskRunner(cachedC).left.foreach(_.foreach(f => f.exception.printStackTrace))
	}
}
@@ -0,0 +1,15 @@
package xsbt

import metascala.HLists.{HCons => metaHCons, HList => metaHList, HNil => metaHNil}

object HLists extends HLists
// add an extractor to metascala.HLists and define aliases to the HList classes in the xsbt namespace
trait HLists extends NotNull
{
	object :: { def unapply[H,T<:HList](list: HCons[H,T]) = Some((list.head,list.tail)) }
	final val HNil = metaHNil
	final type ::[H, T <: HList] = metaHCons[H, T]
	final type HNil = metaHNil
	final type HList = metaHList
	final type HCons[H, T <: HList] = metaHCons[H, T]
}
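With these aliases in scope, xsbt code can build HLists with metascala's :: prepend and take them apart with the :: extractor defined above, as CacheTest does. A short sketch (illustrative only, not part of the commit; assumed to sit in package xsbt):

object HListsExample
{
	import HLists._

	val entry = "file.txt" :: 3 :: HNil
	// the :: extractor splits an HCons into head and tail, terminating at HNil
	def describe = entry match { case name :: length :: HNil => name + " has length " + length }
}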
@@ -0,0 +1,22 @@
package xsbt

import scala.collection.{mutable,immutable}

// immutable.HashSet is not suitable for multi-threaded access, so this
// implementation uses an underlying immutable.TreeHashMap, which is suitable
object TreeHashSet
{
	def apply[T](contents: T*) = new TreeHashSet(immutable.TreeHashMap( andUnit(contents) : _*))
	def andUnit[T](contents: Iterable[T]) = contents.map(c => (c,()) ).toSeq
}
final class TreeHashSet[T](backing: immutable.TreeHashMap[T,Unit]) extends immutable.Set[T]
{
	import TreeHashSet.andUnit
	override def contains(t: T) = backing.contains(t)
	override def ++(s: Iterable[T]) = new TreeHashSet(backing ++ andUnit(s))
	override def +(s: T) = ++( Seq(s) )
	override def -(s: T) = new TreeHashSet(backing - s)
	override def elements = backing.keys
	override def empty[A] = TreeHashSet[A]()
	override def size = backing.size
}
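TreeHashSet behaves like any other immutable Set: every operation returns a new set backed by a new immutable.TreeHashMap. A brief sketch (illustrative only, not part of the commit; assumed to sit in package xsbt):

object TreeHashSetExample
{
	val s0 = TreeHashSet("Cache.scala", "FileInfo.scala")
	val s1 = s0 + "Logger.scala"
	val removed = s1 - "Cache.scala"
	// s0 is left unchanged by the addition and removal above
	val checks = s1.contains("Logger.scala") && !s0.contains("Logger.scala") && removed.size == 2
}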
Binary file not shown.
@@ -0,0 +1,18 @@
package xsbt

object ErrorHandling
{
	def translate[T](msg: => String)(f: => T) =
		try { f }
		catch { case e => throw new TranslatedException(msg + e.toString, e) }
	def wideConvert[T](f: => T): Either[Throwable, T] =
		try { Right(f) }
		catch { case e => Left(e) } // TODO: restrict type of e
	def convert[T](f: => T): Either[Exception, T] =
		try { Right(f) }
		catch { case e: Exception => Left(e) }
}
final class TranslatedException private[xsbt](msg: String, cause: Throwable) extends RuntimeException(msg, cause)
{
	override def toString = msg
}
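The three helpers give progressively different error handling around an arbitrary computation: translate rethrows with a friendlier message, wideConvert and convert return the failure as a Left instead of throwing. A usage sketch (illustrative only, not part of the commit; assumed to sit in package xsbt):

object ErrorHandlingExample
{
	import java.io.{File, FileInputStream}

	// rethrows any failure as a TranslatedException carrying the given message prefix
	def firstByte(f: File): Int =
		ErrorHandling.translate("Could not read " + f + ": ") {
			val in = new FileInputStream(f)
			try { in.read() } finally { in.close() }
		}

	// Left(exception) instead of a throw; wideConvert does the same for any Throwable
	def firstByteSafe(f: File): Either[Exception, Int] =
		ErrorHandling.convert {
			val in = new FileInputStream(f)
			try { in.read() } finally { in.close() }
		}
}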
@@ -0,0 +1,71 @@
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package xsbt

abstract class Logger extends NotNull
{
	def getLevel: Level.Value
	def setLevel(newLevel: Level.Value)
	def enableTrace(flag: Boolean)
	def traceEnabled: Boolean

	def atLevel(level: Level.Value) = level.id >= getLevel.id
	def trace(t: => Throwable): Unit
	final def debug(message: => String): Unit = log(Level.Debug, message)
	final def info(message: => String): Unit = log(Level.Info, message)
	final def warn(message: => String): Unit = log(Level.Warn, message)
	final def error(message: => String): Unit = log(Level.Error, message)
	def success(message: => String): Unit
	def log(level: Level.Value, message: => String): Unit
	def control(event: ControlEvent.Value, message: => String): Unit

	def logAll(events: Seq[LogEvent]): Unit
	/** Defined in terms of other methods in Logger and should not be called from them. */
	final def log(event: LogEvent)
	{
		event match
		{
			case s: Success => success(s.msg)
			case l: Log => log(l.level, l.msg)
			case t: Trace => trace(t.exception)
			case setL: SetLevel => setLevel(setL.newLevel)
			case setT: SetTrace => enableTrace(setT.enabled)
			case c: ControlEvent => control(c.event, c.msg)
		}
	}
}

sealed trait LogEvent extends NotNull
final class Success(val msg: String) extends LogEvent
final class Log(val level: Level.Value, val msg: String) extends LogEvent
final class Trace(val exception: Throwable) extends LogEvent
final class SetLevel(val newLevel: Level.Value) extends LogEvent
final class SetTrace(val enabled: Boolean) extends LogEvent
final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent

object ControlEvent extends Enumeration
{
	val Start, Header, Finish = Value
}

/** An enumeration defining the levels available for logging. A level includes all of the levels
* with id larger than its own id. For example, Warn (id=3) includes Error (id=4).*/
object Level extends Enumeration with NotNull
{
	val Debug = Value(1, "debug")
	val Info = Value(2, "info")
	val Warn = Value(3, "warn")
	val Error = Value(4, "error")
	/** Defines the label to use for success messages. A success message is logged at the info level but
	* uses this label. Because the label for levels is defined in this module, the success
	* label is also defined here. */
	val SuccessLabel = "success"

	// added because elements was renamed to iterator in 2.8.0 nightly
	def levels = Debug :: Info :: Warn :: Error :: Nil
	/** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */
	def apply(s: String) = levels.find(s == _.toString)
	/** Same as apply, defined for use in pattern matching. */
	private[xsbt] def unapply(s: String) = apply(s)
}
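Logger leaves the actual output strategy abstract; only the level-threshold check (atLevel) and the LogEvent dispatch (log(event)) are fixed. A minimal concrete logger that prints to the console, as a sketch of how the abstract members fit together (illustrative only, not part of the commit and not sbt's real console logger; assumed to sit in package xsbt):

object ExampleConsoleLogger extends Logger
{
	private var currentLevel: Level.Value = Level.Info
	private var traceFlag = true

	def getLevel = currentLevel
	def setLevel(newLevel: Level.Value) { currentLevel = newLevel }
	def enableTrace(flag: Boolean) { traceFlag = flag }
	def traceEnabled = traceFlag

	def trace(t: => Throwable) { if(traceEnabled) t.printStackTrace() }
	def success(message: => String) { log(Level.Info, "[" + Level.SuccessLabel + "] " + message) }
	// atLevel (inherited) suppresses messages below the current threshold
	def log(level: Level.Value, message: => String) { if(atLevel(level)) println("[" + level + "] " + message) }
	def control(event: ControlEvent.Value, message: => String) { println("[" + event + "] " + message) }
	def logAll(events: Seq[LogEvent]) { events.foreach(e => log(e)) }
}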