mirror of https://github.com/sbt/sbt.git
Merge remote-tracking branch 'util-integration/develop' into util-integration
This commit is contained in:
commit
4d1bd18296
|
|
@ -0,0 +1,3 @@
|
|||
Simple Build Tool: Control Component
|
||||
Copyright 2009 Mark Harrah
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import java.io.IOException
|
||||
|
||||
object ErrorHandling {
|
||||
def translate[T](msg: => String)(f: => T) =
|
||||
try {
|
||||
f
|
||||
} catch {
|
||||
case e: IOException => throw new TranslatedIOException(msg + e.toString, e)
|
||||
case e: Exception => throw new TranslatedException(msg + e.toString, e)
|
||||
}
|
||||
|
||||
def wideConvert[T](f: => T): Either[Throwable, T] =
|
||||
try {
|
||||
Right(f)
|
||||
} catch {
|
||||
case ex @ (_: Exception | _: StackOverflowError) => Left(ex)
|
||||
case err @ (_: ThreadDeath | _: VirtualMachineError) => throw err
|
||||
case x: Throwable => Left(x)
|
||||
}
|
||||
|
||||
def convert[T](f: => T): Either[Exception, T] =
|
||||
try {
|
||||
Right(f)
|
||||
} catch { case e: Exception => Left(e) }
|
||||
|
||||
def reducedToString(e: Throwable): String =
|
||||
if (e.getClass == classOf[RuntimeException]) {
|
||||
val msg = e.getMessage
|
||||
if (msg == null || msg.isEmpty) e.toString else msg
|
||||
} else
|
||||
e.toString
|
||||
}
|
||||
|
||||
sealed class TranslatedException private[sbt] (msg: String, cause: Throwable)
|
||||
extends RuntimeException(msg, cause) {
|
||||
override def toString = msg
|
||||
}
|
||||
|
||||
final class TranslatedIOException private[sbt] (msg: String, cause: IOException)
|
||||
extends TranslatedException(msg, cause)
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
/** Defines a function to call as sbt exits.*/
|
||||
trait ExitHook {
|
||||
|
||||
/** Subclasses should implement this method, which is called when this hook is executed. */
|
||||
def runBeforeExiting(): Unit
|
||||
|
||||
}
|
||||
|
||||
object ExitHook {
|
||||
def apply(f: => Unit): ExitHook = new ExitHook { def runBeforeExiting() = f }
|
||||
}
|
||||
|
||||
object ExitHooks {
|
||||
|
||||
/** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */
|
||||
def runExitHooks(exitHooks: Seq[ExitHook]): Seq[Throwable] =
|
||||
exitHooks.flatMap(hook => ErrorHandling.wideConvert(hook.runBeforeExiting()).left.toOption)
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2011 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
final class MessageOnlyException(override val toString: String) extends RuntimeException(toString)
|
||||
|
||||
/**
|
||||
* A dummy exception for the top-level exception handler to know that an exception
|
||||
* has been handled, but is being passed further up to indicate general failure.
|
||||
*/
|
||||
final class AlreadyHandledException(val underlying: Throwable) extends RuntimeException
|
||||
|
||||
/**
|
||||
* A marker trait for a top-level exception handler to know that this exception
|
||||
* doesn't make sense to display.
|
||||
*/
|
||||
trait UnprintableException extends Throwable
|
||||
|
||||
/**
|
||||
* A marker trait that refines UnprintableException to indicate to a top-level exception handler
|
||||
* that the code throwing this exception has already provided feedback to the user about the error condition.
|
||||
*/
|
||||
trait FeedbackProvidedException extends UnprintableException
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package xsbti;
|
||||
|
||||
import java.util.function.Supplier;
|
||||
|
||||
public interface Logger {
|
||||
void error(Supplier<String> msg);
|
||||
void warn(Supplier<String> msg);
|
||||
void info(Supplier<String> msg);
|
||||
void debug(Supplier<String> msg);
|
||||
void trace(Supplier<Throwable> exception);
|
||||
}
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package xsbti;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.Optional;
|
||||
|
||||
public interface Position
|
||||
{
|
||||
Optional<Integer> line();
|
||||
String lineContent();
|
||||
Optional<Integer> offset();
|
||||
|
||||
// pointer to the column position of the error/warning
|
||||
Optional<Integer> pointer();
|
||||
Optional<String> pointerSpace();
|
||||
|
||||
Optional<String> sourcePath();
|
||||
Optional<File> sourceFile();
|
||||
|
||||
// Default values to avoid breaking binary compatibility
|
||||
default Optional<Integer> startOffset() { return Optional.empty(); }
|
||||
default Optional<Integer> endOffset() { return Optional.empty(); }
|
||||
default Optional<Integer> startLine() { return Optional.empty(); }
|
||||
default Optional<Integer> startColumn() { return Optional.empty(); }
|
||||
default Optional<Integer> endLine() { return Optional.empty(); }
|
||||
default Optional<Integer> endColumn() { return Optional.empty(); }
|
||||
}
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package xsbti;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
public interface Problem
|
||||
{
|
||||
String category();
|
||||
Severity severity();
|
||||
String message();
|
||||
Position position();
|
||||
|
||||
// Default value to avoid breaking binary compatibility
|
||||
/**
|
||||
* If present, the string shown to the user when displaying this Problem.
|
||||
* Otherwise, the Problem will be shown in an implementation-defined way
|
||||
* based on the values of its other fields.
|
||||
*/
|
||||
default Optional<String> rendered() { return Optional.empty(); }
|
||||
}
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package xsbti;
|
||||
|
||||
public enum Severity
|
||||
{
|
||||
Info, Warn, Error
|
||||
}
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
package xsbti;
|
||||
|
||||
/** Used to pass a pair of values. */
|
||||
public interface T2<A1, A2>
|
||||
{
|
||||
public A1 get1();
|
||||
public A2 get2();
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
Simple Build Tool: Logging Component
|
||||
Copyright 2008, 2009, 2010 Mark Harrah, Tony Sloane
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
27
internal/util-logging/src/main/contraband-scala/sbt/internal/util/AbstractEntry.scala
generated
Normal file
27
internal/util-logging/src/main/contraband-scala/sbt/internal/util/AbstractEntry.scala
generated
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
abstract class AbstractEntry(
|
||||
val channelName: Option[String],
|
||||
val execId: Option[String]) extends Serializable {
|
||||
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: AbstractEntry => (this.channelName == x.channelName) && (this.execId == x.execId)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (17 + "sbt.internal.util.AbstractEntry".##) + channelName.##) + execId.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"AbstractEntry(" + channelName + ", " + execId + ")"
|
||||
}
|
||||
}
|
||||
object AbstractEntry {
|
||||
|
||||
}
|
||||
15
internal/util-logging/src/main/contraband-scala/sbt/internal/util/LogOption.scala
generated
Normal file
15
internal/util-logging/src/main/contraband-scala/sbt/internal/util/LogOption.scala
generated
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
/** value for logging options like color */
|
||||
sealed abstract class LogOption extends Serializable
|
||||
object LogOption {
|
||||
|
||||
|
||||
case object Always extends LogOption
|
||||
case object Never extends LogOption
|
||||
case object Auto extends LogOption
|
||||
}
|
||||
59
internal/util-logging/src/main/contraband-scala/sbt/internal/util/ProgressEvent.scala
generated
Normal file
59
internal/util-logging/src/main/contraband-scala/sbt/internal/util/ProgressEvent.scala
generated
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
/** used by super shell */
|
||||
final class ProgressEvent private (
|
||||
val level: String,
|
||||
val items: Vector[sbt.internal.util.ProgressItem],
|
||||
val lastTaskCount: Option[Int],
|
||||
channelName: Option[String],
|
||||
execId: Option[String]) extends sbt.internal.util.AbstractEntry(channelName, execId) with Serializable {
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: ProgressEvent => (this.level == x.level) && (this.items == x.items) && (this.lastTaskCount == x.lastTaskCount) && (this.channelName == x.channelName) && (this.execId == x.execId)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (37 * (37 * (37 * (17 + "sbt.internal.util.ProgressEvent".##) + level.##) + items.##) + lastTaskCount.##) + channelName.##) + execId.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"ProgressEvent(" + level + ", " + items + ", " + lastTaskCount + ", " + channelName + ", " + execId + ")"
|
||||
}
|
||||
private[this] def copy(level: String = level, items: Vector[sbt.internal.util.ProgressItem] = items, lastTaskCount: Option[Int] = lastTaskCount, channelName: Option[String] = channelName, execId: Option[String] = execId): ProgressEvent = {
|
||||
new ProgressEvent(level, items, lastTaskCount, channelName, execId)
|
||||
}
|
||||
def withLevel(level: String): ProgressEvent = {
|
||||
copy(level = level)
|
||||
}
|
||||
def withItems(items: Vector[sbt.internal.util.ProgressItem]): ProgressEvent = {
|
||||
copy(items = items)
|
||||
}
|
||||
def withLastTaskCount(lastTaskCount: Option[Int]): ProgressEvent = {
|
||||
copy(lastTaskCount = lastTaskCount)
|
||||
}
|
||||
def withLastTaskCount(lastTaskCount: Int): ProgressEvent = {
|
||||
copy(lastTaskCount = Option(lastTaskCount))
|
||||
}
|
||||
def withChannelName(channelName: Option[String]): ProgressEvent = {
|
||||
copy(channelName = channelName)
|
||||
}
|
||||
def withChannelName(channelName: String): ProgressEvent = {
|
||||
copy(channelName = Option(channelName))
|
||||
}
|
||||
def withExecId(execId: Option[String]): ProgressEvent = {
|
||||
copy(execId = execId)
|
||||
}
|
||||
def withExecId(execId: String): ProgressEvent = {
|
||||
copy(execId = Option(execId))
|
||||
}
|
||||
}
|
||||
object ProgressEvent {
|
||||
|
||||
def apply(level: String, items: Vector[sbt.internal.util.ProgressItem], lastTaskCount: Option[Int], channelName: Option[String], execId: Option[String]): ProgressEvent = new ProgressEvent(level, items, lastTaskCount, channelName, execId)
|
||||
def apply(level: String, items: Vector[sbt.internal.util.ProgressItem], lastTaskCount: Int, channelName: String, execId: String): ProgressEvent = new ProgressEvent(level, items, Option(lastTaskCount), Option(channelName), Option(execId))
|
||||
}
|
||||
41
internal/util-logging/src/main/contraband-scala/sbt/internal/util/ProgressItem.scala
generated
Normal file
41
internal/util-logging/src/main/contraband-scala/sbt/internal/util/ProgressItem.scala
generated
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
/**
|
||||
* used by super shell
|
||||
* @param name name of a task
|
||||
* @param elapsedMicros current elapsed time in micro seconds
|
||||
*/
|
||||
final class ProgressItem private (
|
||||
val name: String,
|
||||
val elapsedMicros: Long) extends Serializable {
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: ProgressItem => (this.name == x.name) && (this.elapsedMicros == x.elapsedMicros)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (17 + "sbt.internal.util.ProgressItem".##) + name.##) + elapsedMicros.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"ProgressItem(" + name + ", " + elapsedMicros + ")"
|
||||
}
|
||||
private[this] def copy(name: String = name, elapsedMicros: Long = elapsedMicros): ProgressItem = {
|
||||
new ProgressItem(name, elapsedMicros)
|
||||
}
|
||||
def withName(name: String): ProgressItem = {
|
||||
copy(name = name)
|
||||
}
|
||||
def withElapsedMicros(elapsedMicros: Long): ProgressItem = {
|
||||
copy(elapsedMicros = elapsedMicros)
|
||||
}
|
||||
}
|
||||
object ProgressItem {
|
||||
|
||||
def apply(name: String, elapsedMicros: Long): ProgressItem = new ProgressItem(name, elapsedMicros)
|
||||
}
|
||||
51
internal/util-logging/src/main/contraband-scala/sbt/internal/util/StringEvent.scala
generated
Normal file
51
internal/util-logging/src/main/contraband-scala/sbt/internal/util/StringEvent.scala
generated
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
final class StringEvent private (
|
||||
val level: String,
|
||||
val message: String,
|
||||
channelName: Option[String],
|
||||
execId: Option[String]) extends sbt.internal.util.AbstractEntry(channelName, execId) with Serializable {
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: StringEvent => (this.level == x.level) && (this.message == x.message) && (this.channelName == x.channelName) && (this.execId == x.execId)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (37 * (37 * (17 + "sbt.internal.util.StringEvent".##) + level.##) + message.##) + channelName.##) + execId.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"StringEvent(" + level + ", " + message + ", " + channelName + ", " + execId + ")"
|
||||
}
|
||||
private[this] def copy(level: String = level, message: String = message, channelName: Option[String] = channelName, execId: Option[String] = execId): StringEvent = {
|
||||
new StringEvent(level, message, channelName, execId)
|
||||
}
|
||||
def withLevel(level: String): StringEvent = {
|
||||
copy(level = level)
|
||||
}
|
||||
def withMessage(message: String): StringEvent = {
|
||||
copy(message = message)
|
||||
}
|
||||
def withChannelName(channelName: Option[String]): StringEvent = {
|
||||
copy(channelName = channelName)
|
||||
}
|
||||
def withChannelName(channelName: String): StringEvent = {
|
||||
copy(channelName = Option(channelName))
|
||||
}
|
||||
def withExecId(execId: Option[String]): StringEvent = {
|
||||
copy(execId = execId)
|
||||
}
|
||||
def withExecId(execId: String): StringEvent = {
|
||||
copy(execId = Option(execId))
|
||||
}
|
||||
}
|
||||
object StringEvent {
|
||||
|
||||
def apply(level: String, message: String, channelName: Option[String], execId: Option[String]): StringEvent = new StringEvent(level, message, channelName, execId)
|
||||
def apply(level: String, message: String, channelName: String, execId: String): StringEvent = new StringEvent(level, message, Option(channelName), Option(execId))
|
||||
}
|
||||
32
internal/util-logging/src/main/contraband-scala/sbt/internal/util/SuccessEvent.scala
generated
Normal file
32
internal/util-logging/src/main/contraband-scala/sbt/internal/util/SuccessEvent.scala
generated
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
final class SuccessEvent private (
|
||||
val message: String) extends Serializable {
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: SuccessEvent => (this.message == x.message)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (17 + "sbt.internal.util.SuccessEvent".##) + message.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"SuccessEvent(" + message + ")"
|
||||
}
|
||||
private[this] def copy(message: String = message): SuccessEvent = {
|
||||
new SuccessEvent(message)
|
||||
}
|
||||
def withMessage(message: String): SuccessEvent = {
|
||||
copy(message = message)
|
||||
}
|
||||
}
|
||||
object SuccessEvent {
|
||||
|
||||
def apply(message: String): SuccessEvent = new SuccessEvent(message)
|
||||
}
|
||||
51
internal/util-logging/src/main/contraband-scala/sbt/internal/util/TraceEvent.scala
generated
Normal file
51
internal/util-logging/src/main/contraband-scala/sbt/internal/util/TraceEvent.scala
generated
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util
|
||||
final class TraceEvent private (
|
||||
val level: String,
|
||||
val message: Throwable,
|
||||
channelName: Option[String],
|
||||
execId: Option[String]) extends sbt.internal.util.AbstractEntry(channelName, execId) with Serializable {
|
||||
|
||||
|
||||
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case x: TraceEvent => (this.level == x.level) && (this.message == x.message) && (this.channelName == x.channelName) && (this.execId == x.execId)
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
37 * (37 * (37 * (37 * (37 * (17 + "sbt.internal.util.TraceEvent".##) + level.##) + message.##) + channelName.##) + execId.##)
|
||||
}
|
||||
override def toString: String = {
|
||||
"TraceEvent(" + level + ", " + message + ", " + channelName + ", " + execId + ")"
|
||||
}
|
||||
private[this] def copy(level: String = level, message: Throwable = message, channelName: Option[String] = channelName, execId: Option[String] = execId): TraceEvent = {
|
||||
new TraceEvent(level, message, channelName, execId)
|
||||
}
|
||||
def withLevel(level: String): TraceEvent = {
|
||||
copy(level = level)
|
||||
}
|
||||
def withMessage(message: Throwable): TraceEvent = {
|
||||
copy(message = message)
|
||||
}
|
||||
def withChannelName(channelName: Option[String]): TraceEvent = {
|
||||
copy(channelName = channelName)
|
||||
}
|
||||
def withChannelName(channelName: String): TraceEvent = {
|
||||
copy(channelName = Option(channelName))
|
||||
}
|
||||
def withExecId(execId: Option[String]): TraceEvent = {
|
||||
copy(execId = execId)
|
||||
}
|
||||
def withExecId(execId: String): TraceEvent = {
|
||||
copy(execId = Option(execId))
|
||||
}
|
||||
}
|
||||
object TraceEvent {
|
||||
|
||||
def apply(level: String, message: Throwable, channelName: Option[String], execId: Option[String]): TraceEvent = new TraceEvent(level, message, channelName, execId)
|
||||
def apply(level: String, message: Throwable, channelName: String, execId: String): TraceEvent = new TraceEvent(level, message, Option(channelName), Option(execId))
|
||||
}
|
||||
11
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/AbstractEntryFormats.scala
generated
Normal file
11
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/AbstractEntryFormats.scala
generated
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
|
||||
import _root_.sjsonnew.JsonFormat
|
||||
trait AbstractEntryFormats { self: sjsonnew.BasicJsonProtocol with sbt.internal.util.codec.StringEventFormats with sbt.internal.util.codec.TraceEventFormats with sbt.internal.util.codec.ProgressItemFormats with sbt.internal.util.codec.ProgressEventFormats =>
|
||||
implicit lazy val AbstractEntryFormat: JsonFormat[sbt.internal.util.AbstractEntry] = flatUnionFormat3[sbt.internal.util.AbstractEntry, sbt.internal.util.StringEvent, sbt.internal.util.TraceEvent, sbt.internal.util.ProgressEvent]("type")
|
||||
}
|
||||
15
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/JsonProtocol.scala
generated
Normal file
15
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/JsonProtocol.scala
generated
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
trait JsonProtocol extends sjsonnew.BasicJsonProtocol
|
||||
with sbt.internal.util.codec.StringEventFormats
|
||||
with sbt.internal.util.codec.TraceEventFormats
|
||||
with sbt.internal.util.codec.ProgressItemFormats
|
||||
with sbt.internal.util.codec.ProgressEventFormats
|
||||
with sbt.internal.util.codec.AbstractEntryFormats
|
||||
with sbt.internal.util.codec.SuccessEventFormats
|
||||
with sbt.internal.util.codec.LogOptionFormats
|
||||
object JsonProtocol extends JsonProtocol
|
||||
31
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/LogOptionFormats.scala
generated
Normal file
31
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/LogOptionFormats.scala
generated
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait LogOptionFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val LogOptionFormat: JsonFormat[sbt.internal.util.LogOption] = new JsonFormat[sbt.internal.util.LogOption] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.LogOption = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.readString(__js) match {
|
||||
case "Always" => sbt.internal.util.LogOption.Always
|
||||
case "Never" => sbt.internal.util.LogOption.Never
|
||||
case "Auto" => sbt.internal.util.LogOption.Auto
|
||||
}
|
||||
case None =>
|
||||
deserializationError("Expected JsString but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.LogOption, builder: Builder[J]): Unit = {
|
||||
val str = obj match {
|
||||
case sbt.internal.util.LogOption.Always => "Always"
|
||||
case sbt.internal.util.LogOption.Never => "Never"
|
||||
case sbt.internal.util.LogOption.Auto => "Auto"
|
||||
}
|
||||
builder.writeString(str)
|
||||
}
|
||||
}
|
||||
}
|
||||
35
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/ProgressEventFormats.scala
generated
Normal file
35
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/ProgressEventFormats.scala
generated
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait ProgressEventFormats { self: sbt.internal.util.codec.ProgressItemFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val ProgressEventFormat: JsonFormat[sbt.internal.util.ProgressEvent] = new JsonFormat[sbt.internal.util.ProgressEvent] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.ProgressEvent = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.beginObject(__js)
|
||||
val level = unbuilder.readField[String]("level")
|
||||
val items = unbuilder.readField[Vector[sbt.internal.util.ProgressItem]]("items")
|
||||
val lastTaskCount = unbuilder.readField[Option[Int]]("lastTaskCount")
|
||||
val channelName = unbuilder.readField[Option[String]]("channelName")
|
||||
val execId = unbuilder.readField[Option[String]]("execId")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.ProgressEvent(level, items, lastTaskCount, channelName, execId)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.ProgressEvent, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("level", obj.level)
|
||||
builder.addField("items", obj.items)
|
||||
builder.addField("lastTaskCount", obj.lastTaskCount)
|
||||
builder.addField("channelName", obj.channelName)
|
||||
builder.addField("execId", obj.execId)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
29
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/ProgressItemFormats.scala
generated
Normal file
29
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/ProgressItemFormats.scala
generated
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait ProgressItemFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val ProgressItemFormat: JsonFormat[sbt.internal.util.ProgressItem] = new JsonFormat[sbt.internal.util.ProgressItem] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.ProgressItem = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.beginObject(__js)
|
||||
val name = unbuilder.readField[String]("name")
|
||||
val elapsedMicros = unbuilder.readField[Long]("elapsedMicros")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.ProgressItem(name, elapsedMicros)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.ProgressItem, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("name", obj.name)
|
||||
builder.addField("elapsedMicros", obj.elapsedMicros)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
33
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/StringEventFormats.scala
generated
Normal file
33
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/StringEventFormats.scala
generated
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait StringEventFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val StringEventFormat: JsonFormat[sbt.internal.util.StringEvent] = new JsonFormat[sbt.internal.util.StringEvent] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.StringEvent = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.beginObject(__js)
|
||||
val level = unbuilder.readField[String]("level")
|
||||
val message = unbuilder.readField[String]("message")
|
||||
val channelName = unbuilder.readField[Option[String]]("channelName")
|
||||
val execId = unbuilder.readField[Option[String]]("execId")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.StringEvent(level, message, channelName, execId)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.StringEvent, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("level", obj.level)
|
||||
builder.addField("message", obj.message)
|
||||
builder.addField("channelName", obj.channelName)
|
||||
builder.addField("execId", obj.execId)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
27
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/SuccessEventFormats.scala
generated
Normal file
27
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/SuccessEventFormats.scala
generated
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait SuccessEventFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val SuccessEventFormat: JsonFormat[sbt.internal.util.SuccessEvent] = new JsonFormat[sbt.internal.util.SuccessEvent] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.SuccessEvent = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.beginObject(__js)
|
||||
val message = unbuilder.readField[String]("message")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.SuccessEvent(message)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.SuccessEvent, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("message", obj.message)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
29
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/TaskProgressFormats.scala
generated
Normal file
29
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/TaskProgressFormats.scala
generated
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait TaskProgressFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val TaskProgressFormat: JsonFormat[sbt.internal.util.TaskProgress] = new JsonFormat[sbt.internal.util.TaskProgress] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.TaskProgress = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val name = unbuilder.readField[String]("name")
|
||||
val elapsedMicros = unbuilder.readField[Option[Long]]("elapsedMicros")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.TaskProgress(name, elapsedMicros)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.TaskProgress, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("name", obj.name)
|
||||
builder.addField("elapsedMicros", obj.elapsedMicros)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
33
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/TraceEventFormats.scala
generated
Normal file
33
internal/util-logging/src/main/contraband-scala/sbt/internal/util/codec/TraceEventFormats.scala
generated
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
|
||||
// DO NOT EDIT MANUALLY
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
|
||||
trait TraceEventFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val TraceEventFormat: JsonFormat[sbt.internal.util.TraceEvent] = new JsonFormat[sbt.internal.util.TraceEvent] {
|
||||
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.util.TraceEvent = {
|
||||
__jsOpt match {
|
||||
case Some(__js) =>
|
||||
unbuilder.beginObject(__js)
|
||||
val level = unbuilder.readField[String]("level")
|
||||
val message = unbuilder.readField[Throwable]("message")
|
||||
val channelName = unbuilder.readField[Option[String]]("channelName")
|
||||
val execId = unbuilder.readField[Option[String]]("execId")
|
||||
unbuilder.endObject()
|
||||
sbt.internal.util.TraceEvent(level, message, channelName, execId)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: sbt.internal.util.TraceEvent, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("level", obj.level)
|
||||
builder.addField("message", obj.message)
|
||||
builder.addField("channelName", obj.channelName)
|
||||
builder.addField("execId", obj.execId)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
package sbt.internal.util
|
||||
@target(Java)
|
||||
@codecPackage("sbt.internal.util.codec")
|
||||
@fullCodec("JsonProtocol")
|
||||
|
||||
enum Severity
|
||||
{
|
||||
Info, Warn, Error
|
||||
}
|
||||
|
||||
type Position {
|
||||
line: Int
|
||||
lineContent: String!
|
||||
offset: Int
|
||||
pointer: Int
|
||||
pointerSpace: String
|
||||
sourcePath: String
|
||||
sourceFile: java.io.File
|
||||
startOffset: Int
|
||||
endOffset: Int
|
||||
startLine: Int
|
||||
startColumn: Int
|
||||
endLine: Int
|
||||
endColumn: Int
|
||||
}
|
||||
|
||||
type Problem {
|
||||
category: String!
|
||||
severity: Severity!
|
||||
message: String!
|
||||
position: Position!
|
||||
rendered: String
|
||||
}
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
package sbt.internal.util
|
||||
@target(Scala)
|
||||
@codecPackage("sbt.internal.util.codec")
|
||||
@fullCodec("JsonProtocol")
|
||||
|
||||
interface AbstractEntry {
|
||||
channelName: String
|
||||
execId: String
|
||||
}
|
||||
|
||||
type StringEvent implements sbt.internal.util.AbstractEntry {
|
||||
level: String!
|
||||
message: String!
|
||||
channelName: String
|
||||
execId: String
|
||||
}
|
||||
|
||||
type TraceEvent implements sbt.internal.util.AbstractEntry {
|
||||
level: String!
|
||||
message: Throwable!
|
||||
channelName: String
|
||||
execId: String
|
||||
}
|
||||
|
||||
## used by super shell
|
||||
type ProgressEvent implements sbt.internal.util.AbstractEntry {
|
||||
level: String!
|
||||
items: [sbt.internal.util.ProgressItem]
|
||||
lastTaskCount: Int
|
||||
channelName: String
|
||||
execId: String
|
||||
}
|
||||
|
||||
## used by super shell
|
||||
type ProgressItem {
|
||||
## name of a task
|
||||
name: String!
|
||||
## current elapsed time in micro seconds
|
||||
elapsedMicros: Long!
|
||||
}
|
||||
|
||||
type SuccessEvent {
|
||||
message: String!
|
||||
}
|
||||
|
||||
## value for logging options like color
|
||||
enum LogOption {
|
||||
Always
|
||||
Never
|
||||
Auto
|
||||
}
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
package com.github.ghik.silencer
|
||||
|
||||
import scala.annotation.Annotation
|
||||
|
||||
/**
|
||||
* When silencer compiler plugin is enabled, this annotation suppresses all warnings emitted by scalac for some portion
|
||||
* of source code. It can be applied on any definition (`class`, def`, `val`, `var`, etc.) or on arbitrary expression,
|
||||
* e.g. {123; 456}: @silent`
|
||||
*/
|
||||
class silent extends Annotation
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
|
||||
/** Implements the level-setting methods of Logger.*/
|
||||
abstract class BasicLogger extends AbstractLogger {
|
||||
private var traceEnabledVar: Int = java.lang.Integer.MAX_VALUE
|
||||
private var level: Level.Value = Level.Info
|
||||
private var successEnabledVar = true
|
||||
def successEnabled: Boolean = synchronized { successEnabledVar }
|
||||
def setSuccessEnabled(flag: Boolean): Unit = synchronized { successEnabledVar = flag }
|
||||
def getLevel: Level.Value = synchronized { level }
|
||||
def setLevel(newLevel: Level.Value): Unit = synchronized { level = newLevel }
|
||||
def setTrace(level: Int): Unit = synchronized { traceEnabledVar = level }
|
||||
def getTrace: Int = synchronized { traceEnabledVar }
|
||||
}
|
||||
|
|
@ -0,0 +1,211 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import scala.collection.mutable.ListBuffer
|
||||
import org.apache.logging.log4j.core.{ LogEvent => XLogEvent, Appender }
|
||||
import org.apache.logging.log4j.core.appender.AbstractAppender
|
||||
import org.apache.logging.log4j.core.layout.PatternLayout
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
object BufferedAppender {
|
||||
def generateName: String =
|
||||
"buffered-" + generateId.incrementAndGet
|
||||
|
||||
private val generateId: AtomicInteger = new AtomicInteger
|
||||
|
||||
def apply(delegate: Appender): BufferedAppender =
|
||||
apply(generateName, delegate)
|
||||
|
||||
def apply(name: String, delegate: Appender): BufferedAppender = {
|
||||
val appender = new BufferedAppender(name, delegate)
|
||||
appender.start
|
||||
appender
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* An appender that can buffer the logging done on it and then can flush the buffer
|
||||
* to the delegate appender provided in the constructor. Use 'record()' to
|
||||
* start buffering and then 'play' to flush the buffer to the backing appender.
|
||||
* The logging level set at the time a message is originally logged is used, not
|
||||
* the level at the time 'play' is called.
|
||||
*/
|
||||
class BufferedAppender private[BufferedAppender] (name: String, delegate: Appender)
|
||||
extends AbstractAppender(name, null, PatternLayout.createDefaultLayout(), true, Array.empty) {
|
||||
|
||||
private[this] val buffer = new ListBuffer[XLogEvent]
|
||||
private[this] var recording = false
|
||||
|
||||
def append(event: XLogEvent): Unit = {
|
||||
if (recording) {
|
||||
buffer += event.toImmutable
|
||||
} else delegate.append(event)
|
||||
()
|
||||
}
|
||||
|
||||
/** Enables buffering. */
|
||||
def record() = synchronized { recording = true }
|
||||
def buffer[T](f: => T): T = {
|
||||
record()
|
||||
try {
|
||||
f
|
||||
} finally {
|
||||
stopQuietly()
|
||||
}
|
||||
}
|
||||
def bufferQuietly[T](f: => T): T = {
|
||||
record()
|
||||
try {
|
||||
val result = f
|
||||
clearBuffer()
|
||||
result
|
||||
} catch { case e: Throwable => stopQuietly(); throw e }
|
||||
}
|
||||
def stopQuietly() = synchronized {
|
||||
try {
|
||||
stopBuffer()
|
||||
} catch { case _: Exception => () }
|
||||
}
|
||||
|
||||
/**
|
||||
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate
|
||||
* so that the messages are written consecutively. The buffer is cleared in the process.
|
||||
*/
|
||||
def play(): Unit =
|
||||
synchronized {
|
||||
buffer.toList foreach {
|
||||
delegate.append
|
||||
}
|
||||
buffer.clear()
|
||||
}
|
||||
|
||||
/** Clears buffered events and disables buffering. */
|
||||
def clearBuffer(): Unit = synchronized { buffer.clear(); recording = false }
|
||||
|
||||
/** Plays buffered events and disables buffering. */
|
||||
def stopBuffer(): Unit = synchronized { play(); clearBuffer() }
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* A logger that can buffer the logging done on it and then can flush the buffer
|
||||
* to the delegate logger provided in the constructor. Use 'startRecording' to
|
||||
* start buffering and then 'play' from to flush the buffer to the backing logger.
|
||||
* The logging level set at the time a message is originally logged is used, not
|
||||
* the level at the time 'play' is called.
|
||||
*
|
||||
* This class assumes that it is the only client of the delegate logger.
|
||||
*/
|
||||
class BufferedLogger(delegate: AbstractLogger) extends BasicLogger {
|
||||
private[this] val buffer = new ListBuffer[LogEvent]
|
||||
private[this] var recording = false
|
||||
|
||||
/** Enables buffering. */
|
||||
def record() = synchronized { recording = true }
|
||||
def buffer[T](f: => T): T = {
|
||||
record()
|
||||
try {
|
||||
f
|
||||
} finally {
|
||||
stopQuietly()
|
||||
}
|
||||
}
|
||||
def bufferQuietly[T](f: => T): T = {
|
||||
record()
|
||||
try {
|
||||
val result = f
|
||||
clear()
|
||||
result
|
||||
} catch { case e: Throwable => stopQuietly(); throw e }
|
||||
}
|
||||
def stopQuietly() = synchronized {
|
||||
try {
|
||||
stop()
|
||||
} catch { case _: Exception => () }
|
||||
}
|
||||
|
||||
/**
|
||||
* Flushes the buffer to the delegate logger. This method calls logAll on the delegate
|
||||
* so that the messages are written consecutively. The buffer is cleared in the process.
|
||||
*/
|
||||
def play(): Unit = synchronized { delegate.logAll(buffer.toList); buffer.clear() }
|
||||
|
||||
/** Clears buffered events and disables buffering. */
|
||||
def clear(): Unit = synchronized { buffer.clear(); recording = false }
|
||||
|
||||
/** Plays buffered events and disables buffering. */
|
||||
def stop(): Unit = synchronized { play(); clear() }
|
||||
|
||||
@deprecated("No longer used.", "1.0.0")
|
||||
override def ansiCodesSupported = delegate.ansiCodesSupported
|
||||
|
||||
override def setLevel(newLevel: Level.Value): Unit = synchronized {
|
||||
super.setLevel(newLevel)
|
||||
if (recording)
|
||||
buffer += new SetLevel(newLevel)
|
||||
else
|
||||
delegate.setLevel(newLevel)
|
||||
()
|
||||
}
|
||||
|
||||
override def setSuccessEnabled(flag: Boolean): Unit = synchronized {
|
||||
super.setSuccessEnabled(flag)
|
||||
if (recording)
|
||||
buffer += new SetSuccess(flag)
|
||||
else
|
||||
delegate.setSuccessEnabled(flag)
|
||||
()
|
||||
}
|
||||
|
||||
override def setTrace(level: Int): Unit = synchronized {
|
||||
super.setTrace(level)
|
||||
if (recording)
|
||||
buffer += new SetTrace(level)
|
||||
else
|
||||
delegate.setTrace(level)
|
||||
()
|
||||
}
|
||||
|
||||
def trace(t: => Throwable): Unit = doBufferableIf(traceEnabled, new Trace(t), _.trace(t))
|
||||
|
||||
def success(message: => String): Unit =
|
||||
doBufferable(Level.Info, new Success(message), _.success(message))
|
||||
|
||||
def log(level: Level.Value, message: => String): Unit =
|
||||
doBufferable(level, new Log(level, message), _.log(level, message))
|
||||
|
||||
def logAll(events: Seq[LogEvent]): Unit = synchronized {
|
||||
if (recording)
|
||||
buffer ++= events
|
||||
else
|
||||
delegate.logAll(events)
|
||||
()
|
||||
}
|
||||
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message))
|
||||
|
||||
private def doBufferable(
|
||||
level: Level.Value,
|
||||
appendIfBuffered: => LogEvent,
|
||||
doUnbuffered: AbstractLogger => Unit
|
||||
): Unit =
|
||||
doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered)
|
||||
|
||||
private def doBufferableIf(
|
||||
condition: => Boolean,
|
||||
appendIfBuffered: => LogEvent,
|
||||
doUnbuffered: AbstractLogger => Unit
|
||||
): Unit = synchronized {
|
||||
if (condition) {
|
||||
if (recording)
|
||||
buffer += appendIfBuffered
|
||||
else
|
||||
doUnbuffered(delegate)
|
||||
}
|
||||
()
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,611 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import java.io.{ PrintStream, PrintWriter }
|
||||
import java.lang.StringBuilder
|
||||
import java.util.Locale
|
||||
import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger, AtomicReference }
|
||||
import org.apache.logging.log4j.{ Level => XLevel }
|
||||
import org.apache.logging.log4j.message.{ Message, ObjectMessage, ReusableObjectMessage }
|
||||
import org.apache.logging.log4j.core.{ LogEvent => XLogEvent }
|
||||
import org.apache.logging.log4j.core.appender.AbstractAppender
|
||||
|
||||
import ConsoleAppender._
|
||||
|
||||
object ConsoleLogger {
|
||||
// These are provided so other modules do not break immediately.
|
||||
@deprecated("Use EscHelpers.ESC instead", "0.13.x")
|
||||
final val ESC = EscHelpers.ESC
|
||||
@deprecated("Use EscHelpers.isEscapeTerminator instead", "0.13.x")
|
||||
private[sbt] def isEscapeTerminator(c: Char): Boolean = EscHelpers.isEscapeTerminator(c)
|
||||
@deprecated("Use EscHelpers.hasEscapeSequence instead", "0.13.x")
|
||||
def hasEscapeSequence(s: String): Boolean = EscHelpers.hasEscapeSequence(s)
|
||||
@deprecated("Use EscHelpers.removeEscapeSequences instead", "0.13.x")
|
||||
def removeEscapeSequences(s: String): String = EscHelpers.removeEscapeSequences(s)
|
||||
@deprecated("Use ConsoleAppender.formatEnabledInEnv instead", "0.13.x")
|
||||
val formatEnabled = ConsoleAppender.formatEnabledInEnv
|
||||
@deprecated("Use ConsoleAppender.noSuppressedMessage instead", "0.13.x")
|
||||
val noSuppressedMessage = ConsoleAppender.noSuppressedMessage
|
||||
|
||||
/**
|
||||
* A new `ConsoleLogger` that logs to `out`.
|
||||
*
|
||||
* @param out Where to log the messages.
|
||||
* @return A new `ConsoleLogger` that logs to `out`.
|
||||
*/
|
||||
def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out))
|
||||
|
||||
/**
|
||||
* A new `ConsoleLogger` that logs to `out`.
|
||||
*
|
||||
* @param out Where to log the messages.
|
||||
* @return A new `ConsoleLogger` that logs to `out`.
|
||||
*/
|
||||
def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out))
|
||||
|
||||
/**
|
||||
* A new `ConsoleLogger` that logs to `out`.
|
||||
*
|
||||
* @param out Where to log the messages.
|
||||
* @param ansiCodesSupported `true` if `out` supported ansi codes, `false` otherwise.
|
||||
* @param useFormat `true` to show formatting, `false` to remove it from messages.
|
||||
* @param suppressedMessage How to show suppressed stack traces.
|
||||
* @return A new `ConsoleLogger` that logs to `out`.
|
||||
*/
|
||||
def apply(
|
||||
out: ConsoleOut = ConsoleOut.systemOut,
|
||||
ansiCodesSupported: Boolean = ConsoleAppender.formatEnabledInEnv,
|
||||
useFormat: Boolean = ConsoleAppender.formatEnabledInEnv,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String] =
|
||||
ConsoleAppender.noSuppressedMessage
|
||||
): ConsoleLogger =
|
||||
new ConsoleLogger(out, ansiCodesSupported, useFormat, suppressedMessage)
|
||||
}
|
||||
|
||||
/**
|
||||
* A logger that logs to the console. On supported systems, the level labels are
|
||||
* colored.
|
||||
*/
|
||||
class ConsoleLogger private[ConsoleLogger] (
|
||||
out: ConsoleOut,
|
||||
override val ansiCodesSupported: Boolean,
|
||||
useFormat: Boolean,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
) extends BasicLogger {
|
||||
|
||||
private[sbt] val appender: ConsoleAppender =
|
||||
ConsoleAppender(generateName(), out, ansiCodesSupported, useFormat, suppressedMessage)
|
||||
|
||||
override def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
appender.control(event, message)
|
||||
|
||||
override def log(level: Level.Value, message: => String): Unit =
|
||||
if (atLevel(level)) {
|
||||
appender.appendLog(level, message)
|
||||
}
|
||||
|
||||
override def success(message: => String): Unit =
|
||||
if (successEnabled) {
|
||||
appender.success(message)
|
||||
}
|
||||
|
||||
override def trace(t: => Throwable): Unit =
|
||||
appender.trace(t, getTrace)
|
||||
|
||||
override def logAll(events: Seq[LogEvent]) =
|
||||
out.lockObject.synchronized { events.foreach(log) }
|
||||
}
|
||||
|
||||
object ConsoleAppender {
|
||||
private[sbt] def cursorUp(n: Int): String = s"\u001B[${n}A"
|
||||
private[sbt] def cursorDown(n: Int): String = s"\u001B[${n}B"
|
||||
private[sbt] def scrollUp(n: Int): String = s"\u001B[${n}S"
|
||||
private[sbt] final val DeleteLine = "\u001B[2K"
|
||||
private[sbt] final val CursorLeft1000 = "\u001B[1000D"
|
||||
private[sbt] final val CursorDown1 = cursorDown(1)
|
||||
private[this] val widthHolder: AtomicInteger = new AtomicInteger
|
||||
private[sbt] def terminalWidth = widthHolder.get
|
||||
private[sbt] def setTerminalWidth(n: Int): Unit = widthHolder.set(n)
|
||||
private[this] val showProgressHolder: AtomicBoolean = new AtomicBoolean(false)
|
||||
def setShowProgress(b: Boolean): Unit = showProgressHolder.set(b)
|
||||
def showProgress: Boolean = showProgressHolder.get
|
||||
|
||||
/** Hide stack trace altogether. */
|
||||
val noSuppressedMessage = (_: SuppressedTraceContext) => None
|
||||
|
||||
/**
|
||||
* Indicates whether formatting has been disabled in environment variables.
|
||||
* 1. -Dsbt.log.noformat=true means no formatting.
|
||||
* 2. -Dsbt.color=always/auto/never/true/false
|
||||
* 3. -Dsbt.colour=always/auto/never/true/false
|
||||
* 4. -Dsbt.log.format=always/auto/never/true/false
|
||||
*/
|
||||
val formatEnabledInEnv: Boolean = {
|
||||
def useColorDefault: Boolean = {
|
||||
// This approximates that both stdin and stdio are connected,
|
||||
// so by default color will be turned off for pipes and redirects.
|
||||
val hasConsole = Option(java.lang.System.console).isDefined
|
||||
ansiSupported && hasConsole
|
||||
}
|
||||
sys.props.get("sbt.log.noformat") match {
|
||||
case Some(_) => !java.lang.Boolean.getBoolean("sbt.log.noformat")
|
||||
case _ =>
|
||||
sys.props
|
||||
.get("sbt.color")
|
||||
.orElse(sys.props.get("sbt.colour"))
|
||||
.orElse(sys.props.get("sbt.log.format"))
|
||||
.flatMap({ s =>
|
||||
parseLogOption(s) match {
|
||||
case LogOption.Always => Some(true)
|
||||
case LogOption.Never => Some(false)
|
||||
case _ => None
|
||||
}
|
||||
})
|
||||
.getOrElse(useColorDefault)
|
||||
}
|
||||
}
|
||||
|
||||
private[sbt] def parseLogOption(s: String): LogOption =
|
||||
s.toLowerCase match {
|
||||
case "always" => LogOption.Always
|
||||
case "auto" => LogOption.Auto
|
||||
case "never" => LogOption.Never
|
||||
case "true" => LogOption.Always
|
||||
case "false" => LogOption.Never
|
||||
case _ => LogOption.Auto
|
||||
}
|
||||
|
||||
private[this] val generateId: AtomicInteger = new AtomicInteger
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` that writes to standard output.
|
||||
*
|
||||
* @return A new `ConsoleAppender` that writes to standard output.
|
||||
*/
|
||||
def apply(): ConsoleAppender = apply(ConsoleOut.systemOut)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` that appends log message to `out`.
|
||||
*
|
||||
* @param out Where to write messages.
|
||||
* @return A new `ConsoleAppender`.
|
||||
*/
|
||||
def apply(out: PrintStream): ConsoleAppender = apply(ConsoleOut.printStreamOut(out))
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` that appends log messages to `out`.
|
||||
*
|
||||
* @param out Where to write messages.
|
||||
* @return A new `ConsoleAppender`.
|
||||
*/
|
||||
def apply(out: PrintWriter): ConsoleAppender = apply(ConsoleOut.printWriterOut(out))
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` that writes to `out`.
|
||||
*
|
||||
* @param out Where to write messages.
|
||||
* @return A new `ConsoleAppender that writes to `out`.
|
||||
*/
|
||||
def apply(out: ConsoleOut): ConsoleAppender = apply(generateName(), out)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` identified by `name`, and that writes to standard output.
|
||||
*
|
||||
* @param name An identifier for the `ConsoleAppender`.
|
||||
* @return A new `ConsoleAppender` that writes to standard output.
|
||||
*/
|
||||
def apply(name: String): ConsoleAppender = apply(name, ConsoleOut.systemOut)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
|
||||
*
|
||||
* @param name An identifier for the `ConsoleAppender`.
|
||||
* @param out Where to write messages.
|
||||
* @return A new `ConsoleAppender` that writes to `out`.
|
||||
*/
|
||||
def apply(name: String, out: ConsoleOut): ConsoleAppender = apply(name, out, formatEnabledInEnv)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
|
||||
*
|
||||
* @param name An identifier for the `ConsoleAppender`.
|
||||
* @param out Where to write messages.
|
||||
* @param suppressedMessage How to handle stack traces.
|
||||
* @return A new `ConsoleAppender` that writes to `out`.
|
||||
*/
|
||||
def apply(
|
||||
name: String,
|
||||
out: ConsoleOut,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
): ConsoleAppender =
|
||||
apply(name, out, formatEnabledInEnv, formatEnabledInEnv, suppressedMessage)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
|
||||
*
|
||||
* @param name An identifier for the `ConsoleAppender`.
|
||||
* @param out Where to write messages.
|
||||
* @param useFormat `true` to enable format (color, bold, etc.), `false` to remove formatting.
|
||||
* @return A new `ConsoleAppender` that writes to `out`.
|
||||
*/
|
||||
def apply(name: String, out: ConsoleOut, useFormat: Boolean): ConsoleAppender =
|
||||
apply(name, out, formatEnabledInEnv, useFormat, noSuppressedMessage)
|
||||
|
||||
/**
|
||||
* A new `ConsoleAppender` identified by `name`, and that writes to `out`.
|
||||
*
|
||||
* @param name An identifier for the `ConsoleAppender`.
|
||||
* @param out Where to write messages.
|
||||
* @param ansiCodesSupported `true` if the output stream supports ansi codes, `false` otherwise.
|
||||
* @param useFormat `true` to enable format (color, bold, etc.), `false` to remove
|
||||
* formatting.
|
||||
* @return A new `ConsoleAppender` that writes to `out`.
|
||||
*/
|
||||
def apply(
|
||||
name: String,
|
||||
out: ConsoleOut,
|
||||
ansiCodesSupported: Boolean,
|
||||
useFormat: Boolean,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
): ConsoleAppender = {
|
||||
val appender = new ConsoleAppender(name, out, ansiCodesSupported, useFormat, suppressedMessage)
|
||||
appender.start
|
||||
appender
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the Log4J `level` to the corresponding sbt level.
|
||||
*
|
||||
* @param level A level, as represented by Log4J.
|
||||
* @return The corresponding level in sbt's world.
|
||||
*/
|
||||
def toLevel(level: XLevel): Level.Value =
|
||||
level match {
|
||||
case XLevel.OFF => Level.Debug
|
||||
case XLevel.FATAL => Level.Error
|
||||
case XLevel.ERROR => Level.Error
|
||||
case XLevel.WARN => Level.Warn
|
||||
case XLevel.INFO => Level.Info
|
||||
case XLevel.DEBUG => Level.Debug
|
||||
case _ => Level.Debug
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the sbt `level` to the corresponding Log4J level.
|
||||
*
|
||||
* @param level A level, as represented by sbt.
|
||||
* @return The corresponding level in Log4J's world.
|
||||
*/
|
||||
def toXLevel(level: Level.Value): XLevel =
|
||||
level match {
|
||||
case Level.Error => XLevel.ERROR
|
||||
case Level.Warn => XLevel.WARN
|
||||
case Level.Info => XLevel.INFO
|
||||
case Level.Debug => XLevel.DEBUG
|
||||
}
|
||||
|
||||
private[sbt] def generateName(): String = "out-" + generateId.incrementAndGet
|
||||
|
||||
private[this] def jline1to2CompatMsg = "Found class jline.Terminal, but interface was expected"
|
||||
|
||||
private[this] def ansiSupported =
|
||||
try {
|
||||
val terminal = jline.TerminalFactory.get
|
||||
terminal.restore // #460
|
||||
terminal.isAnsiSupported
|
||||
} catch {
|
||||
case _: Exception => !isWindows
|
||||
|
||||
// sbt 0.13 drops JLine 1.0 from the launcher and uses 2.x as a normal dependency
|
||||
// when 0.13 is used with a 0.12 launcher or earlier, the JLine classes from the launcher get loaded
|
||||
// this results in a linkage error as detected below. The detection is likely jvm specific, but the priority
|
||||
// is avoiding mistakenly identifying something as a launcher incompatibility when it is not
|
||||
case e: IncompatibleClassChangeError if e.getMessage == jline1to2CompatMsg =>
|
||||
throw new IncompatibleClassChangeError(
|
||||
"JLine incompatibility detected. Check that the sbt launcher is version 0.13.x or later."
|
||||
)
|
||||
}
|
||||
|
||||
private[this] def os = System.getProperty("os.name")
|
||||
private[this] def isWindows = os.toLowerCase(Locale.ENGLISH).indexOf("windows") >= 0
|
||||
|
||||
}
|
||||
|
||||
// See http://stackoverflow.com/questions/24205093/how-to-create-a-custom-appender-in-log4j2
|
||||
// for custom appender using Java.
|
||||
// http://logging.apache.org/log4j/2.x/manual/customconfig.html
|
||||
// https://logging.apache.org/log4j/2.x/log4j-core/apidocs/index.html
|
||||
|
||||
/**
|
||||
* A logger that logs to the console. On supported systems, the level labels are
|
||||
* colored.
|
||||
*
|
||||
* This logger is not thread-safe.
|
||||
*/
|
||||
class ConsoleAppender private[ConsoleAppender] (
|
||||
name: String,
|
||||
out: ConsoleOut,
|
||||
ansiCodesSupported: Boolean,
|
||||
useFormat: Boolean,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
) extends AbstractAppender(name, null, LogExchange.dummyLayout, true, Array.empty) {
|
||||
import scala.Console.{ BLUE, GREEN, RED, YELLOW }
|
||||
|
||||
private val progressState: AtomicReference[ProgressState] = new AtomicReference(null)
|
||||
private[sbt] def setProgressState(state: ProgressState) = progressState.set(state)
|
||||
|
||||
/**
|
||||
* Splits a log message into individual lines and interlaces each line with
|
||||
* the task progress report to reduce the appearance of flickering. It is assumed
|
||||
* that this method is only called while holding the out.lockObject.
|
||||
*/
|
||||
private def supershellInterlaceMsg(msg: String): Unit = {
|
||||
val state = progressState.get
|
||||
import state._
|
||||
val progress = progressLines.get
|
||||
msg.linesIterator.foreach { l =>
|
||||
out.println(s"$DeleteLine$l")
|
||||
if (progress.length > 0) {
|
||||
val pad = if (padding.get > 0) padding.decrementAndGet() else 0
|
||||
val width = ConsoleAppender.terminalWidth
|
||||
val len: Int = progress.foldLeft(progress.length)(_ + terminalLines(width)(_))
|
||||
deleteConsoleLines(blankZone + pad)
|
||||
progress.foreach(printProgressLine)
|
||||
out.print(cursorUp(blankZone + len + padding.get))
|
||||
}
|
||||
}
|
||||
out.flush()
|
||||
}
|
||||
|
||||
private def printProgressLine(line: String): Unit = {
|
||||
out.print(DeleteLine)
|
||||
out.println(line)
|
||||
}
|
||||
|
||||
/**
|
||||
* Receives a new task report and replaces the old one. In the event that the new
|
||||
* report has fewer lines than the previous report, padding lines are added on top
|
||||
* so that the console log lines remain contiguous. When a console line is printed
|
||||
* at the info or greater level, we can decrement the padding because the console
|
||||
* line will have filled in the blank line.
|
||||
*/
|
||||
private def updateProgressState(pe: ProgressEvent): Unit = {
|
||||
val state = progressState.get
|
||||
import state._
|
||||
val sorted = pe.items.sortBy(x => x.elapsedMicros)
|
||||
val info = sorted map { item =>
|
||||
val elapsed = item.elapsedMicros / 1000000L
|
||||
s" | => ${item.name} ${elapsed}s"
|
||||
}
|
||||
|
||||
val width = ConsoleAppender.terminalWidth
|
||||
val currentLength = info.foldLeft(info.length)(_ + terminalLines(width)(_))
|
||||
val previousLines = progressLines.getAndSet(info)
|
||||
val prevLength = previousLines.foldLeft(previousLines.length)(_ + terminalLines(width)(_))
|
||||
|
||||
val prevPadding = padding.get
|
||||
val newPadding = math.max(0, prevLength + prevPadding - currentLength)
|
||||
padding.set(newPadding)
|
||||
|
||||
deleteConsoleLines(newPadding)
|
||||
deleteConsoleLines(blankZone)
|
||||
info.foreach(printProgressLine)
|
||||
|
||||
out.print(cursorUp(blankZone + currentLength + newPadding))
|
||||
out.flush()
|
||||
}
|
||||
private def terminalLines(width: Int): String => Int =
|
||||
(progressLine: String) => if (width > 0) (progressLine.length - 1) / width else 0
|
||||
private def deleteConsoleLines(n: Int): Unit = {
|
||||
(1 to n) foreach { _ =>
|
||||
out.println(DeleteLine)
|
||||
}
|
||||
}
|
||||
|
||||
private val reset: String = {
|
||||
if (ansiCodesSupported && useFormat) scala.Console.RESET
|
||||
else ""
|
||||
}
|
||||
|
||||
private val SUCCESS_LABEL_COLOR = GREEN
|
||||
private val SUCCESS_MESSAGE_COLOR = reset
|
||||
private val NO_COLOR = reset
|
||||
|
||||
private var traceEnabledVar: Int = Int.MaxValue
|
||||
|
||||
def setTrace(level: Int): Unit = synchronized { traceEnabledVar = level }
|
||||
|
||||
/**
|
||||
* Returns the number of lines for stacktrace.
|
||||
*/
|
||||
def getTrace: Int = synchronized { traceEnabledVar }
|
||||
|
||||
override def append(event: XLogEvent): Unit = {
|
||||
val level = ConsoleAppender.toLevel(event.getLevel)
|
||||
val message = event.getMessage
|
||||
appendMessage(level, message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs the stack trace of `t`, possibly shortening it.
|
||||
*
|
||||
* The `traceLevel` parameter configures how the stack trace will be shortened.
|
||||
* See `StackTrace.trimmed`.
|
||||
*
|
||||
* @param t The `Throwable` whose stack trace to log.
|
||||
* @param traceLevel How to shorten the stack trace.
|
||||
*/
|
||||
def trace(t: => Throwable, traceLevel: Int): Unit =
|
||||
out.lockObject.synchronized {
|
||||
if (traceLevel >= 0)
|
||||
write(StackTrace.trimmed(t, traceLevel))
|
||||
if (traceLevel <= 2) {
|
||||
val ctx = new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)
|
||||
for (msg <- suppressedMessage(ctx))
|
||||
appendLog(NO_COLOR, "trace", NO_COLOR, msg)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs a `ControlEvent` to the log.
|
||||
*
|
||||
* @param event The kind of `ControlEvent`.
|
||||
* @param message The message to log.
|
||||
*/
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
appendLog(labelColor(Level.Info), Level.Info.toString, BLUE, message)
|
||||
|
||||
/**
|
||||
* Appends the message `message` to the log at level `level`.
|
||||
*
|
||||
* @param level The importance level of the message.
|
||||
* @param message The message to log.
|
||||
*/
|
||||
def appendLog(level: Level.Value, message: => String): Unit = {
|
||||
appendLog(labelColor(level), level.toString, NO_COLOR, message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats `msg` with `format`, wrapped between `RESET`s.
|
||||
*
|
||||
* @param format The format to use
|
||||
* @param msg The message to format
|
||||
* @return The formatted message.
|
||||
*/
|
||||
private def formatted(format: String, msg: String): String = {
|
||||
val builder = new java.lang.StringBuilder(reset.length * 2 + format.length + msg.length)
|
||||
builder.append(reset).append(format).append(msg).append(reset).toString
|
||||
}
|
||||
|
||||
/**
|
||||
* Select the right color for the label given `level`.
|
||||
*
|
||||
* @param level The label to consider to select the color.
|
||||
* @return The color to use to color the label.
|
||||
*/
|
||||
private def labelColor(level: Level.Value): String =
|
||||
level match {
|
||||
case Level.Error => RED
|
||||
case Level.Warn => YELLOW
|
||||
case _ => NO_COLOR
|
||||
}
|
||||
|
||||
/**
|
||||
* Appends a full message to the log. Each line is prefixed with `[$label]`, written in
|
||||
* `labelColor` if formatting is enabled. The lines of the messages are colored with
|
||||
* `messageColor` if formatting is enabled.
|
||||
*
|
||||
* @param labelColor The color to use to format the label.
|
||||
* @param label The label to prefix each line with. The label is shown between square
|
||||
* brackets.
|
||||
* @param messageColor The color to use to format the message.
|
||||
* @param message The message to write.
|
||||
*/
|
||||
private def appendLog(
|
||||
labelColor: String,
|
||||
label: String,
|
||||
messageColor: String,
|
||||
message: String
|
||||
): Unit =
|
||||
out.lockObject.synchronized {
|
||||
val builder: StringBuilder = new StringBuilder(labelColor.length + label.length + messageColor.length + reset.length * 3)
|
||||
message.linesIterator.foreach { line =>
|
||||
builder.ensureCapacity(labelColor.length + label.length + messageColor.length + line.length + reset.length * 3 + 3)
|
||||
builder.setLength(0)
|
||||
def fmted(a: String, b: String) = builder.append(reset).append(a).append(b).append(reset)
|
||||
builder.append(reset).append('[')
|
||||
fmted(labelColor, label)
|
||||
builder.append("] ")
|
||||
fmted(messageColor, line)
|
||||
write(builder.toString)
|
||||
}
|
||||
}
|
||||
|
||||
// success is called by ConsoleLogger.
|
||||
private[sbt] def success(message: => String): Unit = {
|
||||
appendLog(SUCCESS_LABEL_COLOR, Level.SuccessLabel, SUCCESS_MESSAGE_COLOR, message)
|
||||
}
|
||||
|
||||
private def write(msg: String): Unit = {
|
||||
val toWrite =
|
||||
if (!useFormat || !ansiCodesSupported) EscHelpers.removeEscapeSequences(msg) else msg
|
||||
if (progressState.get != null) {
|
||||
supershellInterlaceMsg(toWrite)
|
||||
} else {
|
||||
out.println(toWrite)
|
||||
}
|
||||
}
|
||||
|
||||
private def appendMessage(level: Level.Value, msg: Message): Unit =
|
||||
msg match {
|
||||
case o: ObjectMessage => appendMessageContent(level, o.getParameter)
|
||||
case o: ReusableObjectMessage => appendMessageContent(level, o.getParameter)
|
||||
case _ => appendLog(level, msg.getFormattedMessage)
|
||||
}
|
||||
|
||||
private def appendTraceEvent(te: TraceEvent): Unit = {
|
||||
val traceLevel = getTrace
|
||||
if (traceLevel >= 0) {
|
||||
val throwableShowLines: ShowLines[Throwable] =
|
||||
ShowLines[Throwable]((t: Throwable) => {
|
||||
List(StackTrace.trimmed(t, traceLevel))
|
||||
})
|
||||
val codec: ShowLines[TraceEvent] =
|
||||
ShowLines[TraceEvent]((t: TraceEvent) => {
|
||||
throwableShowLines.showLines(t.message)
|
||||
})
|
||||
codec.showLines(te).toVector foreach { appendLog(Level.Error, _) }
|
||||
}
|
||||
if (traceLevel <= 2) {
|
||||
suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useFormat)) foreach {
|
||||
appendLog(Level.Error, _)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def appendProgressEvent(pe: ProgressEvent): Unit =
|
||||
if (progressState.get != null) {
|
||||
out.lockObject.synchronized(updateProgressState(pe))
|
||||
}
|
||||
|
||||
private def appendMessageContent(level: Level.Value, o: AnyRef): Unit = {
|
||||
def appendEvent(oe: ObjectEvent[_]): Unit = {
|
||||
val contentType = oe.contentType
|
||||
contentType match {
|
||||
case "sbt.internal.util.TraceEvent" => appendTraceEvent(oe.message.asInstanceOf[TraceEvent])
|
||||
case "sbt.internal.util.ProgressEvent" =>
|
||||
appendProgressEvent(oe.message.asInstanceOf[ProgressEvent])
|
||||
case _ =>
|
||||
LogExchange.stringCodec[AnyRef](contentType) match {
|
||||
case Some(codec) if contentType == "sbt.internal.util.SuccessEvent" =>
|
||||
codec.showLines(oe.message.asInstanceOf[AnyRef]).toVector foreach { success(_) }
|
||||
case Some(codec) =>
|
||||
codec.showLines(oe.message.asInstanceOf[AnyRef]).toVector foreach (appendLog(
|
||||
level,
|
||||
_
|
||||
))
|
||||
case _ => appendLog(level, oe.message.toString)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
o match {
|
||||
case x: StringEvent => Vector(x.message) foreach { appendLog(level, _) }
|
||||
case x: ObjectEvent[_] => appendEvent(x)
|
||||
case _ => Vector(o.toString) foreach { appendLog(level, _) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final class SuppressedTraceContext(val traceLevel: Int, val useFormat: Boolean)
|
||||
private[sbt] final class ProgressState(
|
||||
val progressLines: AtomicReference[Seq[String]],
|
||||
val padding: AtomicInteger,
|
||||
val blankZone: Int
|
||||
) {
|
||||
def this(blankZone: Int) = this(new AtomicReference(Nil), new AtomicInteger(0), blankZone)
|
||||
def reset(): Unit = {
|
||||
progressLines.set(Nil)
|
||||
padding.set(0)
|
||||
}
|
||||
}
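
// A minimal usage sketch of the companion `apply` above (the object name and messages
// are hypothetical, not part of this module): create an appender that writes to
// System.out with formatting disabled, cap stack traces, and log to it directly.
object ConsoleAppenderUsageSketch {
  import sbt.internal.util.{ ConsoleAppender, ConsoleOut }
  import sbt.util.Level

  def main(args: Array[String]): Unit = {
    val appender = ConsoleAppender("sketch", ConsoleOut.systemOut, useFormat = false)
    appender.setTrace(2) // limits stack traces passed to `trace` (see StackTrace.trimmed)
    appender.appendLog(Level.Info, "plain informational line")
    appender.appendLog(Level.Warn, "the [warn] label would be colored if formatting were enabled")
  }
}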
@ -0,0 +1,74 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import java.io.{ BufferedWriter, PrintStream, PrintWriter }
|
||||
|
||||
sealed trait ConsoleOut {
|
||||
val lockObject: AnyRef
|
||||
def print(s: String): Unit
|
||||
def println(s: String): Unit
|
||||
def println(): Unit
|
||||
def flush(): Unit
|
||||
}
|
||||
|
||||
object ConsoleOut {
|
||||
def systemOut: ConsoleOut = printStreamOut(System.out)
|
||||
|
||||
def overwriteContaining(s: String): (String, String) => Boolean =
|
||||
(cur, prev) => cur.contains(s) && prev.contains(s)
|
||||
|
||||
/** Move to beginning of previous line and clear the line. */
|
||||
private[this] final val OverwriteLine = "\u001B[A\r\u001B[2K"
|
||||
|
||||
/**
|
||||
* ConsoleOut instance that is backed by System.out. It overwrites the previously printed line
|
||||
* if the function `f(lineToWrite, previousLine)` returns true.
|
||||
*
|
||||
* The ConsoleOut returned by this method assumes that the only newlines are from println calls
|
||||
* and not in the String arguments.
|
||||
*/
|
||||
def systemOutOverwrite(f: (String, String) => Boolean): ConsoleOut = new ConsoleOut {
|
||||
val lockObject = System.out
|
||||
private[this] var last: Option[String] = None
|
||||
private[this] var current = new java.lang.StringBuffer
|
||||
def print(s: String): Unit = synchronized { current.append(s); () }
|
||||
def println(s: String): Unit = synchronized { current.append(s); println() }
|
||||
def println(): Unit = synchronized {
|
||||
val s = current.toString
|
||||
if (ConsoleAppender.formatEnabledInEnv && last.exists(lmsg => f(s, lmsg)))
|
||||
lockObject.print(OverwriteLine)
|
||||
lockObject.println(s)
|
||||
last = Some(s)
|
||||
current.setLength(0)
|
||||
}
|
||||
def flush(): Unit = synchronized {
|
||||
val s = current.toString
|
||||
if (ConsoleAppender.formatEnabledInEnv && last.exists(lmsg => f(s, lmsg)))
|
||||
lockObject.print(OverwriteLine)
|
||||
lockObject.print(s)
|
||||
last = Some(s)
|
||||
current.setLength(0)
|
||||
}
|
||||
}
|
||||
|
||||
def printStreamOut(out: PrintStream): ConsoleOut = new ConsoleOut {
|
||||
val lockObject = out
|
||||
def print(s: String) = out.print(s)
|
||||
def println(s: String) = out.println(s)
|
||||
def println() = out.println()
|
||||
def flush() = out.flush()
|
||||
}
|
||||
def printWriterOut(out: PrintWriter): ConsoleOut = new ConsoleOut {
|
||||
val lockObject = out
|
||||
def print(s: String) = out.print(s)
|
||||
def println(s: String) = { out.println(s); flush() }
|
||||
def println() = { out.println(); flush() }
|
||||
def flush() = { out.flush() }
|
||||
}
|
||||
def bufferedWriterOut(out: BufferedWriter): ConsoleOut = new ConsoleOut {
|
||||
val lockObject = out
|
||||
def print(s: String) = out.write(s)
|
||||
def println(s: String) = { out.write(s); println() }
|
||||
def println() = { out.newLine(); flush() }
|
||||
def flush() = { out.flush() }
|
||||
}
|
||||
}
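
// A small hypothetical sketch of `systemOutOverwrite` with `overwriteContaining`:
// when formatting is enabled, consecutive lines that both contain "Resolving "
// overwrite each other instead of scrolling.
object ConsoleOutUsageSketch {
  import sbt.internal.util.ConsoleOut

  def main(args: Array[String]): Unit = {
    val out = ConsoleOut.systemOutOverwrite(ConsoleOut.overwriteContaining("Resolving "))
    out.println("Resolving org.example#module-a;1.0 ...")
    out.println("Resolving org.example#module-b;1.0 ...") // replaces the previous line on ANSI terminals
    out.println("Resolution complete.")
  }
}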
@ -0,0 +1,92 @@
|
|||
package sbt.internal.util
|
||||
|
||||
object EscHelpers {
|
||||
|
||||
/** Escape character, used to introduce an escape sequence. */
|
||||
final val ESC = '\u001B'
|
||||
|
||||
/**
|
||||
* An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value 126).
|
||||
* It is the final character in an escape sequence.
|
||||
*
|
||||
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
|
||||
*/
|
||||
private[sbt] def isEscapeTerminator(c: Char): Boolean =
|
||||
c >= '@' && c <= '~'
|
||||
|
||||
/**
|
||||
* Test if the character AFTER an ESC is the ANSI CSI.
|
||||
*
|
||||
* see: http://en.wikipedia.org/wiki/ANSI_escape_code
|
||||
*
|
||||
* The CSI (Control Sequence Introducer) codes start with ESC + '['. This is for testing the second character.
|
||||
*
|
||||
* There is an additional one-character CSI that we could test for, but it is not frequently used, so we don't
|
||||
* check for it.
|
||||
*
|
||||
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
|
||||
*/
|
||||
private def isCSI(c: Char): Boolean = c == '['
|
||||
|
||||
/**
|
||||
* Tests whether or not a character needs to immediately terminate the ANSI sequence.
|
||||
*
|
||||
* c.f. http://en.wikipedia.org/wiki/ANSI_escape_code#Sequence_elements
|
||||
*/
|
||||
private def isAnsiTwoCharacterTerminator(c: Char): Boolean =
|
||||
(c >= '@') && (c <= '_')
|
||||
|
||||
/**
|
||||
* Returns true if the string contains the ESC character.
|
||||
*
|
||||
* TODO - this should handle raw CSI (not used much)
|
||||
*/
|
||||
def hasEscapeSequence(s: String): Boolean =
|
||||
s.indexOf(ESC) >= 0
|
||||
|
||||
/**
|
||||
* Returns the string `s` with escape sequences removed.
|
||||
* An escape sequence starts with the ESC character (decimal value 27) and ends with an escape terminator.
|
||||
* @see isEscapeTerminator
|
||||
*/
|
||||
def removeEscapeSequences(s: String): String =
|
||||
if (s.isEmpty || !hasEscapeSequence(s))
|
||||
s
|
||||
else {
|
||||
val sb = new java.lang.StringBuilder
|
||||
nextESC(s, 0, sb)
|
||||
sb.toString
|
||||
}
|
||||
|
||||
private[this] def nextESC(s: String, start: Int, sb: java.lang.StringBuilder): Unit = {
|
||||
val escIndex = s.indexOf(ESC, start)
|
||||
if (escIndex < 0) {
|
||||
sb.append(s, start, s.length)
|
||||
()
|
||||
} else {
|
||||
sb.append(s, start, escIndex)
|
||||
val next: Int =
|
||||
if (escIndex + 1 >= s.length) skipESC(s, escIndex + 1)
|
||||
// If it's a CSI we skip past it and then look for a terminator.
|
||||
else if (isCSI(s.charAt(escIndex + 1))) skipESC(s, escIndex + 2)
|
||||
else if (isAnsiTwoCharacterTerminator(s.charAt(escIndex + 1))) escIndex + 2
|
||||
else {
|
||||
// There could be non-ANSI character sequences we should make sure we handle here.
|
||||
skipESC(s, escIndex + 1)
|
||||
}
|
||||
nextESC(s, next, sb)
|
||||
}
|
||||
}
|
||||
|
||||
/** Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after the ESC that starts the sequence. */
|
||||
private[this] def skipESC(s: String, i: Int): Int = {
|
||||
if (i >= s.length) {
|
||||
i
|
||||
} else if (isEscapeTerminator(s.charAt(i))) {
|
||||
i + 1
|
||||
} else {
|
||||
skipESC(s, i + 1)
|
||||
}
|
||||
}
|
||||
|
||||
}
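
// Illustrative check of the helpers above (hypothetical input): a CSI color sequence
// is detected and stripped, leaving only the printable text.
object EscHelpersUsageSketch {
  import sbt.internal.util.EscHelpers

  def main(args: Array[String]): Unit = {
    val colored = "\u001B[31merror\u001B[0m: something failed"
    assert(EscHelpers.hasEscapeSequence(colored))
    assert(EscHelpers.removeEscapeSequences(colored) == "error: something failed")
    // strings without ESC are returned unchanged
    assert(EscHelpers.removeEscapeSequences("plain text") == "plain text")
  }
}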
@ -0,0 +1,36 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import com.github.ghik.silencer.silent
|
||||
|
||||
/**
|
||||
* A filter logger delegates messages, but not the logging level, to another logger. This means
|
||||
* that messages are logged at the higher of the two levels set by this logger and its delegate.
|
||||
*/
|
||||
class FilterLogger(delegate: AbstractLogger) extends BasicLogger {
|
||||
@silent override lazy val ansiCodesSupported = delegate.ansiCodesSupported
|
||||
def trace(t: => Throwable): Unit = {
|
||||
if (traceEnabled)
|
||||
delegate.trace(t)
|
||||
}
|
||||
override def setSuccessEnabled(flag: Boolean): Unit = delegate.setSuccessEnabled(flag)
|
||||
override def successEnabled = delegate.successEnabled
|
||||
override def setTrace(level: Int): Unit = delegate.setTrace(level)
|
||||
override def getTrace = delegate.getTrace
|
||||
def log(level: Level.Value, message: => String): Unit = {
|
||||
if (atLevel(level))
|
||||
delegate.log(level, message)
|
||||
}
|
||||
def success(message: => String): Unit = {
|
||||
if (successEnabled)
|
||||
delegate.success(message)
|
||||
}
|
||||
def control(event: ControlEvent.Value, message: => String): Unit = {
|
||||
if (atLevel(Level.Info))
|
||||
delegate.control(event, message)
|
||||
}
|
||||
def logAll(events: Seq[LogEvent]): Unit = delegate.logAll(events)
|
||||
}
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import com.github.ghik.silencer.silent
|
||||
|
||||
/** Promotes the simple Logger interface to the full AbstractLogger interface. */
|
||||
class FullLogger(delegate: Logger) extends BasicLogger {
|
||||
@deprecated("No longer used.", "1.0.0")
|
||||
@silent override val ansiCodesSupported: Boolean = delegate.ansiCodesSupported
|
||||
|
||||
def trace(t: => Throwable): Unit = {
|
||||
if (traceEnabled)
|
||||
delegate.trace(t)
|
||||
}
|
||||
def log(level: Level.Value, message: => String): Unit = {
|
||||
if (atLevel(level))
|
||||
delegate.log(level, message)
|
||||
}
|
||||
def success(message: => String): Unit =
|
||||
if (successEnabled)
|
||||
delegate.success(message)
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
info(message)
|
||||
def logAll(events: Seq[LogEvent]): Unit = events.foreach(log)
|
||||
}
|
||||
object FullLogger {
|
||||
def apply(delegate: Logger): AbstractLogger =
|
||||
delegate match {
|
||||
case d: AbstractLogger => d
|
||||
case _ => new FullLogger(delegate)
|
||||
}
|
||||
}
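
// Hypothetical sketch of promoting a plain Logger: an AbstractLogger passed to `apply`
// is returned as-is, anything else is wrapped in a FullLogger. The anonymous Logger
// below assumes the three abstract members of sbt.util.Logger (trace, success, log).
object FullLoggerUsageSketch {
  import sbt.util.{ Level, Logger }
  import sbt.internal.util.FullLogger

  val plain: Logger = new Logger {
    def trace(t: => Throwable): Unit = ()
    def success(message: => String): Unit = Console.out.println(s"[success] $message")
    def log(level: Level.Value, message: => String): Unit = Console.out.println(s"[$level] $message")
  }

  val full = FullLogger(plain) // now usable wherever an AbstractLogger is required
  full.log(Level.Info, "forwarded to `plain` when Info is at or above the configured level")
}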
@ -0,0 +1,87 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import java.io.{ File, PrintWriter }
|
||||
import org.apache.logging.log4j.core.Appender
|
||||
|
||||
/**
|
||||
* Provides the current global logging configuration.
|
||||
*
|
||||
* `full` is the current global logger. It should not be set directly because it is generated as needed from `backing.newLogger`.
|
||||
* `console` is where all logging from all ConsoleLoggers should go.
|
||||
* `backed` is the Logger that other loggers should feed into.
|
||||
* `backing` tracks the files that persist the global logging.
|
||||
* `newAppender` creates a new global logging configuration from an appender factory and backing configuration.
|
||||
*/
|
||||
final case class GlobalLogging(
|
||||
full: ManagedLogger,
|
||||
console: ConsoleOut,
|
||||
backed: Appender,
|
||||
backing: GlobalLogBacking,
|
||||
newAppender: (ManagedLogger, PrintWriter, GlobalLogBacking) => GlobalLogging
|
||||
)
|
||||
|
||||
final case class GlobalLogging1(
|
||||
full: Logger,
|
||||
console: ConsoleOut,
|
||||
backed: AbstractLogger,
|
||||
backing: GlobalLogBacking,
|
||||
newLogger: (PrintWriter, GlobalLogBacking) => GlobalLogging1
|
||||
)
|
||||
|
||||
/**
|
||||
* Tracks the files that persist the global logging.
|
||||
* `file` is the current backing file. `last` is the previous backing file, if there is one.
|
||||
* `newBackingFile` creates a new temporary location for the next backing file.
|
||||
*/
|
||||
final case class GlobalLogBacking(file: File, last: Option[File], newBackingFile: () => File) {
|
||||
|
||||
/** Shifts the current backing file to `last` and sets the current backing to `newFile`. */
|
||||
def shift(newFile: File) = GlobalLogBacking(newFile, Some(file), newBackingFile)
|
||||
|
||||
/** Shifts the current backing file to `last` and sets the current backing to a new temporary file generated by `newBackingFile`. */
|
||||
def shiftNew() = shift(newBackingFile())
|
||||
|
||||
/**
|
||||
* If there is a previous backing file in `last`, that becomes the current backing file and the previous backing is cleared.
|
||||
* Otherwise, no changes are made.
|
||||
*/
|
||||
def unshift = GlobalLogBacking(last getOrElse file, None, newBackingFile)
|
||||
|
||||
}
|
||||
|
||||
object GlobalLogBacking {
|
||||
def apply(newBackingFile: => File): GlobalLogBacking =
|
||||
GlobalLogBacking(newBackingFile, None, newBackingFile _)
|
||||
}
|
||||
|
||||
object GlobalLogging {
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
private def generateName: String = "GlobalLogging" + generateId.incrementAndGet
|
||||
private val generateId: AtomicInteger = new AtomicInteger
|
||||
|
||||
def initial1(
|
||||
newLogger: (PrintWriter, GlobalLogBacking) => GlobalLogging1,
|
||||
newBackingFile: => File,
|
||||
console: ConsoleOut
|
||||
): GlobalLogging1 = {
|
||||
val log = ConsoleLogger(console)
|
||||
GlobalLogging1(log, console, log, GlobalLogBacking(newBackingFile), newLogger)
|
||||
}
|
||||
|
||||
def initial(
|
||||
newAppender: (ManagedLogger, PrintWriter, GlobalLogBacking) => GlobalLogging,
|
||||
newBackingFile: => File,
|
||||
console: ConsoleOut
|
||||
): GlobalLogging = {
|
||||
val loggerName = generateName
|
||||
val log = LogExchange.logger(loggerName)
|
||||
val appender = ConsoleAppender(ConsoleAppender.generateName, console)
|
||||
LogExchange.bindLoggerAppenders(loggerName, List(appender -> Level.Info))
|
||||
GlobalLogging(log, console, appender, GlobalLogBacking(newBackingFile), newAppender)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
|
||||
/**
|
||||
* Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at `level`.
|
||||
* A line is delimited by `nl`, which is by default the platform line separator.
|
||||
*/
|
||||
class LoggerWriter(
|
||||
delegate: Logger,
|
||||
unbufferedLevel: Option[Level.Value],
|
||||
nl: String = System.getProperty("line.separator")
|
||||
) extends java.io.Writer {
|
||||
def this(delegate: Logger, level: Level.Value) = this(delegate, Some(level))
|
||||
def this(delegate: Logger) = this(delegate, None)
|
||||
|
||||
private[this] val buffer = new StringBuilder
|
||||
private[this] val lines = new collection.mutable.ListBuffer[String]
|
||||
|
||||
override def close() = flush()
|
||||
|
||||
override def flush(): Unit =
|
||||
synchronized {
|
||||
if (buffer.nonEmpty) {
|
||||
log(buffer.toString)
|
||||
buffer.clear()
|
||||
}
|
||||
}
|
||||
|
||||
def flushLines(level: Level.Value): Unit =
|
||||
synchronized {
|
||||
for (line <- lines)
|
||||
delegate.log(level, line)
|
||||
lines.clear()
|
||||
}
|
||||
|
||||
override def write(content: Array[Char], offset: Int, length: Int): Unit =
|
||||
synchronized {
|
||||
buffer.appendAll(content, offset, length)
|
||||
process()
|
||||
}
|
||||
|
||||
private[this] def process(): Unit = {
|
||||
val i = buffer.indexOf(nl)
|
||||
if (i >= 0) {
|
||||
log(buffer.substring(0, i))
|
||||
buffer.delete(0, i + nl.length)
|
||||
process()
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def log(s: String): Unit = unbufferedLevel match {
|
||||
case None =>
|
||||
lines += s; ()
|
||||
case Some(level) => delegate.log(level, s)
|
||||
}
|
||||
}
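
// Hypothetical sketch: adapting a Logger to java.io.Writer so that line-oriented output
// (for example from a PrintWriter) is forwarded to the log at Info level.
object LoggerWriterUsageSketch {
  import sbt.util.{ Level, Logger }
  import sbt.internal.util.LoggerWriter

  def logAsWriter(underlying: Logger): Unit = {
    val writer = new LoggerWriter(underlying, Level.Info)
    val printer = new java.io.PrintWriter(writer)
    printer.println("first line")  // logged as soon as the line separator arrives
    printer.print("partial ")      // buffered until a newline or flush
    printer.println("line")        // flushed together with the buffered fragment
    printer.close()                // close() flushes any remaining content
  }
}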
@ -0,0 +1,108 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import java.io.PrintWriter
|
||||
import org.apache.logging.log4j.core.Appender
|
||||
|
||||
object MainAppender {
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
private def generateGlobalBackingName: String =
|
||||
"GlobalBacking" + generateId.incrementAndGet
|
||||
private val generateId: AtomicInteger = new AtomicInteger
|
||||
|
||||
def multiLogger(log: ManagedLogger, config: MainAppenderConfig): ManagedLogger = {
|
||||
import config._
|
||||
// TODO
|
||||
// backed setTrace backingTrace
|
||||
// multi: Logger
|
||||
|
||||
LogExchange.unbindLoggerAppenders(log.name)
|
||||
LogExchange.bindLoggerAppenders(
|
||||
log.name,
|
||||
(consoleOpt.toList map { appender =>
|
||||
appender match {
|
||||
case a: ConsoleAppender =>
|
||||
a.setTrace(screenTrace)
|
||||
case _ => ()
|
||||
}
|
||||
appender -> screenLevel
|
||||
}) :::
|
||||
List(backed -> backingLevel) :::
|
||||
(extra map { x =>
|
||||
(x -> Level.Info)
|
||||
})
|
||||
)
|
||||
log
|
||||
}
|
||||
|
||||
def globalDefault(
|
||||
console: ConsoleOut
|
||||
): (ManagedLogger, PrintWriter, GlobalLogBacking) => GlobalLogging = {
|
||||
lazy val newAppender: (ManagedLogger, PrintWriter, GlobalLogBacking) => GlobalLogging =
|
||||
(log, writer, backing) => {
|
||||
val backed: Appender = defaultBacked(generateGlobalBackingName)(writer)
|
||||
val full = multiLogger(log, defaultMultiConfig(Option(console), backed, Nil))
|
||||
GlobalLogging(full, console, backed, backing, newAppender)
|
||||
}
|
||||
newAppender
|
||||
}
|
||||
|
||||
def defaultMultiConfig(
|
||||
consoleOpt: Option[ConsoleOut],
|
||||
backing: Appender,
|
||||
extra: List[Appender]
|
||||
): MainAppenderConfig =
|
||||
MainAppenderConfig(
|
||||
consoleOpt map { defaultScreen(_, ConsoleAppender.noSuppressedMessage) },
|
||||
backing,
|
||||
extra,
|
||||
Level.Info,
|
||||
Level.Debug,
|
||||
-1,
|
||||
Int.MaxValue
|
||||
)
|
||||
|
||||
def defaultScreen(console: ConsoleOut): Appender =
|
||||
ConsoleAppender(ConsoleAppender.generateName, console)
|
||||
|
||||
def defaultScreen(
|
||||
console: ConsoleOut,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
): Appender =
|
||||
ConsoleAppender(ConsoleAppender.generateName, console, suppressedMessage = suppressedMessage)
|
||||
|
||||
def defaultScreen(
|
||||
name: String,
|
||||
console: ConsoleOut,
|
||||
suppressedMessage: SuppressedTraceContext => Option[String]
|
||||
): Appender =
|
||||
ConsoleAppender(name, console, suppressedMessage = suppressedMessage)
|
||||
|
||||
def defaultBacked: PrintWriter => Appender =
|
||||
defaultBacked(generateGlobalBackingName, ConsoleAppender.formatEnabledInEnv)
|
||||
|
||||
def defaultBacked(loggerName: String): PrintWriter => Appender =
|
||||
defaultBacked(loggerName, ConsoleAppender.formatEnabledInEnv)
|
||||
|
||||
def defaultBacked(useFormat: Boolean): PrintWriter => Appender =
|
||||
defaultBacked(generateGlobalBackingName, useFormat)
|
||||
|
||||
def defaultBacked(loggerName: String, useFormat: Boolean): PrintWriter => Appender =
|
||||
to => {
|
||||
ConsoleAppender(
|
||||
ConsoleAppender.generateName,
|
||||
ConsoleOut.printWriterOut(to),
|
||||
useFormat = useFormat
|
||||
)
|
||||
}
|
||||
|
||||
final case class MainAppenderConfig(
|
||||
consoleOpt: Option[Appender],
|
||||
backed: Appender,
|
||||
extra: List[Appender],
|
||||
screenLevel: Level.Value,
|
||||
backingLevel: Level.Value,
|
||||
screenTrace: Int,
|
||||
backingTrace: Int
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import org.apache.logging.log4j.{ Logger => XLogger }
|
||||
import org.apache.logging.log4j.message.ObjectMessage
|
||||
import sjsonnew.JsonFormat
|
||||
import scala.reflect.runtime.universe.TypeTag
|
||||
import sbt.internal.util.codec.JsonProtocol._
|
||||
|
||||
/**
|
||||
* Delegates log events to the associated LogExchange.
|
||||
*/
|
||||
class ManagedLogger(
|
||||
val name: String,
|
||||
val channelName: Option[String],
|
||||
val execId: Option[String],
|
||||
xlogger: XLogger
|
||||
) extends Logger {
|
||||
override def trace(t: => Throwable): Unit =
|
||||
logEvent(Level.Error, TraceEvent("Error", t, channelName, execId))
|
||||
override def log(level: Level.Value, message: => String): Unit = {
|
||||
xlogger.log(
|
||||
ConsoleAppender.toXLevel(level),
|
||||
new ObjectMessage(StringEvent(level.toString, message, channelName, execId))
|
||||
)
|
||||
}
|
||||
|
||||
private lazy val SuccessEventTag = scala.reflect.runtime.universe.typeTag[SuccessEvent]
|
||||
// send special event for success since it's not a real log level
|
||||
override def success(message: => String): Unit = {
|
||||
infoEvent[SuccessEvent](SuccessEvent(message))(
|
||||
implicitly[JsonFormat[SuccessEvent]],
|
||||
SuccessEventTag
|
||||
)
|
||||
}
|
||||
|
||||
def registerStringCodec[A: ShowLines: TypeTag]: Unit = {
|
||||
LogExchange.registerStringCodec[A]
|
||||
}
|
||||
|
||||
final def debugEvent[A: JsonFormat: TypeTag](event: => A): Unit = logEvent(Level.Debug, event)
|
||||
final def infoEvent[A: JsonFormat: TypeTag](event: => A): Unit = logEvent(Level.Info, event)
|
||||
final def warnEvent[A: JsonFormat: TypeTag](event: => A): Unit = logEvent(Level.Warn, event)
|
||||
final def errorEvent[A: JsonFormat: TypeTag](event: => A): Unit = logEvent(Level.Error, event)
|
||||
def logEvent[A: JsonFormat: TypeTag](level: Level.Value, event: => A): Unit = {
|
||||
val v: A = event
|
||||
val tag = StringTypeTag[A]
|
||||
LogExchange.getOrElseUpdateJsonCodec(tag.key, implicitly[JsonFormat[A]])
|
||||
// println("logEvent " + tag.key)
|
||||
val entry: ObjectEvent[A] = ObjectEvent(level, v, channelName, execId, tag.key)
|
||||
xlogger.log(
|
||||
ConsoleAppender.toXLevel(level),
|
||||
new ObjectMessage(entry)
|
||||
)
|
||||
}
|
||||
|
||||
@deprecated("No longer used.", "1.0.0")
|
||||
override def ansiCodesSupported = ConsoleAppender.formatEnabledInEnv
|
||||
}
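
// Hypothetical sketch of the plumbing: obtain a ManagedLogger from LogExchange, bind a
// console appender to it, and log. The logger and appender names are illustrative.
object ManagedLoggerUsageSketch {
  import sbt.internal.util.{ ConsoleAppender, ConsoleOut, LogExchange }
  import sbt.util.Level

  def main(args: Array[String]): Unit = {
    val log = LogExchange.logger("sketch-logger")
    val appender = ConsoleAppender("sketch-appender", ConsoleOut.systemOut)
    LogExchange.bindLoggerAppenders("sketch-logger", List(appender -> Level.Info))
    log.log(Level.Info, "routed through log4j to the bound console appender")
    log.success("finished") // emitted as a SuccessEvent rather than a regular level
  }
}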
@ -0,0 +1,44 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import com.github.ghik.silencer.silent
|
||||
|
||||
// note that setting the logging level on this logger has no effect on its behavior, only
|
||||
// on the behavior of the delegates.
|
||||
class MultiLogger(delegates: List[AbstractLogger]) extends BasicLogger {
|
||||
@deprecated("No longer used.", "1.0.0")
|
||||
override lazy val ansiCodesSupported = delegates exists supported
|
||||
@silent private[this] def supported = (_: AbstractLogger).ansiCodesSupported
|
||||
|
||||
override def setLevel(newLevel: Level.Value): Unit = {
|
||||
super.setLevel(newLevel)
|
||||
dispatch(new SetLevel(newLevel))
|
||||
}
|
||||
|
||||
override def setTrace(level: Int): Unit = {
|
||||
super.setTrace(level)
|
||||
dispatch(new SetTrace(level))
|
||||
}
|
||||
|
||||
override def setSuccessEnabled(flag: Boolean): Unit = {
|
||||
super.setSuccessEnabled(flag)
|
||||
dispatch(new SetSuccess(flag))
|
||||
}
|
||||
|
||||
def trace(t: => Throwable): Unit = dispatch(new Trace(t))
|
||||
def log(level: Level.Value, message: => String): Unit = dispatch(new Log(level, message))
|
||||
def success(message: => String): Unit = dispatch(new Success(message))
|
||||
def logAll(events: Seq[LogEvent]): Unit = delegates.foreach(_.logAll(events))
|
||||
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
delegates.foreach(_.control(event, message))
|
||||
|
||||
private[this] def dispatch(event: LogEvent): Unit = {
|
||||
for (d <- delegates) {
|
||||
d.log(event)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,38 @@
|
|||
package sbt
|
||||
package internal
|
||||
package util
|
||||
|
||||
import sbt.util.Level
|
||||
import sjsonnew.JsonFormat
|
||||
import sjsonnew.support.scalajson.unsafe.Converter
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
|
||||
final class ObjectEvent[A](
|
||||
val level: Level.Value,
|
||||
val message: A,
|
||||
val channelName: Option[String],
|
||||
val execId: Option[String],
|
||||
val contentType: String,
|
||||
val json: JValue
|
||||
) extends Serializable {
|
||||
override def toString: String =
|
||||
s"ObjectEvent($level, $message, $channelName, $execId, $contentType, $json)"
|
||||
}
|
||||
|
||||
object ObjectEvent {
|
||||
def apply[A: JsonFormat](
|
||||
level: Level.Value,
|
||||
message: A,
|
||||
channelName: Option[String],
|
||||
execId: Option[String],
|
||||
contentType: String
|
||||
): ObjectEvent[A] =
|
||||
new ObjectEvent(
|
||||
level,
|
||||
message,
|
||||
channelName,
|
||||
execId,
|
||||
contentType,
|
||||
Converter.toJsonUnsafe(message)
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Tony Sloane
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.io.IO
|
||||
import scala.collection.mutable.ListBuffer
|
||||
|
||||
object StackTrace {
|
||||
def isSbtClass(name: String) = name.startsWith("sbt.") || name.startsWith("xsbt.")
|
||||
|
||||
/**
|
||||
* Return a printable representation of the stack trace associated
|
||||
* with t. Information about t and its Throwable causes is included.
|
||||
* The number of lines to be included for each Throwable is configured
|
||||
* via d which should be greater than or equal to 0.
|
||||
*
|
||||
* - If d is 0, then all elements are included up to (but not including)
|
||||
* the first element that comes from sbt.
|
||||
* - If d is greater than 0, then up to that many lines are included,
|
||||
* where the line for the Throwable is counted plus one line for each stack element.
|
||||
* Fewer lines will be included if there are not enough stack elements.
|
||||
*
|
||||
* See also ConsoleAppender where d <= 2 is treated specially by
|
||||
* printing a predefined note about the suppressed stack trace.
|
||||
*/
|
||||
def trimmedLines(t: Throwable, d: Int): List[String] = {
|
||||
require(d >= 0)
|
||||
val b = new ListBuffer[String]()
|
||||
|
||||
def appendStackTrace(t: Throwable, first: Boolean): Unit = {
|
||||
|
||||
val include: StackTraceElement => Boolean =
|
||||
if (d == 0)
|
||||
element => !isSbtClass(element.getClassName)
|
||||
else {
|
||||
var count = d - 1
|
||||
(_ => { count -= 1; count >= 0 })
|
||||
}
|
||||
|
||||
def appendElement(e: StackTraceElement): Unit = {
|
||||
b.append("\tat " + e)
|
||||
()
|
||||
}
|
||||
|
||||
if (!first) b.append("Caused by: " + t.toString)
|
||||
else b.append(t.toString)
|
||||
|
||||
val els = t.getStackTrace()
|
||||
var i = 0
|
||||
while ((i < els.size) && include(els(i))) {
|
||||
appendElement(els(i))
|
||||
i += 1
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
appendStackTrace(t, true)
|
||||
var c = t
|
||||
while (c.getCause() != null) {
|
||||
c = c.getCause()
|
||||
appendStackTrace(c, false)
|
||||
}
|
||||
b.toList
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a printable representation of the stack trace associated
|
||||
* with t. Information about t and its Throwable causes is included.
|
||||
* The number of lines to be included for each Throwable is configured
|
||||
* via d which should be greater than or equal to 0.
|
||||
*
|
||||
* - If d is 0, then all elements are included up to (but not including)
|
||||
* the first element that comes from sbt.
|
||||
* - If d is greater than 0, then up to that many lines are included,
|
||||
* where the line for the Throwable is counted plus one line for each stack element.
|
||||
* Fewer lines will be included if there are not enough stack elements.
|
||||
*/
|
||||
def trimmed(t: Throwable, d: Int): String =
|
||||
trimmedLines(t, d).mkString(IO.Newline)
|
||||
}
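
// Illustrative sketch of the depth parameter (values are hypothetical): with d = 3 each
// Throwable contributes its own line plus at most two stack frames; with d = 0 frames
// are kept up to the first sbt/xsbt frame.
object StackTraceUsageSketch {
  import sbt.internal.util.StackTrace

  def main(args: Array[String]): Unit = {
    val boom = new RuntimeException("outer", new IllegalStateException("inner"))
    println(StackTrace.trimmed(boom, 3))
    println("----")
    StackTrace.trimmedLines(boom, 0).foreach(println)
  }
}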
@ -0,0 +1,44 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import scala.reflect.runtime.universe._
|
||||
|
||||
/** This is used to carry type information in JSON. */
|
||||
final case class StringTypeTag[A](key: String) {
|
||||
override def toString: String = key
|
||||
}
|
||||
|
||||
object StringTypeTag {
|
||||
def apply[A: TypeTag]: StringTypeTag[A] =
|
||||
synchronized {
|
||||
def doApply: StringTypeTag[A] = {
|
||||
val tag = implicitly[TypeTag[A]]
|
||||
val tpe = tag.tpe
|
||||
val k = typeToString(tpe)
|
||||
// println(tpe.getClass.toString + " " + k)
|
||||
StringTypeTag[A](k)
|
||||
}
|
||||
def retry(n: Int): StringTypeTag[A] =
|
||||
try {
|
||||
doApply
|
||||
} catch {
|
||||
case e: NullPointerException =>
|
||||
if (n < 1) throw new RuntimeException("NPE in StringTypeTag", e)
|
||||
else {
|
||||
Thread.sleep(1)
|
||||
retry(n - 1)
|
||||
}
|
||||
}
|
||||
retry(3)
|
||||
}
|
||||
|
||||
def typeToString(tpe: Type): String =
|
||||
tpe match {
|
||||
case TypeRef(_, sym, args) =>
|
||||
if (args.nonEmpty) {
|
||||
val typeCon = tpe.typeSymbol.fullName
|
||||
val typeArgs = args map typeToString
|
||||
s"""$typeCon[${typeArgs.mkString(",")}]"""
|
||||
} else tpe.toString
|
||||
case _ => tpe.toString
|
||||
}
|
||||
}
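
// Hypothetical examples of the keys produced for parameterized types; the exact strings
// depend on how runtime reflection renders the argument types.
object StringTypeTagUsageSketch {
  import sbt.internal.util.StringTypeTag

  val listTag = StringTypeTag[List[String]]
  // listTag.key is expected to look like "scala.collection.immutable.List[String]"
  val mapTag = StringTypeTag[Map[String, Int]]
  // mapTag.key is expected to look like "scala.collection.immutable.Map[String,Int]"
}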
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright (C) 2017 Lightbend Inc. <http://www.lightbend.com>
|
||||
*/
|
||||
|
||||
package sbt
|
||||
package internal
|
||||
package util.codec
|
||||
|
||||
import sjsonnew.{ JsonWriter => JW, JsonReader => JR, JsonFormat => JF, _ }
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe._
|
||||
|
||||
trait JValueFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit val JNullFormat: JF[JNull.type] = new JF[JNull.type] {
|
||||
def write[J](x: JNull.type, b: Builder[J]) = b.writeNull()
|
||||
def read[J](j: Option[J], u: Unbuilder[J]) = JNull
|
||||
}
|
||||
|
||||
implicit val JBooleanFormat: JF[JBoolean] = projectFormat(_.get, (x: Boolean) => JBoolean(x))
|
||||
implicit val JStringFormat: JF[JString] = projectFormat(_.value, (x: String) => JString(x))
|
||||
|
||||
implicit val JNumberFormat: JF[JNumber] =
|
||||
projectFormat(x => BigDecimal(x.value), (x: BigDecimal) => JNumber(x.toString))
|
||||
|
||||
implicit val JArrayFormat: JF[JArray] = projectFormat[JArray, Array[JValue]](_.value, JArray(_))
|
||||
|
||||
implicit lazy val JObjectJsonWriter: JW[JObject] = new JW[JObject] {
|
||||
def write[J](x: JObject, b: Builder[J]) = {
|
||||
b.beginObject()
|
||||
x.value foreach (jsonField => JValueFormat.addField(jsonField.field, jsonField.value, b))
|
||||
b.endObject()
|
||||
}
|
||||
}
|
||||
|
||||
implicit lazy val JValueJsonWriter: JW[JValue] = new JW[JValue] {
|
||||
def write[J](x: JValue, b: Builder[J]) = x match {
|
||||
case x: JNull.type => JNullFormat.write(x, b)
|
||||
case x: JBoolean => JBooleanFormat.write(x, b)
|
||||
case x: JString => JStringFormat.write(x, b)
|
||||
case x: JNumber => JNumberFormat.write(x, b)
|
||||
case x: JArray => JArrayFormat.write(x, b)
|
||||
case x: JObject => JObjectJsonWriter.write(x, b)
|
||||
}
|
||||
}
|
||||
|
||||
// This passes through JValue, or returns JNull instead of blowing up with unimplemented.
|
||||
implicit lazy val JValueJsonReader: JR[JValue] = new JR[JValue] {
|
||||
def read[J](j: Option[J], u: Unbuilder[J]) = j match {
|
||||
case Some(x: JValue) => x
|
||||
case Some(x) => sys.error(s"Unknown AST $x")
|
||||
case _ => JNull
|
||||
}
|
||||
}
|
||||
|
||||
implicit lazy val JValueFormat: JF[JValue] =
|
||||
jsonFormat[JValue](JValueJsonReader, JValueJsonWriter)
|
||||
}
|
||||
|
|
@ -0,0 +1,69 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
package sbt.internal.util.codec
|
||||
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
|
||||
import xsbti.Position
|
||||
import java.util.Optional
|
||||
|
||||
trait PositionFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val PositionFormat: JsonFormat[Position] = new JsonFormat[Position] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Position = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val line0 = unbuilder.readField[Optional[java.lang.Integer]]("line")
|
||||
val lineContent0 = unbuilder.readField[String]("lineContent")
|
||||
val offset0 = unbuilder.readField[Optional[java.lang.Integer]]("offset")
|
||||
val pointer0 = unbuilder.readField[Optional[java.lang.Integer]]("pointer")
|
||||
val pointerSpace0 = unbuilder.readField[Optional[String]]("pointerSpace")
|
||||
val sourcePath0 = unbuilder.readField[Optional[String]]("sourcePath")
|
||||
val sourceFile0 = unbuilder.readField[Optional[java.io.File]]("sourceFile")
|
||||
val startOffset0 = unbuilder.readField[Optional[java.lang.Integer]]("startOffset")
|
||||
val endOffset0 = unbuilder.readField[Optional[java.lang.Integer]]("endOffset")
|
||||
val startLine0 = unbuilder.readField[Optional[java.lang.Integer]]("startLine")
|
||||
val startColumn0 = unbuilder.readField[Optional[java.lang.Integer]]("startColumn")
|
||||
val endLine0 = unbuilder.readField[Optional[java.lang.Integer]]("endLine")
|
||||
val endColumn0 = unbuilder.readField[Optional[java.lang.Integer]]("endColumn")
|
||||
|
||||
unbuilder.endObject()
|
||||
new Position() {
|
||||
override val line = line0
|
||||
override val lineContent = lineContent0
|
||||
override val offset = offset0
|
||||
override val pointer = pointer0
|
||||
override val pointerSpace = pointerSpace0
|
||||
override val sourcePath = sourcePath0
|
||||
override val sourceFile = sourceFile0
|
||||
override val startOffset = startOffset0
|
||||
override val endOffset = endOffset0
|
||||
override val startLine = startLine0
|
||||
override val startColumn = startColumn0
|
||||
override val endLine = endLine0
|
||||
override val endColumn = endColumn0
|
||||
|
||||
}
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: Position, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("line", obj.line)
|
||||
builder.addField("lineContent", obj.lineContent)
|
||||
builder.addField("offset", obj.offset)
|
||||
builder.addField("pointer", obj.pointer)
|
||||
builder.addField("pointerSpace", obj.pointerSpace)
|
||||
builder.addField("sourcePath", obj.sourcePath)
|
||||
builder.addField("sourceFile", obj.sourceFile)
|
||||
builder.addField("startOffset", obj.startOffset)
|
||||
builder.addField("endOffset", obj.endOffset)
|
||||
builder.addField("startLine", obj.startLine)
|
||||
builder.addField("startColumn", obj.startColumn)
|
||||
builder.addField("endLine", obj.endLine)
|
||||
builder.addField("endColumn", obj.endColumn)
|
||||
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
package sbt.internal.util.codec
|
||||
|
||||
import xsbti.{ Problem, Severity, Position }
|
||||
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
|
||||
import java.util.Optional
|
||||
|
||||
trait ProblemFormats { self: SeverityFormats with PositionFormats with sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val ProblemFormat: JsonFormat[Problem] = new JsonFormat[Problem] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Problem = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val category0 = unbuilder.readField[String]("category")
|
||||
val severity0 = unbuilder.readField[Severity]("severity")
|
||||
val message0 = unbuilder.readField[String]("message")
|
||||
val position0 = unbuilder.readField[Position]("position")
|
||||
val rendered0 = unbuilder.readField[Optional[String]]("rendered")
|
||||
|
||||
unbuilder.endObject()
|
||||
new Problem {
|
||||
override val category = category0
|
||||
override val position = position0
|
||||
override val message = message0
|
||||
override val severity = severity0
|
||||
override val rendered = rendered0
|
||||
}
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: Problem, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("category", obj.category)
|
||||
builder.addField("severity", obj.severity)
|
||||
builder.addField("message", obj.message)
|
||||
builder.addField("position", obj.position)
|
||||
builder.addField("rendered", obj.rendered)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
/**
|
||||
* This code is generated using [[http://www.scala-sbt.org/contraband/ sbt-contraband]].
|
||||
*/
|
||||
package sbt.internal.util.codec
|
||||
|
||||
import _root_.sjsonnew.{ deserializationError, Builder, JsonFormat, Unbuilder }
|
||||
import xsbti.Severity;
|
||||
|
||||
trait SeverityFormats { self: sjsonnew.BasicJsonProtocol =>
|
||||
implicit lazy val SeverityFormat: JsonFormat[Severity] = new JsonFormat[Severity] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Severity = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.readString(js) match {
|
||||
case "Info" => Severity.Info
|
||||
case "Warn" => Severity.Warn
|
||||
case "Error" => Severity.Error
|
||||
}
|
||||
case None =>
|
||||
deserializationError("Expected JsString but found None")
|
||||
}
|
||||
}
|
||||
override def write[J](obj: Severity, builder: Builder[J]): Unit = {
|
||||
val str = obj match {
|
||||
case Severity.Info => "Info"
|
||||
case Severity.Warn => "Warn"
|
||||
case Severity.Error => "Error"
|
||||
}
|
||||
builder.writeString(str)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
package sbt
|
||||
package internal.util.codec
|
||||
|
||||
import sbt.util.ShowLines
|
||||
import sbt.internal.util.SuccessEvent
|
||||
|
||||
trait SuccessEventShowLines {
|
||||
implicit val sbtSuccessEventShowLines: ShowLines[SuccessEvent] =
|
||||
ShowLines[SuccessEvent]((e: SuccessEvent) => {
|
||||
Vector(e.message)
|
||||
})
|
||||
}
|
||||
|
||||
object SuccessEventShowLines extends SuccessEventShowLines
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
package sbt
|
||||
package internal.util.codec
|
||||
|
||||
import sbt.util.ShowLines
|
||||
import sbt.internal.util.{ StackTrace, TraceEvent }
|
||||
|
||||
trait ThrowableShowLines {
|
||||
implicit val sbtThrowableShowLines: ShowLines[Throwable] =
|
||||
ShowLines[Throwable]((t: Throwable) => {
|
||||
// 0 means enabled with default behavior. See StackTrace.scala.
|
||||
val traceLevel = 0
|
||||
List(StackTrace.trimmed(t, traceLevel))
|
||||
})
|
||||
}
|
||||
|
||||
object ThrowableShowLines extends ThrowableShowLines
|
||||
|
||||
trait TraceEventShowLines {
|
||||
implicit val sbtTraceEventShowLines: ShowLines[TraceEvent] =
|
||||
ShowLines[TraceEvent]((t: TraceEvent) => {
|
||||
ThrowableShowLines.sbtThrowableShowLines.showLines(t.message)
|
||||
})
|
||||
}
|
||||
|
||||
object TraceEventShowLines extends TraceEventShowLines
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
package sbt.util
|
||||
|
||||
abstract class AbstractLogger extends Logger {
|
||||
def getLevel: Level.Value
|
||||
def setLevel(newLevel: Level.Value): Unit
|
||||
def setTrace(flag: Int): Unit
|
||||
def getTrace: Int
|
||||
final def traceEnabled: Boolean = getTrace >= 0
|
||||
def successEnabled: Boolean
|
||||
def setSuccessEnabled(flag: Boolean): Unit
|
||||
|
||||
def atLevel(level: Level.Value): Boolean = level.id >= getLevel.id
|
||||
def control(event: ControlEvent.Value, message: => String): Unit
|
||||
|
||||
def logAll(events: Seq[LogEvent]): Unit
|
||||
|
||||
/** Defined in terms of other methods in Logger and should not be called from them. */
|
||||
final def log(event: LogEvent): Unit = {
|
||||
event match {
|
||||
case s: Success => success(s.msg)
|
||||
case l: Log => log(l.level, l.msg)
|
||||
case t: Trace => trace(t.exception)
|
||||
case setL: SetLevel => setLevel(setL.newLevel)
|
||||
case setT: SetTrace => setTrace(setT.level)
|
||||
case setS: SetSuccess => setSuccessEnabled(setS.enabled)
|
||||
case c: ControlEvent => control(c.event, c.msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,171 @@
|
|||
package sbt.util
|
||||
|
||||
import xsbti.{ Position, Problem, Severity, T2 }
|
||||
import java.io.File
|
||||
import java.util.Optional
|
||||
import java.util.function.Supplier
|
||||
|
||||
object InterfaceUtil {
|
||||
def toSupplier[A](a: => A): Supplier[A] = new Supplier[A] {
|
||||
override def get: A = a
|
||||
}
|
||||
|
||||
import java.util.function.{ Function => JavaFunction }
|
||||
def toJavaFunction[A1, R](f: A1 => R): JavaFunction[A1, R] = new JavaFunction[A1, R] {
|
||||
override def apply(t: A1): R = f(t)
|
||||
}
|
||||
|
||||
def t2[A1, A2](x: (A1, A2)): T2[A1, A2] = new ConcreteT2(x._1, x._2)
|
||||
|
||||
def toOption[A](m: Optional[A]): Option[A] =
|
||||
if (m.isPresent) Some(m.get) else None
|
||||
|
||||
def toOptional[A](o: Option[A]): Optional[A] =
|
||||
o match {
|
||||
case Some(v) => Optional.of(v)
|
||||
case None => Optional.empty()
|
||||
}
|
||||
|
||||
def jo2o[A](o: Optional[A]): Option[A] =
|
||||
if (o.isPresent) Some(o.get)
|
||||
else None
|
||||
|
||||
def o2jo[A](o: Option[A]): Optional[A] =
|
||||
o match {
|
||||
case Some(v) => Optional.ofNullable(v)
|
||||
case None => Optional.empty[A]()
|
||||
}
|
||||
|
||||
@deprecated("Use the overload of this method with more arguments", "1.2.2")
|
||||
def position(
|
||||
line0: Option[Integer],
|
||||
content: String,
|
||||
offset0: Option[Integer],
|
||||
pointer0: Option[Integer],
|
||||
pointerSpace0: Option[String],
|
||||
sourcePath0: Option[String],
|
||||
sourceFile0: Option[File]
|
||||
): Position =
|
||||
position(
|
||||
line0,
|
||||
content,
|
||||
offset0,
|
||||
pointer0,
|
||||
pointerSpace0,
|
||||
sourcePath0,
|
||||
sourceFile0,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None
|
||||
)
|
||||
|
||||
def position(
|
||||
line0: Option[Integer],
|
||||
content: String,
|
||||
offset0: Option[Integer],
|
||||
pointer0: Option[Integer],
|
||||
pointerSpace0: Option[String],
|
||||
sourcePath0: Option[String],
|
||||
sourceFile0: Option[File],
|
||||
startOffset0: Option[Integer],
|
||||
endOffset0: Option[Integer],
|
||||
startLine0: Option[Integer],
|
||||
startColumn0: Option[Integer],
|
||||
endLine0: Option[Integer],
|
||||
endColumn0: Option[Integer]
|
||||
): Position =
|
||||
new ConcretePosition(
|
||||
line0,
|
||||
content,
|
||||
offset0,
|
||||
pointer0,
|
||||
pointerSpace0,
|
||||
sourcePath0,
|
||||
sourceFile0,
|
||||
startOffset0,
|
||||
endOffset0,
|
||||
startLine0,
|
||||
startColumn0,
|
||||
endLine0,
|
||||
endColumn0
|
||||
)
|
||||
|
||||
@deprecated("Use the overload of this method with more arguments", "1.2.2")
|
||||
def problem(cat: String, pos: Position, msg: String, sev: Severity): Problem =
|
||||
problem(cat, pos, msg, sev, None)
|
||||
|
||||
def problem(
|
||||
cat: String,
|
||||
pos: Position,
|
||||
msg: String,
|
||||
sev: Severity,
|
||||
rendered: Option[String]
|
||||
): Problem =
|
||||
new ConcreteProblem(cat, pos, msg, sev, rendered)
|
||||
|
||||
private final class ConcreteT2[A1, A2](a1: A1, a2: A2) extends T2[A1, A2] {
|
||||
val get1: A1 = a1
|
||||
val get2: A2 = a2
|
||||
override def toString: String = s"ConcreteT2($a1, $a2)"
|
||||
override def equals(o: Any): Boolean = o match {
|
||||
case o: ConcreteT2[A1, A2] =>
|
||||
this.get1 == o.get1 &&
|
||||
this.get2 == o.get2
|
||||
case _ => false
|
||||
}
|
||||
override def hashCode: Int = {
|
||||
var hash = 1
|
||||
hash = hash * 31 + this.get1.##
|
||||
hash = hash * 31 + this.get2.##
|
||||
hash
|
||||
}
|
||||
}
|
||||
|
||||
private final class ConcretePosition(
|
||||
line0: Option[Integer],
|
||||
content: String,
|
||||
offset0: Option[Integer],
|
||||
pointer0: Option[Integer],
|
||||
pointerSpace0: Option[String],
|
||||
sourcePath0: Option[String],
|
||||
sourceFile0: Option[File],
|
||||
startOffset0: Option[Integer],
|
||||
endOffset0: Option[Integer],
|
||||
startLine0: Option[Integer],
|
||||
startColumn0: Option[Integer],
|
||||
endLine0: Option[Integer],
|
||||
endColumn0: Option[Integer]
|
||||
) extends Position {
|
||||
val line = o2jo(line0)
|
||||
val lineContent = content
|
||||
val offset = o2jo(offset0)
|
||||
val pointer = o2jo(pointer0)
|
||||
val pointerSpace = o2jo(pointerSpace0)
|
||||
val sourcePath = o2jo(sourcePath0)
|
||||
val sourceFile = o2jo(sourceFile0)
|
||||
override val startOffset = o2jo(startOffset0)
|
||||
override val endOffset = o2jo(endOffset0)
|
||||
override val startLine = o2jo(startLine0)
|
||||
override val startColumn = o2jo(startColumn0)
|
||||
override val endLine = o2jo(endLine0)
|
||||
override val endColumn = o2jo(endColumn0)
|
||||
}
|
||||
|
||||
private final class ConcreteProblem(
|
||||
cat: String,
|
||||
pos: Position,
|
||||
msg: String,
|
||||
sev: Severity,
|
||||
rendered0: Option[String]
|
||||
) extends Problem {
|
||||
val category = cat
|
||||
val position = pos
|
||||
val message = msg
|
||||
val severity = sev
|
||||
override val rendered = o2jo(rendered0)
|
||||
override def toString = s"[$severity] $pos: $message"
|
||||
}
|
||||
}
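
// Hypothetical round-trip sketch for the Option/Optional helpers above.
object InterfaceUtilUsageSketch {
  import java.util.Optional
  import sbt.util.InterfaceUtil._

  val present: Optional[String] = toOptional(Some("value")) // Optional.of("value")
  val absent: Optional[String] = toOptional[String](None)   // Optional.empty
  val roundTrip: Option[String] = toOption(present)         // Some("value")
  val lazily = toSupplier { "computed only when get() is called" }
}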
@ -0,0 +1,30 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
/**
|
||||
* An enumeration defining the levels available for logging. A level includes all of the levels
|
||||
* with id larger than its own id. For example, Warn (id=3) includes Error (id=4).
|
||||
*/
|
||||
object Level extends Enumeration {
|
||||
val Debug = Value(1, "debug")
|
||||
val Info = Value(2, "info")
|
||||
val Warn = Value(3, "warn")
|
||||
val Error = Value(4, "error")
|
||||
|
||||
/**
|
||||
* Defines the label to use for success messages.
|
||||
* Because the label for levels is defined in this module, the success label is also defined here.
|
||||
*/
|
||||
val SuccessLabel = "success"
|
||||
|
||||
def union(a: Value, b: Value) = if (a.id < b.id) a else b
|
||||
def unionAll(vs: Seq[Value]) = vs reduceLeft union
|
||||
|
||||
/** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */
|
||||
def apply(s: String) = values.find(s == _.toString)
|
||||
|
||||
/** Same as apply, defined for use in pattern matching. */
|
||||
private[sbt] def unapply(s: String) = apply(s)
|
||||
}
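
// Illustrative sketch of the helpers above (hypothetical values): `apply` looks up a
// level by name, and `union` keeps the more verbose (lower id) of two levels.
object LevelUsageSketch {
  val warn: Option[Level.Value] = Level("warn")                    // Some(Level.Warn)
  val unknown: Option[Level.Value] = Level("verbose")              // None
  val verbose: Level.Value = Level.union(Level.Debug, Level.Warn)  // Level.Debug
  val merged: Level.Value = Level.unionAll(Seq(Level.Info, Level.Error, Level.Warn)) // Level.Info
}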
@ -0,0 +1,17 @@
|
|||
/* sbt -- Simple Build Tool
 * Copyright 2008, 2009 Mark Harrah
 */
package sbt.util

sealed trait LogEvent
final class Success(val msg: String) extends LogEvent
final class Log(val level: Level.Value, val msg: String) extends LogEvent
final class Trace(val exception: Throwable) extends LogEvent
final class SetLevel(val newLevel: Level.Value) extends LogEvent
final class SetTrace(val level: Int) extends LogEvent
final class SetSuccess(val enabled: Boolean) extends LogEvent
final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent

object ControlEvent extends Enumeration {
  val Start, Header, Finish = Value
}
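// Illustrative sketch (not part of the original sources): consuming LogEvent values,
// for example when replaying buffered events against a concrete Logger. Only the
// message-carrying events are handled; the Set* and ControlEvent cases need an
// AbstractLogger and are ignored here.
object LogEventReplayExample {
  import sbt.util._

  def replay(log: Logger, events: Seq[LogEvent]): Unit =
    events foreach {
      case s: Success => log.success(s.msg)
      case l: Log     => log.log(l.level, l.msg)
      case t: Trace   => log.trace(t.exception)
      case _          => () // SetLevel / SetTrace / SetSuccess / ControlEvent
    }
}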
|
|
@ -0,0 +1,147 @@
|
|||
package sbt.util
|
||||
|
||||
import sbt.internal.util._
|
||||
import org.apache.logging.log4j.{ LogManager => XLogManager, Level => XLevel }
|
||||
import org.apache.logging.log4j.core._
|
||||
import org.apache.logging.log4j.core.appender.AsyncAppender
|
||||
import org.apache.logging.log4j.core.config.{ AppenderRef, LoggerConfig }
|
||||
import org.apache.logging.log4j.core.layout.PatternLayout
|
||||
import scala.collection.JavaConverters._
|
||||
import scala.collection.concurrent
|
||||
import scala.reflect.runtime.universe.TypeTag
|
||||
import sjsonnew.JsonFormat
|
||||
|
||||
// http://logging.apache.org/log4j/2.x/manual/customconfig.html
|
||||
// https://logging.apache.org/log4j/2.x/log4j-core/apidocs/index.html
|
||||
|
||||
sealed abstract class LogExchange {
|
||||
private[sbt] lazy val context: LoggerContext = init()
|
||||
private[sbt] lazy val builtInStringCodecs: Unit = initStringCodecs()
|
||||
private[sbt] lazy val asyncStdout: AsyncAppender = buildAsyncStdout
|
||||
private[sbt] val jsonCodecs: concurrent.Map[String, JsonFormat[_]] = concurrent.TrieMap()
|
||||
private[sbt] val stringCodecs: concurrent.Map[String, ShowLines[_]] = concurrent.TrieMap()
|
||||
|
||||
def logger(name: String): ManagedLogger = logger(name, None, None)
|
||||
def logger(name: String, channelName: Option[String], execId: Option[String]): ManagedLogger = {
|
||||
val _ = context
|
||||
val codecs = builtInStringCodecs
|
||||
val ctx = XLogManager.getContext(false) match { case x: LoggerContext => x }
|
||||
val config = ctx.getConfiguration
|
||||
val loggerConfig = LoggerConfig.createLogger(
|
||||
false,
|
||||
XLevel.DEBUG,
|
||||
name,
|
||||
// disable the calculation of caller location as it is very expensive
|
||||
// https://issues.apache.org/jira/browse/LOG4J2-153
|
||||
"false",
|
||||
Array[AppenderRef](),
|
||||
null,
|
||||
config,
|
||||
null
|
||||
)
|
||||
config.addLogger(name, loggerConfig)
|
||||
ctx.updateLoggers
|
||||
val logger = ctx.getLogger(name)
|
||||
new ManagedLogger(name, channelName, execId, logger)
|
||||
}
|
||||
def unbindLoggerAppenders(loggerName: String): Unit = {
|
||||
val lc = loggerConfig(loggerName)
|
||||
lc.getAppenders.asScala foreach {
|
||||
case (k, v) => lc.removeAppender(k)
|
||||
}
|
||||
}
|
||||
def bindLoggerAppenders(loggerName: String, appenders: List[(Appender, Level.Value)]): Unit = {
|
||||
val lc = loggerConfig(loggerName)
|
||||
appenders foreach {
|
||||
case (x, lv) => lc.addAppender(x, ConsoleAppender.toXLevel(lv), null)
|
||||
}
|
||||
}
|
||||
def loggerConfig(loggerName: String): LoggerConfig = {
|
||||
val ctx = XLogManager.getContext(false) match { case x: LoggerContext => x }
|
||||
val config = ctx.getConfiguration
|
||||
config.getLoggerConfig(loggerName)
|
||||
}
|
||||
|
||||
// Construct these StringTypeTags manually, because they're used at the very startup of sbt
|
||||
// and we'll try not to initialize the universe by using the StringTypeTag.apply that requires a TypeTag
|
||||
// A better long-term solution could be to make StringTypeTag.apply a macro.
|
||||
lazy val stringTypeTagThrowable = StringTypeTag[Throwable]("scala.Throwable")
|
||||
lazy val stringTypeTagTraceEvent = StringTypeTag[TraceEvent]("sbt.internal.util.TraceEvent")
|
||||
lazy val stringTypeTagSuccessEvent = StringTypeTag[SuccessEvent]("sbt.internal.util.SuccessEvent")
|
||||
|
||||
private[sbt] def initStringCodecs(): Unit = {
|
||||
import sbt.internal.util.codec.ThrowableShowLines._
|
||||
import sbt.internal.util.codec.TraceEventShowLines._
|
||||
import sbt.internal.util.codec.SuccessEventShowLines._
|
||||
|
||||
registerStringCodecByStringTypeTag(stringTypeTagThrowable)
|
||||
registerStringCodecByStringTypeTag(stringTypeTagTraceEvent)
|
||||
registerStringCodecByStringTypeTag(stringTypeTagSuccessEvent)
|
||||
}
|
||||
|
||||
// This is a dummy layout to avoid casting error during PatternLayout.createDefaultLayout()
|
||||
// that was originally used for ConsoleAppender.
|
||||
// The stacktrace shows it's having issue initializing default DefaultConfiguration.
|
||||
// Since we currently do not use Layout inside ConsoleAppender, the actual pattern is not relevant.
|
||||
private[sbt] lazy val dummyLayout: PatternLayout = {
|
||||
val _ = context
|
||||
val ctx = XLogManager.getContext(false) match { case x: LoggerContext => x }
|
||||
val config = ctx.getConfiguration
|
||||
val lo = PatternLayout.newBuilder
|
||||
.withConfiguration(config)
|
||||
.withPattern(PatternLayout.SIMPLE_CONVERSION_PATTERN)
|
||||
.build
|
||||
lo
|
||||
}
|
||||
|
||||
def jsonCodec[A](tag: String): Option[JsonFormat[A]] =
|
||||
jsonCodecs.get(tag) map { _.asInstanceOf[JsonFormat[A]] }
|
||||
def hasJsonCodec(tag: String): Boolean =
|
||||
jsonCodecs.contains(tag)
|
||||
def getOrElseUpdateJsonCodec[A](tag: String, v: JsonFormat[A]): JsonFormat[A] =
|
||||
jsonCodecs.getOrElseUpdate(tag, v).asInstanceOf[JsonFormat[A]]
|
||||
def stringCodec[A](tag: String): Option[ShowLines[A]] =
|
||||
stringCodecs.get(tag) map { _.asInstanceOf[ShowLines[A]] }
|
||||
def hasStringCodec(tag: String): Boolean =
|
||||
stringCodecs.contains(tag)
|
||||
def getOrElseUpdateStringCodec[A](tag: String, v: ShowLines[A]): ShowLines[A] =
|
||||
stringCodecs.getOrElseUpdate(tag, v).asInstanceOf[ShowLines[A]]
|
||||
|
||||
def registerStringCodec[A: ShowLines: TypeTag]: Unit = {
|
||||
val tag = StringTypeTag[A]
|
||||
registerStringCodecByStringTypeTag(tag)
|
||||
}
|
||||
|
||||
private[sbt] def registerStringCodecByStringTypeTag[A: ShowLines](tag: StringTypeTag[A]): Unit = {
|
||||
val ev = implicitly[ShowLines[A]]
|
||||
val _ = getOrElseUpdateStringCodec(tag.key, ev)
|
||||
}
|
||||
|
||||
private[sbt] def buildAsyncStdout: AsyncAppender = {
|
||||
val ctx = XLogManager.getContext(false) match { case x: LoggerContext => x }
|
||||
val config = ctx.getConfiguration
|
||||
val appender = ConsoleAppender("Stdout")
|
||||
// CustomConsoleAppenderImpl.createAppender("Stdout", layout, null, null)
|
||||
appender.start
|
||||
config.addAppender(appender)
|
||||
val asyncAppender: AsyncAppender = AsyncAppender
|
||||
.newBuilder()
|
||||
.setName("AsyncStdout")
|
||||
.setAppenderRefs(Array(AppenderRef.createAppenderRef("Stdout", XLevel.DEBUG, null)))
|
||||
.setBlocking(false)
|
||||
.setConfiguration(config)
|
||||
.build
|
||||
asyncAppender.start
|
||||
config.addAppender(asyncAppender)
|
||||
asyncAppender
|
||||
}
|
||||
private[sbt] def init(): LoggerContext = {
|
||||
import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory
|
||||
import org.apache.logging.log4j.core.config.Configurator
|
||||
val builder = ConfigurationBuilderFactory.newConfigurationBuilder
|
||||
builder.setConfigurationName("sbt.util.logging")
|
||||
val ctx = Configurator.initialize(builder.build())
|
||||
ctx match { case x: LoggerContext => x }
|
||||
}
|
||||
}
|
||||
object LogExchange extends LogExchange
|
||||
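// Illustrative sketch (not part of the original sources): the typical LogExchange
// flow -- obtain a named logger, bind the shared asynchronous stdout appender at a
// level, log through it, then unbind. asyncStdout is private[sbt], hence the
// sbt.example package used here; the logger name is arbitrary.
package sbt.example

import sbt.util.{ Level, LogExchange }

object LogExchangeUsageExample {
  def main(args: Array[String]): Unit = {
    val log = LogExchange.logger("example")
    LogExchange.bindLoggerAppenders("example", List(LogExchange.asyncStdout -> Level.Info))
    log.info("visible at Info and above")
    log.debug("suppressed by the Info binding")
    LogExchange.unbindLoggerAppenders("example")
  }
}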
|
|
@ -0,0 +1,123 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2008, 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
import xsbti.{ Logger => xLogger }
|
||||
import xsbti.{ Position, Problem, Severity }
|
||||
|
||||
import sys.process.ProcessLogger
|
||||
import sbt.internal.util.{ BufferedLogger, FullLogger }
|
||||
import java.io.File
|
||||
import java.util.Optional
|
||||
import java.util.function.Supplier
|
||||
|
||||
/**
|
||||
* This is intended to be the simplest logging interface for use by code that wants to log.
|
||||
* It does not include configuring the logger.
|
||||
*/
|
||||
abstract class Logger extends xLogger {
|
||||
final def verbose(message: => String): Unit = debug(message)
|
||||
final def debug(message: => String): Unit = log(Level.Debug, message)
|
||||
final def info(message: => String): Unit = log(Level.Info, message)
|
||||
final def warn(message: => String): Unit = log(Level.Warn, message)
|
||||
final def error(message: => String): Unit = log(Level.Error, message)
|
||||
|
||||
// Added by sys.process.ProcessLogger
|
||||
final def err(message: => String): Unit = log(Level.Error, message)
|
||||
// sys.process.ProcessLogger
|
||||
final def out(message: => String): Unit = log(Level.Info, message)
|
||||
|
||||
@deprecated("No longer used.", "1.0.0")
|
||||
def ansiCodesSupported: Boolean = false
|
||||
|
||||
def trace(t: => Throwable): Unit
|
||||
def success(message: => String): Unit
|
||||
def log(level: Level.Value, message: => String): Unit
|
||||
|
||||
def debug(msg: Supplier[String]): Unit = log(Level.Debug, msg)
|
||||
def warn(msg: Supplier[String]): Unit = log(Level.Warn, msg)
|
||||
def info(msg: Supplier[String]): Unit = log(Level.Info, msg)
|
||||
def error(msg: Supplier[String]): Unit = log(Level.Error, msg)
|
||||
def trace(msg: Supplier[Throwable]): Unit = trace(msg.get())
|
||||
def log(level: Level.Value, msg: Supplier[String]): Unit = log(level, msg.get)
|
||||
}
|
||||
|
||||
object Logger {
|
||||
def transferLevels(oldLog: AbstractLogger, newLog: AbstractLogger): Unit = {
|
||||
newLog.setLevel(oldLog.getLevel)
|
||||
newLog.setTrace(oldLog.getTrace)
|
||||
}
|
||||
|
||||
val Null: AbstractLogger = new AbstractLogger {
|
||||
def getLevel: Level.Value = Level.Error
|
||||
def setLevel(newLevel: Level.Value): Unit = ()
|
||||
def getTrace: Int = 0
|
||||
def setTrace(flag: Int): Unit = ()
|
||||
def successEnabled: Boolean = false
|
||||
def setSuccessEnabled(flag: Boolean): Unit = ()
|
||||
def control(event: ControlEvent.Value, message: => String): Unit = ()
|
||||
def logAll(events: Seq[LogEvent]): Unit = ()
|
||||
def trace(t: => Throwable): Unit = ()
|
||||
def success(message: => String): Unit = ()
|
||||
def log(level: Level.Value, message: => String): Unit = ()
|
||||
}
|
||||
|
||||
implicit def absLog2PLog(log: AbstractLogger): ProcessLogger =
|
||||
new BufferedLogger(log) with ProcessLogger
|
||||
|
||||
implicit def log2PLog(log: Logger): ProcessLogger = absLog2PLog(new FullLogger(log))
|
||||
|
||||
implicit def xlog2Log(lg: xLogger): Logger = lg match {
|
||||
case l: Logger => l
|
||||
case _ => wrapXLogger(lg)
|
||||
}
|
||||
|
||||
private[this] def wrapXLogger(lg: xLogger): Logger = new Logger {
|
||||
import InterfaceUtil.toSupplier
|
||||
override def debug(msg: Supplier[String]): Unit = lg.debug(msg)
|
||||
override def warn(msg: Supplier[String]): Unit = lg.warn(msg)
|
||||
override def info(msg: Supplier[String]): Unit = lg.info(msg)
|
||||
override def error(msg: Supplier[String]): Unit = lg.error(msg)
|
||||
override def trace(msg: Supplier[Throwable]): Unit = lg.trace(msg)
|
||||
override def log(level: Level.Value, msg: Supplier[String]): Unit = lg.log(level, msg)
|
||||
def trace(t: => Throwable): Unit = trace(toSupplier(t))
|
||||
def success(s: => String): Unit = info(toSupplier(s))
|
||||
def log(level: Level.Value, msg: => String): Unit = {
|
||||
val fmsg = toSupplier(msg)
|
||||
level match {
|
||||
case Level.Debug => lg.debug(fmsg)
|
||||
case Level.Info => lg.info(fmsg)
|
||||
case Level.Warn => lg.warn(fmsg)
|
||||
case Level.Error => lg.error(fmsg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def jo2o[A](o: Optional[A]): Option[A] = InterfaceUtil.jo2o(o)
|
||||
def o2jo[A](o: Option[A]): Optional[A] = InterfaceUtil.o2jo(o)
|
||||
|
||||
@deprecated("Use InterfaceUtil.position", "1.2.2")
|
||||
def position(
|
||||
line0: Option[Integer],
|
||||
content: String,
|
||||
offset0: Option[Integer],
|
||||
pointer0: Option[Integer],
|
||||
pointerSpace0: Option[String],
|
||||
sourcePath0: Option[String],
|
||||
sourceFile0: Option[File]
|
||||
): Position =
|
||||
InterfaceUtil.position(
|
||||
line0,
|
||||
content,
|
||||
offset0,
|
||||
pointer0,
|
||||
pointerSpace0,
|
||||
sourcePath0,
|
||||
sourceFile0
|
||||
)
|
||||
|
||||
@deprecated("Use InterfaceUtil.problem", "1.2.2")
|
||||
def problem(cat: String, pos: Position, msg: String, sev: Severity): Problem =
|
||||
InterfaceUtil.problem(cat, pos, msg, sev)
|
||||
}
|
||||
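// Illustrative sketch (not part of the original sources): the minimal surface a
// concrete Logger must provide -- trace, success and log. The remaining methods are
// final or defined in terms of these three.
import sbt.util.{ Level, Logger }

class PrintlnLogger extends Logger {
  def trace(t: => Throwable): Unit = t.printStackTrace()
  def success(message: => String): Unit = println(s"[success] $message")
  def log(level: Level.Value, message: => String): Unit = println(s"[$level] $message")
}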
|
|
@ -0,0 +1,15 @@
|
|||
package sbt.util

trait ShowLines[A] {
  def showLines(a: A): Seq[String]
}
object ShowLines {
  def apply[A](f: A => Seq[String]): ShowLines[A] =
    new ShowLines[A] {
      def showLines(a: A): Seq[String] = f(a)
    }

  implicit class ShowLinesOp[A: ShowLines](a: A) {
    def lines: Seq[String] = implicitly[ShowLines[A]].showLines(a)
  }
}
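// Illustrative sketch (not part of the original sources): providing a ShowLines
// instance for a custom type and using the ShowLinesOp syntax it enables. Person is
// a made-up example type.
object ShowLinesExample {
  import sbt.util.ShowLines
  import sbt.util.ShowLines._

  final case class Person(name: String, age: Int)

  implicit val personShowLines: ShowLines[Person] =
    ShowLines(p => Seq(s"name: ${p.name}", s"age: ${p.age}"))

  def main(args: Array[String]): Unit =
    Person("Alice", 42).lines foreach println
}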
|
|
@ -0,0 +1 @@
|
|||
Log4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector
|
||||
|
|
@ -0,0 +1,143 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
import Gen.{ listOf, oneOf }
|
||||
|
||||
import EscHelpers.{ ESC, hasEscapeSequence, isEscapeTerminator, removeEscapeSequences }
|
||||
|
||||
object Escapes extends Properties("Escapes") {
|
||||
property("genTerminator only generates terminators") =
|
||||
forAllNoShrink(genTerminator)((c: Char) => isEscapeTerminator(c))
|
||||
|
||||
property("genWithoutTerminator only generates terminators") =
|
||||
forAllNoShrink(genWithoutTerminator) { (s: String) =>
|
||||
s.forall(c => !isEscapeTerminator(c))
|
||||
}
|
||||
|
||||
property("hasEscapeSequence is false when no escape character is present") =
|
||||
forAllNoShrink(genWithoutEscape)((s: String) => !hasEscapeSequence(s))
|
||||
|
||||
property("hasEscapeSequence is true when escape character is present") =
|
||||
forAllNoShrink(genWithRandomEscapes)((s: String) => hasEscapeSequence(s))
|
||||
|
||||
property("removeEscapeSequences is the identity when no escape character is present") =
|
||||
forAllNoShrink(genWithoutEscape) { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
(removed == s)
|
||||
}
|
||||
|
||||
property("No escape characters remain after removeEscapeSequences") = forAll { (s: String) =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
("Escape sequence removed: '" + removed + "'") |:
|
||||
!hasEscapeSequence(removed)
|
||||
}
|
||||
|
||||
private[this] final val ecs = ESC.toString
|
||||
private val partialEscapeSequences =
|
||||
Gen.oneOf(Gen const ecs, Gen const ecs ++ "[", Gen.choose('@', '_').map(ecs :+ _))
|
||||
|
||||
property("removeEscapeSequences handles partial escape sequences") =
|
||||
forAll(partialEscapeSequences) { s =>
|
||||
val removed: String = removeEscapeSequences(s)
|
||||
s"Escape sequence removed: '$removed'" |: !hasEscapeSequence(removed)
|
||||
}
|
||||
|
||||
property("removeEscapeSequences returns string without escape sequences") =
|
||||
forAllNoShrink(genWithoutEscape, genEscapePairs) {
|
||||
(start: String, escapes: List[EscapeAndNot]) =>
|
||||
val withEscapes: String =
|
||||
start + escapes.map(ean => ean.escape.makeString + ean.notEscape).mkString("")
|
||||
val removed: String = removeEscapeSequences(withEscapes)
|
||||
val original = start + escapes.map(_.notEscape).mkString("")
|
||||
val diffCharString = diffIndex(original, removed)
|
||||
("Input string : '" + withEscapes + "'") |:
|
||||
("Expected : '" + original + "'") |:
|
||||
("Escapes removed : '" + removed + "'") |:
|
||||
(diffCharString) |:
|
||||
(original == removed)
|
||||
}
|
||||
|
||||
def diffIndex(expect: String, original: String): String = {
|
||||
var i = 0;
|
||||
while (i < expect.length && i < original.length) {
|
||||
if (expect.charAt(i) != original.charAt(i))
|
||||
return ("Differing character, idx: " + i + ", char: " + original.charAt(i) +
|
||||
", expected: " + expect.charAt(i))
|
||||
i += 1
|
||||
}
|
||||
if (expect.length != original.length) return s"Strings are different lengths!"
|
||||
"No differences found"
|
||||
}
|
||||
|
||||
final case class EscapeAndNot(escape: EscapeSequence, notEscape: String) {
|
||||
override def toString =
|
||||
s"EscapeAntNot(escape = [$escape], notEscape = [${notEscape.map(_.toInt)}])"
|
||||
}
|
||||
|
||||
// 2.10.5 warns on "implicit numeric widening" but it looks like a bug: https://issues.scala-lang.org/browse/SI-8450
|
||||
final case class EscapeSequence(content: String, terminator: Char) {
|
||||
if (!content.isEmpty) {
|
||||
assert(
|
||||
content.tail.forall(c => !isEscapeTerminator(c)),
|
||||
"Escape sequence content contains an escape terminator: '" + content + "'"
|
||||
)
|
||||
assert(
|
||||
(content.head == '[') || !isEscapeTerminator(content.head),
|
||||
"Escape sequence content contains an escape terminator: '" + content.headOption + "'"
|
||||
)
|
||||
}
|
||||
assert(isEscapeTerminator(terminator))
|
||||
def makeString: String = ESC + content + terminator
|
||||
|
||||
override def toString =
|
||||
if (content.isEmpty) s"ESC (${terminator.toInt})"
|
||||
else s"ESC ($content) (${terminator.toInt})"
|
||||
}
|
||||
|
||||
private[this] def noEscape(s: String): String = s.replace(ESC, ' ')
|
||||
|
||||
lazy val genEscapeSequence: Gen[EscapeSequence] =
|
||||
oneOf(genKnownSequence, genTwoCharacterSequence, genArbitraryEscapeSequence)
|
||||
|
||||
lazy val genEscapePair: Gen[EscapeAndNot] =
|
||||
for (esc <- genEscapeSequence; not <- genWithoutEscape) yield EscapeAndNot(esc, not)
|
||||
|
||||
lazy val genEscapePairs: Gen[List[EscapeAndNot]] = listOf(genEscapePair)
|
||||
|
||||
lazy val genArbitraryEscapeSequence: Gen[EscapeSequence] =
|
||||
for (content <- genWithoutTerminator if !content.isEmpty; term <- genTerminator)
|
||||
yield new EscapeSequence("[" + content, term)
|
||||
|
||||
lazy val genKnownSequence: Gen[EscapeSequence] =
|
||||
oneOf((misc ++ setGraphicsMode ++ setMode ++ resetMode).map(toEscapeSequence))
|
||||
|
||||
def toEscapeSequence(s: String): EscapeSequence = EscapeSequence(s.init, s.last)
|
||||
|
||||
lazy val misc = Seq("14;23H", "5;3f", "2A", "94B", "19C", "85D", "s", "u", "2J", "K")
|
||||
|
||||
lazy val setGraphicsMode: Seq[String] =
|
||||
for (txt <- 0 to 8; fg <- 30 to 37; bg <- 40 to 47)
|
||||
yield txt.toString + ";" + fg.toString + ";" + bg.toString + "m"
|
||||
|
||||
lazy val resetMode = setModeLike('l')
|
||||
lazy val setMode = setModeLike('h')
|
||||
def setModeLike(term: Char): Seq[String] = (0 to 19).map(i => "=" + i.toString + term)
|
||||
|
||||
lazy val genWithoutTerminator =
|
||||
genRawString.map(_.filter(c => !isEscapeTerminator(c) && (c != '[')))
|
||||
|
||||
lazy val genTwoCharacterSequence =
|
||||
// 91 == [ which is the CSI escape sequence.
|
||||
oneOf((64 to 95)) filter (_ != 91) map (c => new EscapeSequence("", c.toChar))
|
||||
|
||||
lazy val genTerminator: Gen[Char] = Gen.choose('@', '~')
|
||||
lazy val genWithoutEscape: Gen[String] = genRawString.map(noEscape)
|
||||
|
||||
def genWithRandomEscapes: Gen[String] =
|
||||
for (ls <- listOf(genRawString); end <- genRawString)
|
||||
yield ls.mkString("", ESC.toString, ESC.toString + end)
|
||||
|
||||
private def genRawString = Arbitrary.arbString.arbitrary
|
||||
}
|
||||
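// Illustrative sketch (not part of the original sources): the behaviour the
// properties above check, shown on one concrete value. EscHelpers lives elsewhere
// in the codebase; the expected outputs below are an assumption consistent with
// those properties (ANSI colour sequences are stripped, plain text is preserved).
object EscapesExample {
  import sbt.internal.util.EscHelpers

  def main(args: Array[String]): Unit = {
    val colored = "\u001b[32msuccess\u001b[0m done"
    println(EscHelpers.hasEscapeSequence(colored))     // expected: true
    println(EscHelpers.removeEscapeSequences(colored)) // expected: "success done"
  }
}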
|
|
@ -0,0 +1,27 @@
|
|||
package sbt.util

import sbt.internal.util._

import org.scalatest._

class LogExchangeSpec extends FlatSpec with Matchers {
  import LogExchange._

  checkTypeTag("stringTypeTagThrowable", stringTypeTagThrowable, StringTypeTag[Throwable])
  checkTypeTag("stringTypeTagTraceEvent", stringTypeTagTraceEvent, StringTypeTag[TraceEvent])
  checkTypeTag("stringTypeTagSuccessEvent", stringTypeTagSuccessEvent, StringTypeTag[SuccessEvent])

  private def checkTypeTag[A](name: String, inc: StringTypeTag[A], exp: StringTypeTag[A]): Unit =
    s"LogExchange.$name" should s"match real StringTypeTag[$exp]" in {
      val StringTypeTag(incomingString) = inc
      val StringTypeTag(expectedString) = exp
      if ((incomingString startsWith "scala.") || (expectedString startsWith "scala.")) {
        // > historically [Scala] has been inconsistent whether `scala.` is included, or not
        // > would it be hard to make the test accept either result?
        // https://github.com/scala/community-builds/pull/758#issuecomment-409760633
        assert((incomingString stripPrefix "scala.") == (expectedString stripPrefix "scala."))
      } else {
        assert(incomingString == expectedString)
      }
    }
}
|
|
@ -0,0 +1,164 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah */
|
||||
|
||||
package sbt.internal.util
|
||||
|
||||
import sbt.util._
|
||||
import org.scalacheck._
|
||||
import Arbitrary._
|
||||
import Gen.{ listOfN, oneOf }
|
||||
import Prop._
|
||||
|
||||
import java.io.Writer
|
||||
|
||||
object LogWriterTest extends Properties("Log Writer") {
|
||||
final val MaxLines = 100
|
||||
final val MaxSegments = 10
|
||||
|
||||
/* Tests that content written through a LoggerWriter is properly passed to the underlying Logger.
|
||||
* Each line, determined by the specified newline separator, must be logged at the correct logging level. */
|
||||
property("properly logged") = forAll { (output: Output, newLine: NewLine) =>
|
||||
import output.{ lines, level }
|
||||
val log = new RecordingLogger
|
||||
val writer = new LoggerWriter(log, Some(level), newLine.str)
|
||||
logLines(writer, lines, newLine.str)
|
||||
val events = log.getEvents
|
||||
("Recorded:\n" + events.map(show).mkString("\n")) |:
|
||||
check(toLines(lines), events, level)
|
||||
}
|
||||
|
||||
/**
|
||||
* Displays a LogEvent in a useful format for debugging. In particular, we are only interested in `Log` types
|
||||
* and non-printable characters should be escaped
|
||||
*/
|
||||
def show(event: LogEvent): String =
|
||||
event match {
|
||||
case l: Log => "Log('" + Escape(l.msg) + "', " + l.level + ")"
|
||||
case _ => "Not Log"
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the given lines to the Writer. `lines` is taken to be a list of lines, which are
|
||||
* represented as separately written segments (ToLog instances). ToLog.`byCharacter`
|
||||
* indicates whether to write the segment by character (true) or all at once (false)
|
||||
*/
|
||||
def logLines(writer: Writer, lines: List[List[ToLog]], newLine: String): Unit = {
|
||||
for (line <- lines; section <- line) {
|
||||
val content = section.content
|
||||
val normalized = Escape.newline(content, newLine)
|
||||
if (section.byCharacter)
|
||||
normalized.foreach(c => writer.write(c.toInt))
|
||||
else
|
||||
writer.write(normalized)
|
||||
}
|
||||
writer.flush()
|
||||
}
|
||||
|
||||
/** Converts the given lines in segments to lines as Strings for checking the results of the test.*/
|
||||
def toLines(lines: List[List[ToLog]]): List[String] =
|
||||
lines.map(_.map(_.contentOnly).mkString)
|
||||
|
||||
/** Checks that the expected `lines` were recorded as `events` at level `Lvl`.*/
|
||||
def check(lines: List[String], events: List[LogEvent], Lvl: Level.Value): Boolean =
|
||||
(lines zip events) forall {
|
||||
case (line, log: Log) => log.level == Lvl && line == log.msg
|
||||
case _ => false
|
||||
}
|
||||
|
||||
/* The following are implicit generators to build up a write sequence.
|
||||
* ToLog represents a written segment. NewLine represents one of the possible
|
||||
* newline separators. A List[ToLog] represents a full line and always includes a
|
||||
* final ToLog with a trailing '\n'. Newline characters are otherwise not present in
|
||||
* the `content` of a ToLog instance.*/
|
||||
|
||||
implicit lazy val arbOut: Arbitrary[Output] = Arbitrary(genOutput)
|
||||
implicit lazy val arbLog: Arbitrary[ToLog] = Arbitrary(genLog)
|
||||
implicit lazy val arbLine: Arbitrary[List[ToLog]] = Arbitrary(genLine)
|
||||
implicit lazy val arbNewLine: Arbitrary[NewLine] = Arbitrary(genNewLine)
|
||||
implicit lazy val arbLevel: Arbitrary[Level.Value] = Arbitrary(genLevel)
|
||||
|
||||
implicit def genLine(implicit logG: Gen[ToLog]): Gen[List[ToLog]] =
|
||||
for (l <- listOf[ToLog](MaxSegments); last <- logG)
|
||||
yield (addNewline(last) :: l.filter(!_.content.isEmpty)).reverse
|
||||
|
||||
implicit def genLog(implicit content: Arbitrary[String], byChar: Arbitrary[Boolean]): Gen[ToLog] =
|
||||
for (c <- content.arbitrary; by <- byChar.arbitrary) yield {
|
||||
assert(c != null)
|
||||
new ToLog(removeNewlines(c), by)
|
||||
}
|
||||
|
||||
implicit lazy val genNewLine: Gen[NewLine] =
|
||||
for (str <- oneOf("\n", "\r", "\r\n")) yield new NewLine(str)
|
||||
|
||||
implicit lazy val genLevel: Gen[Level.Value] =
|
||||
oneOf(Level.values.toSeq)
|
||||
|
||||
implicit lazy val genOutput: Gen[Output] =
|
||||
for (ls <- listOf[List[ToLog]](MaxLines); lv <- genLevel) yield new Output(ls, lv)
|
||||
|
||||
def removeNewlines(s: String) = s.replaceAll("""[\n\r]+""", "")
|
||||
def addNewline(l: ToLog): ToLog =
|
||||
new ToLog(l.content + "\n", l.byCharacter) // \n will be replaced by a random line terminator for all lines
|
||||
|
||||
def listOf[T](max: Int)(implicit content: Arbitrary[T]): Gen[List[T]] =
|
||||
Gen.choose(0, max) flatMap (sz => listOfN(sz, content.arbitrary))
|
||||
}
|
||||
|
||||
/* Helper classes*/
|
||||
|
||||
final class Output(val lines: List[List[ToLog]], val level: Level.Value) {
|
||||
override def toString =
|
||||
"Level: " + level + "\n" + lines.map(_.mkString).mkString("\n")
|
||||
}
|
||||
|
||||
final class NewLine(val str: String) {
|
||||
override def toString = Escape(str)
|
||||
}
|
||||
|
||||
final class ToLog(val content: String, val byCharacter: Boolean) {
|
||||
def contentOnly = Escape.newline(content, "")
|
||||
|
||||
override def toString =
|
||||
if (content.isEmpty) "" else "ToLog('" + Escape(contentOnly) + "', " + byCharacter + ")"
|
||||
}
|
||||
|
||||
/** Defines some utility methods for escaping unprintable characters.*/
|
||||
object Escape {
|
||||
|
||||
/** Escapes characters with code less than 20 by printing them as unicode escapes.*/
|
||||
def apply(s: String): String = {
|
||||
val builder = new StringBuilder(s.length)
|
||||
for (c <- s) {
|
||||
val char = c.toInt
|
||||
def escaped = pad(char.toHexString.toUpperCase, 4, '0')
|
||||
if (c < 20) builder.append("\\u").append(escaped) else builder.append(c)
|
||||
}
|
||||
builder.toString
|
||||
}
|
||||
|
||||
def pad(s: String, minLength: Int, extra: Char) = {
|
||||
val diff = minLength - s.length
|
||||
if (diff <= 0) s else List.fill(diff)(extra).mkString("", "", s)
|
||||
}
|
||||
|
||||
/** Replaces a \n character at the end of a string `s` with `nl`.*/
|
||||
def newline(s: String, nl: String): String =
|
||||
if (s.endsWith("\n")) s.substring(0, s.length - 1) + nl else s
|
||||
|
||||
}
|
||||
|
||||
/** Records logging events for later retrieval.*/
|
||||
final class RecordingLogger extends BasicLogger {
|
||||
private var events: List[LogEvent] = Nil
|
||||
|
||||
def getEvents = events.reverse
|
||||
|
||||
override def ansiCodesSupported = true
|
||||
def trace(t: => Throwable): Unit = { events ::= new Trace(t) }
|
||||
def log(level: Level.Value, message: => String): Unit = { events ::= new Log(level, message) }
|
||||
def success(message: => String): Unit = { events ::= new Success(message) }
|
||||
def logAll(es: Seq[LogEvent]): Unit = { events :::= es.toList }
|
||||
|
||||
def control(event: ControlEvent.Value, message: => String): Unit =
|
||||
events ::= new ControlEvent(event, message)
|
||||
}
|
||||
|
|
@ -0,0 +1,134 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import org.scalatest._
|
||||
import sbt.util._
|
||||
import java.io.{ File, PrintWriter }
|
||||
import sbt.io.Using
|
||||
|
||||
class ManagedLoggerSpec extends FlatSpec with Matchers {
|
||||
"ManagedLogger" should "log to console" in {
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
log.info("test")
|
||||
log.debug("test")
|
||||
}
|
||||
|
||||
it should "support event logging" in {
|
||||
import sjsonnew.BasicJsonProtocol._
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
log.infoEvent(1)
|
||||
}
|
||||
|
||||
it should "validate performance improvement of disabling location calculation for async loggers" in {
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
val before = System.currentTimeMillis()
|
||||
1 to 10000 foreach { _ =>
|
||||
log.debug("test")
|
||||
}
|
||||
val after = System.currentTimeMillis()
|
||||
|
||||
log.info(s"Peformance test took: ${after - before}ms")
|
||||
}
|
||||
|
||||
it should "support logging Throwable out of the box" in {
|
||||
import sbt.internal.util.codec.JsonProtocol._
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
log.infoEvent(SuccessEvent("yes"))
|
||||
}
|
||||
|
||||
it should "allow registering Show[Int]" in {
|
||||
import sjsonnew.BasicJsonProtocol._
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
implicit val intShow: ShowLines[Int] =
|
||||
ShowLines((x: Int) => Vector(s"String representation of $x"))
|
||||
log.registerStringCodec[Int]
|
||||
log.infoEvent(1)
|
||||
}
|
||||
|
||||
it should "allow registering Show[Array[Int]]" in {
|
||||
import sjsonnew.BasicJsonProtocol._
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
implicit val intArrayShow: ShowLines[Array[Int]] =
|
||||
ShowLines((x: Array[Int]) => Vector(s"String representation of ${x.mkString}"))
|
||||
log.registerStringCodec[Array[Int]]
|
||||
log.infoEvent(Array(1, 2, 3))
|
||||
}
|
||||
|
||||
it should "allow registering Show[Vector[Vector[Int]]]" in {
|
||||
import sjsonnew.BasicJsonProtocol._
|
||||
val log = LogExchange.logger("foo")
|
||||
LogExchange.bindLoggerAppenders("foo", List(LogExchange.asyncStdout -> Level.Info))
|
||||
implicit val intVectorShow: ShowLines[Vector[Vector[Int]]] =
|
||||
ShowLines((xss: Vector[Vector[Int]]) => Vector(s"String representation of $xss"))
|
||||
log.registerStringCodec[Vector[Vector[Int]]]
|
||||
log.infoEvent(Vector(Vector(1, 2, 3)))
|
||||
}
|
||||
|
||||
it should "be thread safe" in {
|
||||
import java.util.concurrent.{ Executors, TimeUnit }
|
||||
val pool = Executors.newFixedThreadPool(100)
|
||||
for {
|
||||
i <- 1 to 10000
|
||||
} {
|
||||
pool.submit(new Runnable {
|
||||
def run(): Unit = {
|
||||
val stringTypeTag = StringTypeTag[List[Int]]
|
||||
val log = LogExchange.logger(s"foo$i")
|
||||
LogExchange.bindLoggerAppenders(s"foo$i", List(LogExchange.asyncStdout -> Level.Info))
|
||||
if (i % 100 == 0) {
|
||||
log.info(s"foo$i test $stringTypeTag")
|
||||
}
|
||||
Thread.sleep(1)
|
||||
}
|
||||
})
|
||||
}
|
||||
pool.shutdown
|
||||
pool.awaitTermination(30, TimeUnit.SECONDS)
|
||||
}
|
||||
|
||||
"global logging" should "log immediately after initialization" in {
|
||||
// this is passed into State normally
|
||||
val global0 = initialGlobalLogging
|
||||
val full = global0.full
|
||||
(1 to 3).toList foreach { x =>
|
||||
full.info(s"test$x")
|
||||
}
|
||||
}
|
||||
|
||||
// This is done in Mainloop.scala
|
||||
it should "create a new backing with newAppender" in {
|
||||
val global0 = initialGlobalLogging
|
||||
val logBacking0 = global0.backing
|
||||
val global1 = Using.fileWriter(append = true)(logBacking0.file) { writer =>
|
||||
val out = new PrintWriter(writer)
|
||||
val g = global0.newAppender(global0.full, out, logBacking0)
|
||||
val full = g.full
|
||||
(1 to 3).toList foreach (x => full.info(s"newAppender $x"))
|
||||
assert(logBacking0.file.exists)
|
||||
g
|
||||
}
|
||||
val logBacking1 = global1.backing
|
||||
Using.fileWriter(append = true)(logBacking1.file) { writer =>
|
||||
val out = new PrintWriter(writer)
|
||||
val g = global1.newAppender(global1.full, out, logBacking1)
|
||||
val full = g.full
|
||||
(1 to 3).toList foreach (x => full.info(s"newAppender $x"))
|
||||
// println(logBacking.file)
|
||||
// print("Press enter to continue. ")
|
||||
// System.console.readLine
|
||||
assert(logBacking1.file.exists)
|
||||
}
|
||||
}
|
||||
|
||||
val console = ConsoleOut.systemOut
|
||||
def initialGlobalLogging: GlobalLogging = GlobalLogging.initial(
|
||||
MainAppender.globalDefault(console),
|
||||
File.createTempFile("sbt", ".log"),
|
||||
console
|
||||
)
|
||||
}
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
package sbt.internal.util

import sbt.util._

object TestLogger {
  def apply[T](f: Logger => T): T = {
    val log = new BufferedLogger(ConsoleLogger())
    log.setLevel(Level.Debug)
    log.bufferQuietly(f(log))
  }
}
|
|
@ -0,0 +1,67 @@
|
|||
package sbt.internal.util
|
||||
|
||||
import scala.language.experimental.macros
|
||||
|
||||
sealed trait SourcePosition
|
||||
|
||||
sealed trait FilePosition extends SourcePosition {
|
||||
def path: String
|
||||
def startLine: Int
|
||||
}
|
||||
|
||||
case object NoPosition extends SourcePosition
|
||||
|
||||
final case class LinePosition(path: String, startLine: Int) extends FilePosition
|
||||
|
||||
final case class LineRange(start: Int, end: Int) {
|
||||
def shift(n: Int) = new LineRange(start + n, end + n)
|
||||
}
|
||||
|
||||
final case class RangePosition(path: String, range: LineRange) extends FilePosition {
|
||||
def startLine = range.start
|
||||
}
|
||||
|
||||
object SourcePosition {
|
||||
|
||||
/** Creates a SourcePosition by using the enclosing position of the invocation of this method.
|
||||
* @return SourcePosition
|
||||
*/
|
||||
def fromEnclosing(): SourcePosition = macro SourcePositionMacro.fromEnclosingImpl
|
||||
|
||||
}
|
||||
|
||||
import scala.annotation.tailrec
|
||||
import scala.reflect.macros.blackbox
|
||||
import scala.reflect.internal.util.UndefinedPosition
|
||||
|
||||
final class SourcePositionMacro(val c: blackbox.Context) {
|
||||
import c.universe.{ NoPosition => _, _ }
|
||||
|
||||
def fromEnclosingImpl(): Expr[SourcePosition] = {
|
||||
val pos = c.enclosingPosition
|
||||
if (!pos.isInstanceOf[UndefinedPosition] && pos.line >= 0 && pos.source != null) {
|
||||
val f = pos.source.file
|
||||
val name = constant[String](ownerSource(f.path, f.name))
|
||||
val line = constant[Int](pos.line)
|
||||
reify { LinePosition(name.splice, line.splice) }
|
||||
} else
|
||||
reify { NoPosition }
|
||||
}
|
||||
|
||||
private[this] def ownerSource(path: String, name: String): String = {
|
||||
@tailrec def inEmptyPackage(s: Symbol): Boolean =
|
||||
s != NoSymbol && (
|
||||
s.owner == c.mirror.EmptyPackage
|
||||
|| s.owner == c.mirror.EmptyPackageClass
|
||||
|| inEmptyPackage(s.owner)
|
||||
)
|
||||
|
||||
c.internal.enclosingOwner match {
|
||||
case ec if !ec.isStatic => name
|
||||
case ec if inEmptyPackage(ec) => path
|
||||
case ec => s"(${ec.fullName}) $name"
|
||||
}
|
||||
}
|
||||
|
||||
private[this] def constant[T: WeakTypeTag](t: T): Expr[T] = c.Expr[T](Literal(Constant(t)))
|
||||
}
|
||||
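// Illustrative sketch (not part of the original sources): capturing the enclosing
// position at a call site with the macro defined above, and rendering the result.
object SourcePositionExample {
  import sbt.internal.util.{ LinePosition, NoPosition, RangePosition, SourcePosition }

  val here: SourcePosition = SourcePosition.fromEnclosing()

  def describe(p: SourcePosition): String = p match {
    case LinePosition(path, line) => s"$path:$line"
    case RangePosition(path, r)   => s"$path:${r.start}-${r.end}"
    case NoPosition               => "<unknown>"
  }
}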
|
|
@ -0,0 +1,18 @@
|
|||
package sbt.internal.util

import org.scalatest._

class SourcePositionSpec extends FlatSpec {
  "SourcePosition()" should "return a sane SourcePosition" in {
    val filename = "SourcePositionSpec.scala"
    val lineNumber = 9
    SourcePosition.fromEnclosing() match {
      case LinePosition(path, startLine) => assert(path === filename && startLine === lineNumber)
      case RangePosition(path, range)    => assert(path === filename && inRange(range, lineNumber))
      case NoPosition                    => fail("No source position found")
    }
  }

  private def inRange(range: LineRange, lineNo: Int) =
    range.start until range.end contains lineNo
}
|
|
@ -0,0 +1,202 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import Relation._
|
||||
|
||||
object Relation {
|
||||
|
||||
/** Constructs a new immutable, finite relation that is initially empty. */
|
||||
def empty[A, B]: Relation[A, B] = make(Map.empty, Map.empty)
|
||||
|
||||
/**
|
||||
* Constructs a [[Relation]] from underlying `forward` and `reverse` representations, without checking that they are consistent.
|
||||
* This is a low-level constructor and the alternatives [[empty]] and [[reconstruct]] should be preferred.
|
||||
*/
|
||||
def make[A, B](forward: Map[A, Set[B]], reverse: Map[B, Set[A]]): Relation[A, B] =
|
||||
new MRelation(forward, reverse)
|
||||
|
||||
/** Constructs a relation such that for every entry `_1 -> _2s` in `forward` and every `_2` in `_2s`, `(_1, _2)` is in the relation. */
|
||||
def reconstruct[A, B](forward: Map[A, Set[B]]): Relation[A, B] = {
|
||||
val reversePairs = for ((a, bs) <- forward.view; b <- bs.view) yield (b, a)
|
||||
val reverse = reversePairs.foldLeft(Map.empty[B, Set[A]]) {
|
||||
case (m, (b, a)) => add(m, b, a :: Nil)
|
||||
}
|
||||
make(forward filter { case (a, bs) => bs.nonEmpty }, reverse)
|
||||
}
|
||||
|
||||
def merge[A, B](rels: Traversable[Relation[A, B]]): Relation[A, B] =
|
||||
rels.foldLeft(Relation.empty[A, B])(_ ++ _)
|
||||
|
||||
private[sbt] def remove[X, Y](map: M[X, Y], from: X, to: Y): M[X, Y] =
|
||||
map.get(from) match {
|
||||
case Some(tos) =>
|
||||
val newSet = tos - to
|
||||
if (newSet.isEmpty) map - from else map.updated(from, newSet)
|
||||
case None => map
|
||||
}
|
||||
|
||||
private[sbt] def combine[X, Y](a: M[X, Y], b: M[X, Y]): M[X, Y] =
|
||||
b.foldLeft(a)((map, mapping) => add(map, mapping._1, mapping._2))
|
||||
|
||||
private[sbt] def add[X, Y](map: M[X, Y], from: X, to: Traversable[Y]): M[X, Y] =
|
||||
map.updated(from, get(map, from) ++ to)
|
||||
|
||||
private[sbt] def get[X, Y](map: M[X, Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y])
|
||||
|
||||
private[sbt] type M[X, Y] = Map[X, Set[Y]]
|
||||
}
|
||||
|
||||
/** Binary relation between A and B. It is a set of pairs (_1, _2) for _1 in A, _2 in B. */
|
||||
trait Relation[A, B] {
|
||||
|
||||
/** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */
|
||||
def forward(_1: A): Set[B]
|
||||
|
||||
/** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */
|
||||
def reverse(_2: B): Set[A]
|
||||
|
||||
/** Includes `pair` in the relation. */
|
||||
def +(pair: (A, B)): Relation[A, B]
|
||||
|
||||
/** Includes `(a, b)` in the relation. */
|
||||
def +(a: A, b: B): Relation[A, B]
|
||||
|
||||
/** Includes in the relation `(a, b)` for all `b` in `bs`. */
|
||||
def +(a: A, bs: Traversable[B]): Relation[A, B]
|
||||
|
||||
/** Returns the union of the relation `r` with this relation. */
|
||||
def ++(r: Relation[A, B]): Relation[A, B]
|
||||
|
||||
/** Includes the given pairs in this relation. */
|
||||
def ++(rs: Traversable[(A, B)]): Relation[A, B]
|
||||
|
||||
/** Removes all elements `(_1, _2)` for all `_1` in `_1s` from this relation. */
|
||||
def --(_1s: Traversable[A]): Relation[A, B]
|
||||
|
||||
/** Removes all `pairs` from this relation. */
|
||||
def --(pairs: TraversableOnce[(A, B)]): Relation[A, B]
|
||||
|
||||
/** Removes all `relations` from this relation. */
|
||||
def --(relations: Relation[A, B]): Relation[A, B]
|
||||
|
||||
/** Removes all pairs `(_1, _2)` from this relation. */
|
||||
def -(_1: A): Relation[A, B]
|
||||
|
||||
/** Removes `pair` from this relation. */
|
||||
def -(pair: (A, B)): Relation[A, B]
|
||||
|
||||
/** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */
|
||||
def _1s: collection.Set[A]
|
||||
|
||||
/** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */
|
||||
def _2s: collection.Set[B]
|
||||
|
||||
/** Returns the number of pairs in this relation */
|
||||
def size: Int
|
||||
|
||||
/** Returns true iff `(a,b)` is in this relation*/
|
||||
def contains(a: A, b: B): Boolean
|
||||
|
||||
/** Returns a relation with only pairs `(a,b)` for which `f(a,b)` is true.*/
|
||||
def filter(f: (A, B) => Boolean): Relation[A, B]
|
||||
|
||||
/**
|
||||
* Returns a pair of relations: the first contains only pairs `(a,b)` for which `f(a,b)` is true and
|
||||
* the other only pairs `(a,b)` for which `f(a,b)` is false.
|
||||
*/
|
||||
def partition(f: (A, B) => Boolean): (Relation[A, B], Relation[A, B])
|
||||
|
||||
/** Partitions this relation into a map of relations according to some discriminator function. */
|
||||
def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]]
|
||||
|
||||
/** Returns all pairs in this relation.*/
|
||||
def all: Traversable[(A, B)]
|
||||
|
||||
/**
|
||||
* Represents this relation as a `Map` from a `_1` to the set of `_2`s such that `(_1, _2)` is in this relation.
|
||||
*
|
||||
* Specifically, there is one entry for each `_1` such that `(_1, _2)` is in this relation for some `_2`.
|
||||
* The value associated with a given `_1` is the set of all `_2`s such that `(_1, _2)` is in this relation.
|
||||
*/
|
||||
def forwardMap: Map[A, Set[B]]
|
||||
|
||||
/**
|
||||
* Represents this relation as a `Map` from a `_2` to the set of `_1`s such that `(_1, _2)` is in this relation.
|
||||
*
|
||||
* Specifically, there is one entry for each `_2` such that `(_1, _2)` is in this relation for some `_1`.
|
||||
* The value associated with a given `_2` is the set of all `_1`s such that `(_1, _2)` is in this relation.
|
||||
*/
|
||||
def reverseMap: Map[B, Set[A]]
|
||||
|
||||
}
|
||||
|
||||
// Note that we assume without checking that fwd and rev are consistent.
|
||||
private final class MRelation[A, B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]])
|
||||
extends Relation[A, B] {
|
||||
def forwardMap = fwd
|
||||
def reverseMap = rev
|
||||
|
||||
def forward(t: A) = get(fwd, t)
|
||||
def reverse(t: B) = get(rev, t)
|
||||
|
||||
def _1s = fwd.keySet
|
||||
def _2s = rev.keySet
|
||||
|
||||
def size = (fwd.valuesIterator map (_.size)).sum
|
||||
|
||||
def all: Traversable[(A, B)] =
|
||||
fwd.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }.toTraversable
|
||||
|
||||
def +(pair: (A, B)) = this + (pair._1, Set(pair._2))
|
||||
def +(from: A, to: B) = this + (from, to :: Nil)
|
||||
def +(from: A, to: Traversable[B]) =
|
||||
if (to.isEmpty) this
|
||||
else new MRelation(add(fwd, from, to), to.foldLeft(rev)((map, t) => add(map, t, from :: Nil)))
|
||||
|
||||
def ++(rs: Traversable[(A, B)]) = rs.foldLeft(this: Relation[A, B]) { _ + _ }
|
||||
def ++(other: Relation[A, B]) =
|
||||
new MRelation[A, B](combine(fwd, other.forwardMap), combine(rev, other.reverseMap))
|
||||
|
||||
def --(ts: Traversable[A]): Relation[A, B] = ts.foldLeft(this: Relation[A, B]) { _ - _ }
|
||||
def --(pairs: TraversableOnce[(A, B)]): Relation[A, B] =
|
||||
pairs.foldLeft(this: Relation[A, B])(_ - _)
|
||||
def --(relations: Relation[A, B]): Relation[A, B] = --(relations.all)
|
||||
|
||||
def -(pair: (A, B)): Relation[A, B] =
|
||||
new MRelation(remove(fwd, pair._1, pair._2), remove(rev, pair._2, pair._1))
|
||||
|
||||
def -(t: A): Relation[A, B] =
|
||||
fwd.get(t) match {
|
||||
case Some(rs) =>
|
||||
val upRev = rs.foldLeft(rev)((map, r) => remove(map, r, t))
|
||||
new MRelation(fwd - t, upRev)
|
||||
case None => this
|
||||
}
|
||||
|
||||
def filter(f: (A, B) => Boolean): Relation[A, B] = Relation.empty[A, B] ++ all.filter(f.tupled)
|
||||
|
||||
def partition(f: (A, B) => Boolean): (Relation[A, B], Relation[A, B]) = {
|
||||
val (y, n) = all.partition(f.tupled)
|
||||
(Relation.empty[A, B] ++ y, Relation.empty[A, B] ++ n)
|
||||
}
|
||||
|
||||
def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]] =
|
||||
(all.groupBy(discriminator) mapValues { Relation.empty[A, B] ++ _ }).toMap
|
||||
|
||||
def contains(a: A, b: B): Boolean = forward(a)(b)
|
||||
|
||||
override def equals(other: Any) = other match {
|
||||
// We assume that the forward and reverse maps are consistent, so we only use the forward map
|
||||
// for equality. Note that key -> Empty is semantically the same as key not existing.
|
||||
case o: MRelation[A, B] =>
|
||||
forwardMap.filterNot(_._2.isEmpty) == o.forwardMap.filterNot(_._2.isEmpty)
|
||||
case _ => false
|
||||
}
|
||||
|
||||
override def hashCode = fwd.filterNot(_._2.isEmpty).hashCode()
|
||||
|
||||
override def toString =
|
||||
all.map { case (a, b) => a + " -> " + b }.mkString("Relation [", ", ", "]")
|
||||
}
|
||||
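// Illustrative sketch (not part of the original sources): building a small Relation
// and querying it in both directions. The file names are arbitrary example values.
object RelationExample {
  import sbt.internal.util.Relation

  def main(args: Array[String]): Unit = {
    val deps = Relation.empty[String, String] ++ Seq(
      "A.scala" -> "B.scala",
      "A.scala" -> "C.scala",
      "D.scala" -> "C.scala"
    )

    assert(deps.forward("A.scala") == Set("B.scala", "C.scala"))
    assert(deps.reverse("C.scala") == Set("A.scala", "D.scala"))
    assert(deps.size == 3)
    assert((deps - "A.scala").forward("A.scala").isEmpty)
  }
}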
|
|
@ -0,0 +1,83 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.internal.util
|
||||
|
||||
import org.scalacheck._
|
||||
import Prop._
|
||||
|
||||
object RelationTest extends Properties("Relation") {
|
||||
property("Added entry check") = forAll { (pairs: List[(Int, Double)]) =>
|
||||
val r = Relation.empty[Int, Double] ++ pairs
|
||||
check(r, pairs)
|
||||
}
|
||||
def check(r: Relation[Int, Double], pairs: Seq[(Int, Double)]) = {
|
||||
val _1s = pairs.map(_._1).toSet
|
||||
val _2s = pairs.map(_._2).toSet
|
||||
|
||||
r._1s == _1s && r.forwardMap.keySet == _1s &&
|
||||
r._2s == _2s && r.reverseMap.keySet == _2s &&
|
||||
pairs.forall {
|
||||
case (a, b) =>
|
||||
(r.forward(a) contains b) &&
|
||||
(r.reverse(b) contains a) &&
|
||||
(r.forwardMap(a) contains b) &&
|
||||
(r.reverseMap(b) contains a)
|
||||
}
|
||||
}
|
||||
|
||||
property("Does not contain removed entries") = forAll { (pairs: List[(Int, Double, Boolean)]) =>
|
||||
val add = pairs.map { case (a, b, c) => (a, b) }
|
||||
val added = Relation.empty[Int, Double] ++ add
|
||||
|
||||
val removeFine = pairs.collect { case (a, b, true) => (a, b) }
|
||||
val removeCoarse = removeFine.map(_._1)
|
||||
val r = added -- removeCoarse
|
||||
|
||||
def notIn[X, Y](map: Map[X, Set[Y]], a: X, b: Y) = map.get(a).forall(set => !(set contains b))
|
||||
|
||||
all(removeCoarse) { rem =>
|
||||
("_1s does not contain removed" |: (!r._1s.contains(rem))) &&
|
||||
("Forward does not contain removed" |: r.forward(rem).isEmpty) &&
|
||||
("Forward map does not contain removed" |: !r.forwardMap.contains(rem)) &&
|
||||
("Removed is not a value in reverse map" |: !r.reverseMap.values.toSet.contains(rem))
|
||||
} &&
|
||||
all(removeFine) {
|
||||
case (a, b) =>
|
||||
("Forward does not contain removed" |: (!r.forward(a).contains(b))) &&
|
||||
("Reverse does not contain removed" |: (!r.reverse(b).contains(a))) &&
|
||||
("Forward map does not contain removed" |: (notIn(r.forwardMap, a, b))) &&
|
||||
("Reverse map does not contain removed" |: (notIn(r.reverseMap, b, a)))
|
||||
}
|
||||
}
|
||||
|
||||
property("Groups correctly") = forAll { (entries: List[(Int, Double)], randomInt: Int) =>
|
||||
val splitInto = math.abs(randomInt) % 10 + 1 // Split into 1-10 groups.
|
||||
val rel = Relation.empty[Int, Double] ++ entries
|
||||
val grouped = rel groupBy (_._1 % splitInto)
|
||||
all(grouped.toSeq) {
|
||||
case (k, rel_k) => rel_k._1s forall { _ % splitInto == k }
|
||||
}
|
||||
}
|
||||
|
||||
property("Computes size correctly") = forAll { (entries: List[(Int, Double)]) =>
|
||||
val rel = Relation.empty[Int, Double] ++ entries
|
||||
val expected = rel.all.size // Note: not entries.length, as entries may have duplicates.
|
||||
val computed = rel.size
|
||||
"Expected size: %d. Computed size: %d.".format(expected, computed) |: expected == computed
|
||||
}
|
||||
|
||||
def all[T](s: Seq[T])(p: T => Prop): Prop =
|
||||
if (s.isEmpty) true else s.map(p).reduceLeft(_ && _)
|
||||
}
|
||||
|
||||
object EmptyRelationTest extends Properties("Empty relation") {
|
||||
lazy val e = Relation.empty[Int, Double]
|
||||
|
||||
property("Forward empty") = forAll((i: Int) => e.forward(i).isEmpty)
|
||||
property("Reverse empty") = forAll((i: Double) => e.reverse(i).isEmpty)
|
||||
property("Forward map empty") = e.forwardMap.isEmpty
|
||||
property("Reverse map empty") = e.reverseMap.isEmpty
|
||||
property("_1 empty") = e._1s.isEmpty
|
||||
property("_2 empty") = e._2s.isEmpty
|
||||
}
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
package sbt.internal.scripted;

import java.io.File;

import xsbti.Logger;

public class ScriptConfig {

    private String label;
    private File testDirectory;
    private Logger logger;

    public ScriptConfig(String label, File testDirectory, Logger logger) {
        this.label = label;
        this.testDirectory = testDirectory;
        this.logger = logger;
    }

    public String label() {
        return this.label;
    }

    public File testDirectory() {
        return this.testDirectory;
    }

    public Logger logger() {
        return this.logger;
    }

}
|
|
@ -0,0 +1,10 @@
|
|||
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt
package internal
package scripted

object CommentHandler extends BasicStatementHandler {
  def apply(command: String, args: List[String]) = ()
}
|
|
@ -0,0 +1,146 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
package internal
|
||||
package scripted
|
||||
|
||||
import java.io.File
|
||||
import sbt.io.{ IO, Path }
|
||||
import sbt.io.syntax._
|
||||
import Path._
|
||||
import sbt.io.IO
|
||||
|
||||
class FileCommands(baseDirectory: File) extends BasicStatementHandler {
|
||||
lazy val commands = commandMap
|
||||
def commandMap =
|
||||
Map(
|
||||
"touch" nonEmpty touch _,
|
||||
"delete" nonEmpty delete _,
|
||||
"exists" nonEmpty exists _,
|
||||
"mkdir" nonEmpty makeDirectories _,
|
||||
"absent" nonEmpty absent _,
|
||||
// "sync" twoArg("Two directory paths", sync _),
|
||||
"newer" twoArg ("Two paths", newer _),
|
||||
"pause" noArg {
|
||||
println("Pausing in " + baseDirectory)
|
||||
/*readLine("Press enter to continue. ") */
|
||||
print("Press enter to continue. ")
|
||||
System.console.readLine
|
||||
println()
|
||||
},
|
||||
"sleep" oneArg ("Time in milliseconds", time => Thread.sleep(time.toLong)),
|
||||
"exec" nonEmpty (execute _),
|
||||
"copy" copy (to => rebase(baseDirectory, to)),
|
||||
"copy-file" twoArg ("Two paths", copyFile _),
|
||||
"must-mirror" twoArg ("Two paths", diffFiles _),
|
||||
"copy-flat" copy flat
|
||||
)
|
||||
|
||||
def apply(command: String, arguments: List[String]): Unit =
|
||||
commands.get(command).map(_(arguments)) match {
|
||||
case Some(_) => ()
|
||||
case _ => scriptError("Unknown command " + command); ()
|
||||
}
|
||||
|
||||
def scriptError(message: String): Unit = sys.error("Test script error: " + message)
|
||||
def spaced[T](l: Seq[T]) = l.mkString(" ")
|
||||
def fromStrings(paths: List[String]) = paths.map(fromString)
|
||||
def fromString(path: String) = new File(baseDirectory, path)
|
||||
def touch(paths: List[String]): Unit = IO.touch(fromStrings(paths))
|
||||
def delete(paths: List[String]): Unit = IO.delete(fromStrings(paths))
|
||||
/*def sync(from: String, to: String) =
|
||||
IO.sync(fromString(from), fromString(to), log)*/
|
||||
def copyFile(from: String, to: String): Unit =
|
||||
IO.copyFile(fromString(from), fromString(to))
|
||||
def makeDirectories(paths: List[String]) =
|
||||
IO.createDirectories(fromStrings(paths))
|
||||
def diffFiles(file1: String, file2: String): Unit = {
|
||||
val lines1 = IO.readLines(fromString(file1))
|
||||
val lines2 = IO.readLines(fromString(file2))
|
||||
if (lines1 != lines2)
|
||||
scriptError(
|
||||
"File contents are different:\n" + lines1.mkString("\n") +
|
||||
"\nAnd:\n" + lines2.mkString("\n")
|
||||
)
|
||||
}
|
||||
|
||||
def newer(a: String, b: String): Unit = {
|
||||
val pathA = fromString(a)
|
||||
val pathB = fromString(b)
|
||||
val isNewer = pathA.exists &&
|
||||
(!pathB.exists || IO.getModifiedTimeOrZero(pathA) > IO.getModifiedTimeOrZero(pathB))
|
||||
if (!isNewer) {
|
||||
scriptError(s"$pathA is not newer than $pathB")
|
||||
}
|
||||
}
|
||||
def exists(paths: List[String]): Unit = {
|
||||
val notPresent = fromStrings(paths).filter(!_.exists)
|
||||
if (notPresent.nonEmpty)
|
||||
scriptError("File(s) did not exist: " + notPresent.mkString("[ ", " , ", " ]"))
|
||||
}
|
||||
def absent(paths: List[String]): Unit = {
|
||||
val present = fromStrings(paths).filter(_.exists)
|
||||
if (present.nonEmpty)
|
||||
scriptError("File(s) existed: " + present.mkString("[ ", " , ", " ]"))
|
||||
}
|
||||
def execute(command: List[String]): Unit = execute0(command.head, command.tail)
|
||||
def execute0(command: String, args: List[String]): Unit = {
|
||||
if (command.trim.isEmpty)
|
||||
scriptError("Command was empty.")
|
||||
else {
|
||||
val exitValue = sys.process.Process(command :: args, baseDirectory).!
|
||||
if (exitValue != 0)
|
||||
sys.error("Nonzero exit value (" + exitValue + ")")
|
||||
}
|
||||
}
|
||||
|
||||
// these are for readability of the command list
|
||||
implicit def commandBuilder(s: String): CommandBuilder = new CommandBuilder(s)
|
||||
final class CommandBuilder(commandName: String) {
|
||||
type NamedCommand = (String, List[String] => Unit)
|
||||
def nonEmpty(action: List[String] => Unit): NamedCommand =
|
||||
commandName -> { paths =>
|
||||
if (paths.isEmpty)
|
||||
scriptError("No arguments specified for " + commandName + " command.")
|
||||
else
|
||||
action(paths)
|
||||
}
|
||||
def twoArg(requiredArgs: String, action: (String, String) => Unit): NamedCommand =
|
||||
commandName -> {
|
||||
case List(from, to) => action(from, to)
|
||||
case other => wrongArguments(requiredArgs, other)
|
||||
}
|
||||
def noArg(action: => Unit): NamedCommand =
|
||||
commandName -> {
|
||||
case Nil => action
|
||||
case other => wrongArguments(other)
|
||||
}
|
||||
def oneArg(requiredArgs: String, action: String => Unit): NamedCommand =
|
||||
commandName -> {
|
||||
case List(single) => action(single)
|
||||
case other => wrongArguments(requiredArgs, other)
|
||||
}
|
||||
def copy(mapper: File => FileMap): NamedCommand =
|
||||
commandName -> {
|
||||
case Nil => scriptError("No paths specified for " + commandName + " command.")
|
||||
case path :: Nil => scriptError("No destination specified for " + commandName + " command.")
|
||||
case paths =>
|
||||
val mapped = fromStrings(paths)
|
||||
val map = mapper(mapped.last)
|
||||
IO.copy(mapped.init pair map)
|
||||
()
|
||||
}
|
||||
|
||||
def wrongArguments(args: List[String]): Unit =
|
||||
scriptError(
|
||||
"Command '" + commandName + "' does not accept arguments (found '" + spaced(args) + "')."
|
||||
)
|
||||
|
||||
def wrongArguments(requiredArgs: String, args: List[String]): Unit =
|
||||
scriptError(
|
||||
"Wrong number of arguments to " + commandName + " command. " +
|
||||
requiredArgs + " required, found: '" + spaced(args) + "'."
|
||||
)
|
||||
}
|
||||
}
|
||||
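// Illustrative sketch (not part of the original sources): FileCommands is a
// BasicStatementHandler, so each scripted command is just a (name, arguments)
// invocation against a base directory. The paths below are arbitrary example values.
object FileCommandsExample {
  import java.io.File
  import sbt.internal.scripted.FileCommands

  def main(args: Array[String]): Unit = {
    val handler = new FileCommands(new File("target/scripted-example"))
    handler("mkdir", List("src/main/scala"))
    handler("touch", List("src/main/scala/A.scala"))
    handler("exists", List("src/main/scala/A.scala")) // raises a script error if absent
    handler("absent", List("does-not-exist.txt"))
  }
}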
|
|
@ -0,0 +1,18 @@
|
|||
/* sbt -- Simple Build Tool
 * Copyright 2009 Mark Harrah
 */
package sbt
package internal
package scripted

final class FilteredLoader(parent: ClassLoader) extends ClassLoader(parent) {
  @throws(classOf[ClassNotFoundException])
  override final def loadClass(className: String, resolve: Boolean): Class[_] = {
    if (className.startsWith("java.") || className.startsWith("javax."))
      super.loadClass(className, resolve)
    else
      throw new ClassNotFoundException(className)
  }
  override def getResources(name: String) = null
  override def getResource(name: String) = null
}
|
|
@ -0,0 +1,5 @@
|
|||
package sbt.internal.scripted

trait HandlersProvider {
  def getHandlers(config: ScriptConfig): Map[Char, StatementHandler]
}
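// Illustrative sketch (not part of the original sources): a HandlersProvider wiring
// up the handlers defined in this module. The leading characters ('$', '#') are an
// assumption about how a scripted parser dispatches statements to handlers.
object ExampleHandlersProvider extends sbt.internal.scripted.HandlersProvider {
  import sbt.internal.scripted.{ CommentHandler, FileCommands, ScriptConfig, StatementHandler }

  def getHandlers(config: ScriptConfig): Map[Char, StatementHandler] =
    Map(
      '$' -> new FileCommands(config.testDirectory()),
      '#' -> CommentHandler
    )
}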
|
|
@ -0,0 +1,53 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
package internal
|
||||
package scripted
|
||||
|
||||
final class TestException(statement: Statement, msg: String, exception: Throwable)
|
||||
extends RuntimeException(statement.linePrefix + " " + msg, exception)
|
||||
|
||||
class ScriptRunner {
|
||||
import scala.collection.mutable.HashMap
|
||||
def apply(statements: List[(StatementHandler, Statement)]): Unit = {
|
||||
val states = new HashMap[StatementHandler, Any]
|
||||
def processStatement(handler: StatementHandler, statement: Statement): Unit = {
|
||||
val state = states(handler).asInstanceOf[handler.State]
|
||||
val nextState =
|
||||
try {
|
||||
Right(handler(statement.command, statement.arguments, state))
|
||||
} catch {
|
||||
case e: Exception => Left(e)
|
||||
}
|
||||
nextState match {
|
||||
case Left(err) =>
|
||||
if (statement.successExpected) {
|
||||
err match {
|
||||
case t: TestFailed =>
|
||||
throw new TestException(statement, "Command failed: " + t.getMessage, null)
|
||||
case _ => throw new TestException(statement, "Command failed", err)
|
||||
}
|
||||
} else
|
||||
()
|
||||
case Right(s) =>
|
||||
if (statement.successExpected)
|
||||
states(handler) = s
|
||||
else
|
||||
throw new TestException(statement, "Command succeeded but failure was expected", null)
|
||||
}
|
||||
}
|
||||
val handlers = Set() ++ statements.map(_._1)
|
||||
|
||||
try {
|
||||
handlers.foreach(handler => states(handler) = handler.initialState)
|
||||
statements foreach (Function.tupled(processStatement))
|
||||
} finally {
|
||||
for (handler <- handlers; state <- states.get(handler)) {
|
||||
try {
|
||||
handler.finish(state.asInstanceOf[handler.State])
|
||||
} catch { case e: Exception => () }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
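
// A minimal usage sketch (not part of the original sources): wires a trivial handler for the
// '$' start character into the test-script parser from this package and runs the parsed
// statements. The handler and the script text are hypothetical.
object ScriptRunnerExample {
  private object EchoHandler extends BasicStatementHandler {
    // Echo every command; a real handler would throw TestFailed to report a command failure.
    def apply(command: String, arguments: List[String]): Unit =
      println(command + " " + arguments.mkString(" "))
  }

  def main(args: Array[String]): Unit = {
    val parser = new TestScriptParser(Map('$' -> EchoHandler))
    val statements = parser.parse("$ copy-file a.txt b.txt\n$ mkdir target\n")
    (new ScriptRunner)(statements)
  }
}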
|
||||
|
|
@ -0,0 +1,202 @@
|
|||
package sbt
|
||||
package internal
|
||||
package scripted
|
||||
|
||||
import java.io.File
|
||||
import sbt.util.{ Logger, LogExchange, Level }
|
||||
import sbt.internal.util.{ ManagedLogger, ConsoleAppender, BufferedAppender }
|
||||
import sbt.io.IO.wrapNull
|
||||
import sbt.io.{ DirectoryFilter, HiddenFileFilter }
|
||||
import sbt.io.syntax._
|
||||
import sbt.internal.io.Resources
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
|
||||
object ScriptedRunnerImpl {
|
||||
def run(
|
||||
resourceBaseDirectory: File,
|
||||
bufferLog: Boolean,
|
||||
tests: Array[String],
|
||||
handlersProvider: HandlersProvider
|
||||
): Unit = {
|
||||
val runner = new ScriptedTests(resourceBaseDirectory, bufferLog, handlersProvider)
|
||||
val logger = newLogger
|
||||
val allTests = get(tests, resourceBaseDirectory, logger) flatMap {
|
||||
case ScriptedTest(group, name) =>
|
||||
runner.scriptedTest(group, name, logger)
|
||||
}
|
||||
runAll(allTests)
|
||||
}
|
||||
def runAll(tests: Seq[() => Option[String]]): Unit = {
|
||||
val errors = for (test <- tests; err <- test()) yield err
|
||||
if (errors.nonEmpty)
|
||||
sys.error(errors.mkString("Failed tests:\n\t", "\n\t", "\n"))
|
||||
}
|
||||
def get(tests: Seq[String], baseDirectory: File, log: ManagedLogger): Seq[ScriptedTest] =
|
||||
if (tests.isEmpty) listTests(baseDirectory, log) else parseTests(tests)
|
||||
def listTests(baseDirectory: File, log: ManagedLogger): Seq[ScriptedTest] =
|
||||
(new ListTests(baseDirectory, _ => true, log)).listTests
|
||||
def parseTests(in: Seq[String]): Seq[ScriptedTest] =
|
||||
for (testString <- in) yield {
|
||||
val Array(group, name) = testString.split("/").map(_.trim)
|
||||
ScriptedTest(group, name)
|
||||
}
|
||||
private[sbt] val generateId: AtomicInteger = new AtomicInteger
|
||||
private[sbt] def newLogger: ManagedLogger = {
|
||||
val loggerName = "scripted-" + generateId.incrementAndGet
|
||||
val x = LogExchange.logger(loggerName)
|
||||
x
|
||||
}
|
||||
}
|
||||
|
||||
final class ScriptedTests(
|
||||
resourceBaseDirectory: File,
|
||||
bufferLog: Boolean,
|
||||
handlersProvider: HandlersProvider
|
||||
) {
|
||||
private val testResources = new Resources(resourceBaseDirectory)
|
||||
private val consoleAppender: ConsoleAppender = ConsoleAppender()
|
||||
|
||||
val ScriptFilename = "test"
|
||||
val PendingScriptFilename = "pending"
|
||||
|
||||
def scriptedTest(group: String, name: String, log: xsbti.Logger): Seq[() => Option[String]] =
|
||||
scriptedTest(group, name, Logger.xlog2Log(log))
|
||||
|
||||
def scriptedTest(group: String, name: String, log: ManagedLogger): Seq[() => Option[String]] =
|
||||
scriptedTest(group, name, (_ => ()), log)
|
||||
|
||||
def scriptedTest(
|
||||
group: String,
|
||||
name: String,
|
||||
prescripted: File => Unit,
|
||||
log: ManagedLogger
|
||||
): Seq[() => Option[String]] = {
|
||||
for (groupDir <- (resourceBaseDirectory * group).get; nme <- (groupDir * name).get) yield {
|
||||
val g = groupDir.getName
|
||||
val n = nme.getName
|
||||
val str = s"$g / $n"
|
||||
() => {
|
||||
println("Running " + str)
|
||||
testResources.readWriteResourceDirectory(g, n) { testDirectory =>
|
||||
val disabled = new File(testDirectory, "disabled").isFile
|
||||
if (disabled) {
|
||||
log.info("D " + str + " [DISABLED]")
|
||||
None
|
||||
} else {
|
||||
try {
|
||||
scriptedTest(str, testDirectory, prescripted, log); None
|
||||
} catch {
|
||||
case _: TestException | _: PendingTestSuccessException => Some(str)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def scriptedTest(
|
||||
label: String,
|
||||
testDirectory: File,
|
||||
prescripted: File => Unit,
|
||||
log: ManagedLogger
|
||||
): Unit = {
|
||||
val buffered = BufferedAppender(consoleAppender)
|
||||
LogExchange.unbindLoggerAppenders(log.name)
|
||||
LogExchange.bindLoggerAppenders(log.name, (buffered -> Level.Debug) :: Nil)
|
||||
if (bufferLog) {
|
||||
buffered.record()
|
||||
}
|
||||
def createParser() = {
|
||||
// val fileHandler = new FileCommands(testDirectory)
|
||||
// // val sbtHandler = new SbtHandler(testDirectory, launcher, buffered, launchOpts)
|
||||
// new TestScriptParser(Map('$' -> fileHandler, /* '>' -> sbtHandler, */ '#' -> CommentHandler))
|
||||
val scriptConfig = new ScriptConfig(label, testDirectory, log)
|
||||
new TestScriptParser(handlersProvider getHandlers scriptConfig)
|
||||
}
|
||||
val (file, pending) = {
|
||||
val normal = new File(testDirectory, ScriptFilename)
|
||||
val pending = new File(testDirectory, PendingScriptFilename)
|
||||
if (pending.isFile) (pending, true) else (normal, false)
|
||||
}
|
||||
val pendingString = if (pending) " [PENDING]" else ""
|
||||
|
||||
def runTest() = {
|
||||
val run = new ScriptRunner
|
||||
val parser = createParser()
|
||||
run(parser.parse(file))
|
||||
}
|
||||
def testFailed(): Unit = {
|
||||
if (pending) buffered.clearBuffer() else buffered.stopBuffer()
|
||||
log.error("x " + label + pendingString)
|
||||
}
|
||||
|
||||
try {
|
||||
prescripted(testDirectory)
|
||||
runTest()
|
||||
log.info("+ " + label + pendingString)
|
||||
if (pending) throw new PendingTestSuccessException(label)
|
||||
} catch {
|
||||
case e: TestException =>
|
||||
testFailed()
|
||||
e.getCause match {
|
||||
case null | _: java.net.SocketException => log.error(" " + e.getMessage)
|
||||
case _ => if (!pending) e.printStackTrace
|
||||
}
|
||||
if (!pending) throw e
|
||||
case e: PendingTestSuccessException =>
|
||||
testFailed()
|
||||
log.error(" Mark as passing to remove this failure.")
|
||||
throw e
|
||||
case e: Exception =>
|
||||
testFailed()
|
||||
if (!pending) throw e
|
||||
} finally {
|
||||
buffered.clearBuffer()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// object ScriptedTests extends ScriptedRunner {
|
||||
// val emptyCallback: File => Unit = { _ => () }
|
||||
// }
|
||||
|
||||
final case class ScriptedTest(group: String, name: String) {
|
||||
override def toString = group + "/" + name
|
||||
}
|
||||
|
||||
object ListTests {
|
||||
def list(directory: File, filter: java.io.FileFilter) = wrapNull(directory.listFiles(filter))
|
||||
}
|
||||
import ListTests._
|
||||
final class ListTests(baseDirectory: File, accept: ScriptedTest => Boolean, log: Logger) {
|
||||
def filter = DirectoryFilter -- HiddenFileFilter
|
||||
def listTests: Seq[ScriptedTest] = {
|
||||
list(baseDirectory, filter) flatMap { group =>
|
||||
val groupName = group.getName
|
||||
listTests(group).map(ScriptedTest(groupName, _))
|
||||
}
|
||||
}
|
||||
private[this] def listTests(group: File): Seq[String] = {
|
||||
val groupName = group.getName
|
||||
val allTests = list(group, filter).sortBy(_.getName)
|
||||
if (allTests.isEmpty) {
|
||||
log.warn("No tests in test group " + groupName)
|
||||
Seq.empty
|
||||
} else {
|
||||
val (included, skipped) =
|
||||
allTests.toList.partition(test => accept(ScriptedTest(groupName, test.getName)))
|
||||
if (included.isEmpty)
|
||||
log.warn("Test group " + groupName + " skipped.")
|
||||
else if (skipped.nonEmpty) {
|
||||
log.warn("Tests skipped in group " + group.getName + ":")
|
||||
skipped.foreach(testName => log.warn(" " + testName.getName))
|
||||
}
|
||||
Seq(included.map(_.getName): _*)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class PendingTestSuccessException(label: String) extends Exception {
|
||||
override def getMessage: String =
|
||||
s"The pending test $label succeeded. Mark this test as passing to remove this failure."
|
||||
}
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
package internal
|
||||
package scripted
|
||||
|
||||
trait StatementHandler {
|
||||
type State
|
||||
def initialState: State
|
||||
def apply(command: String, arguments: List[String], state: State): State
|
||||
def finish(state: State): Unit
|
||||
}
|
||||
|
||||
trait BasicStatementHandler extends StatementHandler {
|
||||
final type State = Unit
|
||||
final def initialState = ()
|
||||
|
||||
final def apply(command: String, arguments: List[String], state: Unit): Unit =
|
||||
apply(command, arguments)
|
||||
|
||||
def apply(command: String, arguments: List[String]): Unit
|
||||
def finish(state: Unit) = ()
|
||||
}
|
||||
|
||||
/** Use when a stack trace is not useful */
|
||||
final class TestFailed(msg: String) extends RuntimeException(msg) {
|
||||
override def fillInStackTrace = this
|
||||
}
|
||||
|
|
@ -0,0 +1,95 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt
|
||||
package internal
|
||||
package scripted
|
||||
|
||||
import java.io.File
|
||||
import scala.util.parsing.combinator._
|
||||
import scala.util.parsing.input.Positional
|
||||
import Character.isWhitespace
|
||||
import sbt.io.IO
|
||||
|
||||
/*
|
||||
statement*
|
||||
statement ::= startChar successChar word+ nl
|
||||
startChar ::= <single character>
|
||||
successChar ::= '+' | '-'
|
||||
word ::= [^ \[\]]+
|
||||
comment ::= '#' \S* nl
|
||||
nl ::= '\r' '\n' | '\n' | '\r' | eof
|
||||
*/
|
||||
final case class Statement(
|
||||
command: String,
|
||||
arguments: List[String],
|
||||
successExpected: Boolean,
|
||||
line: Int
|
||||
) {
|
||||
def linePrefix = "{line " + line + "} "
|
||||
}
|
||||
|
||||
private object TestScriptParser {
|
||||
val SuccessLiteral = "success"
|
||||
val FailureLiteral = "failure"
|
||||
val WordRegex = """[^ \[\]\s'\"][^ \[\]\s]*""".r
|
||||
}
|
||||
|
||||
import TestScriptParser._
|
||||
class TestScriptParser(handlers: Map[Char, StatementHandler]) extends RegexParsers {
|
||||
require(handlers.nonEmpty)
|
||||
override def skipWhitespace = false
|
||||
|
||||
import IO.read
|
||||
if (handlers.keys.exists(isWhitespace))
|
||||
sys.error("Start characters cannot be whitespace")
|
||||
if (handlers.keys.exists(key => key == '+' || key == '-'))
|
||||
sys.error("Start characters cannot be '+' or '-'")
|
||||
|
||||
def parse(scriptFile: File): List[(StatementHandler, Statement)] =
|
||||
parse(read(scriptFile), Some(scriptFile.getAbsolutePath))
|
||||
def parse(script: String): List[(StatementHandler, Statement)] = parse(script, None)
|
||||
private def parse(script: String, label: Option[String]): List[(StatementHandler, Statement)] = {
|
||||
parseAll(statements, script) match {
|
||||
case Success(result, next) => result
|
||||
case err: NoSuccess => {
|
||||
val labelString = label.map("'" + _ + "' ").getOrElse("")
|
||||
sys.error("Could not parse test script, " + labelString + err.toString)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lazy val statements = rep1(space ~> statement <~ newline)
|
||||
|
||||
def statement: Parser[(StatementHandler, Statement)] = {
|
||||
trait PositionalStatement extends Positional {
|
||||
def tuple: (StatementHandler, Statement)
|
||||
}
|
||||
positioned {
|
||||
val command = (word | err("expected command"))
|
||||
val arguments = rep(space ~> (word | failure("expected argument")))
|
||||
(successParser ~ (space ~> startCharacterParser <~ space) ~! command ~! arguments) ^^ {
|
||||
case successExpected ~ start ~ command ~ arguments =>
|
||||
new PositionalStatement {
|
||||
def tuple =
|
||||
(handlers(start), new Statement(command, arguments, successExpected, pos.line))
|
||||
}
|
||||
}
|
||||
} ^^ (_.tuple)
|
||||
}
|
||||
|
||||
def successParser: Parser[Boolean] = ('+' ^^^ true) | ('-' ^^^ false) | success(true)
|
||||
def space: Parser[String] = """[ \t]*""".r
|
||||
|
||||
lazy val word: Parser[String] =
|
||||
("\'" ~> "[^'\n\r]*".r <~ "\'") | ("\"" ~> "[^\"\n\r]*".r <~ "\"") | WordRegex
|
||||
|
||||
def startCharacterParser: Parser[Char] =
|
||||
elem("start character", handlers.contains _) |
|
||||
(
|
||||
(newline | err("expected start character " + handlers.keys.mkString("(", "", ")")))
|
||||
~> failure("end of input")
|
||||
)
|
||||
|
||||
def newline = """\s*([\n\r]|$)""".r
|
||||
}
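
// An illustrative sketch (not part of the original sources): a script accepted by the grammar
// above, assuming handlers are registered for '$' and '#'. A leading '-' marks a statement
// whose failure is expected; quoted words may contain spaces.
object TestScriptParserExample {
  val sampleScript: String =
    "$ touch a.txt\n" +
      "$ copy-file a.txt b.txt\n" +
      "- $ copy-file missing.txt c.txt\n" +
      "# 'a quoted word' can appear anywhere a word can\n"

  // new TestScriptParser(Map('$' -> fileHandler, '#' -> commentHandler)).parse(sampleScript)
  // yields one (handler, Statement) pair per line, for example
  // Statement("copy-file", List("a.txt", "b.txt"), successExpected = true, line = 2).
}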
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
Simple Build Tool: Cache Component
|
||||
Copyright 2009 Mark Harrah
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
package sbt
|
||||
package internal
|
||||
package util
|
||||
|
||||
class EmptyCacheError extends RuntimeException
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
package sbt.util
|
||||
|
||||
import sjsonnew.{ BasicJsonProtocol, JsonFormat }
|
||||
|
||||
trait BasicCacheImplicits { self: BasicJsonProtocol =>
|
||||
|
||||
implicit def basicCache[I: JsonFormat, O: JsonFormat]: Cache[I, O] =
|
||||
new BasicCache[I, O]()
|
||||
|
||||
def wrapIn[I, J](implicit f: I => J, g: J => I, jCache: SingletonCache[J]): SingletonCache[I] =
|
||||
new SingletonCache[I] {
|
||||
override def read(from: Input): I = g(jCache.read(from))
|
||||
override def write(to: Output, value: I) = jCache.write(to, f(value))
|
||||
}
|
||||
|
||||
def singleton[T](t: T): SingletonCache[T] =
|
||||
SingletonCache.basicSingletonCache(asSingleton(t))
|
||||
}
|
||||
|
|
@ -0,0 +1,78 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
import java.io.File
|
||||
|
||||
/** The result of a cache query */
|
||||
sealed trait CacheResult[K]
|
||||
|
||||
/** A successful hit on the cache */
|
||||
case class Hit[O](value: O) extends CacheResult[O]
|
||||
|
||||
/**
|
||||
* A cache miss.
|
||||
* `update` associates the missing key with `O` in the cache.
|
||||
*/
|
||||
case class Miss[O](update: O => Unit) extends CacheResult[O]
|
||||
|
||||
/**
|
||||
* A simple cache with keys of type `I` and values of type `O`
|
||||
*/
|
||||
trait Cache[I, O] {
|
||||
|
||||
/**
|
||||
* Queries the cache backed with store `store` for key `key`.
|
||||
*/
|
||||
def apply(store: CacheStore)(key: I): CacheResult[O]
|
||||
}
|
||||
|
||||
object Cache {
|
||||
|
||||
/**
|
||||
* Materializes a cache.
|
||||
*/
|
||||
def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c
|
||||
|
||||
/**
|
||||
* Returns a function that represents a cache that inserts on miss.
|
||||
*
|
||||
* @param cacheFile The store that backs this cache.
|
||||
* @param default A function that computes a default value to insert on a cache miss.
|
||||
*/
|
||||
def cached[I, O](cacheFile: File)(default: I => O)(implicit cache: Cache[I, O]): I => O =
|
||||
cached(CacheStore(cacheFile))(default)
|
||||
|
||||
/**
|
||||
* Returns a function that represents a cache that inserts on miss.
|
||||
*
|
||||
* @param store The store that backs this cache.
|
||||
* @param default A function that computes a default value to insert on a cache miss.
|
||||
*/
|
||||
def cached[I, O](store: CacheStore)(default: I => O)(implicit cache: Cache[I, O]): I => O =
|
||||
key =>
|
||||
cache(store)(key) match {
|
||||
case Hit(value) =>
|
||||
value
|
||||
|
||||
case Miss(update) =>
|
||||
val result = default(key)
|
||||
update(result)
|
||||
result
|
||||
}
|
||||
|
||||
def debug[I](label: String, cache: SingletonCache[I]): SingletonCache[I] =
|
||||
new SingletonCache[I] {
|
||||
override def read(from: Input): I = {
|
||||
val value = cache.read(from)
|
||||
println(label + ".read: " + value)
|
||||
value
|
||||
}
|
||||
|
||||
override def write(to: Output, value: I): Unit = {
|
||||
println(label + ".write: " + value)
|
||||
cache.write(to, value)
|
||||
}
|
||||
}
|
||||
}
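
// A usage sketch (not part of the original source): memoizes an expensive function on disk.
// The first call takes the Miss branch, runs `expensive` and stores the result together with
// the key's hash; later calls with the same argument are answered from the store. The file
// path and the function are hypothetical.
object CacheExample {
  import sbt.util.CacheImplicits._ // provides the implicit Cache[String, Int]

  def expensive(s: String): Int = { Thread.sleep(1000); s.length }

  val cachedLength: String => Int =
    Cache.cached[String, Int](new File("target/length.cache"))(expensive)

  // cachedLength("hello") computes and stores 5; the next call reads 5 back from the store.
}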
|
||||
|
|
@ -0,0 +1,6 @@
|
|||
package sbt.util
|
||||
|
||||
import sjsonnew.BasicJsonProtocol
|
||||
|
||||
object CacheImplicits extends CacheImplicits
|
||||
trait CacheImplicits extends BasicCacheImplicits with BasicJsonProtocol
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
package sbt.util
|
||||
|
||||
import java.io.{ File, InputStream, OutputStream }
|
||||
import sbt.io.syntax.fileToRichFile
|
||||
import sbt.io.{ IO, Using }
|
||||
import sjsonnew.{ IsoString, JsonReader, JsonWriter, SupportConverter }
|
||||
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
|
||||
/** A `CacheStore` is used by the caching infrastructure to persist cached information. */
|
||||
abstract class CacheStore extends Input with Output {
|
||||
|
||||
/** Delete the persisted information. */
|
||||
def delete(): Unit
|
||||
|
||||
}
|
||||
|
||||
object CacheStore {
|
||||
implicit lazy val jvalueIsoString: IsoString[JValue] =
|
||||
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
|
||||
|
||||
/** Returns file-based CacheStore using standard JSON converter. */
|
||||
def apply(cacheFile: File): CacheStore = file(cacheFile)
|
||||
|
||||
/** Returns file-based CacheStore using standard JSON converter. */
|
||||
def file(cacheFile: File): CacheStore = new FileBasedStore[JValue](cacheFile, Converter)
|
||||
}
|
||||
|
||||
/** Factory that can make new stores. */
|
||||
abstract class CacheStoreFactory {
|
||||
|
||||
/** Create a new store. */
|
||||
def make(identifier: String): CacheStore
|
||||
|
||||
/** Create a new `CacheStoreFactory` from this factory. */
|
||||
def sub(identifier: String): CacheStoreFactory
|
||||
|
||||
/** A symbolic alias for `sub`. */
|
||||
final def /(identifier: String): CacheStoreFactory = sub(identifier)
|
||||
}
|
||||
|
||||
object CacheStoreFactory {
|
||||
implicit lazy val jvalueIsoString: IsoString[JValue] =
|
||||
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
|
||||
|
||||
/** Returns directory-based CacheStoreFactory using standard JSON converter. */
|
||||
def apply(base: File): CacheStoreFactory = directory(base)
|
||||
|
||||
/** Returns directory-based CacheStoreFactory using standard JSON converter. */
|
||||
def directory(base: File): CacheStoreFactory = new DirectoryStoreFactory[JValue](base, Converter)
|
||||
}
|
||||
|
||||
/** A factory that creates new stores persisted in `base`. */
|
||||
class DirectoryStoreFactory[J: IsoString](base: File, converter: SupportConverter[J])
|
||||
extends CacheStoreFactory {
|
||||
IO.createDirectory(base)
|
||||
|
||||
def make(identifier: String): CacheStore = new FileBasedStore(base / identifier, converter)
|
||||
|
||||
def sub(identifier: String): CacheStoreFactory =
|
||||
new DirectoryStoreFactory(base / identifier, converter)
|
||||
}
|
||||
|
||||
/** A `CacheStore` that persists information in `file`. */
|
||||
class FileBasedStore[J: IsoString](file: File, converter: SupportConverter[J]) extends CacheStore {
|
||||
IO.touch(file, setModified = false)
|
||||
|
||||
def read[T: JsonReader]() =
|
||||
Using.fileInputStream(file)(stream => new PlainInput(stream, converter).read())
|
||||
|
||||
def write[T: JsonWriter](value: T) =
|
||||
Using.fileOutputStream(append = false)(file) { stream =>
|
||||
new PlainOutput(stream, converter).write(value)
|
||||
}
|
||||
|
||||
def delete() = IO.delete(file)
|
||||
def close() = ()
|
||||
}
|
||||
|
||||
/** A store that reads from `inputStream` and writes to `outputStream`. */
|
||||
class StreamBasedStore[J: IsoString](
|
||||
inputStream: InputStream,
|
||||
outputStream: OutputStream,
|
||||
converter: SupportConverter[J]
|
||||
) extends CacheStore {
|
||||
def read[T: JsonReader]() = new PlainInput(inputStream, converter).read()
|
||||
def write[T: JsonWriter](value: T) = new PlainOutput(outputStream, converter).write(value)
|
||||
def delete() = ()
|
||||
def close() = { inputStream.close(); outputStream.close() }
|
||||
}
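
// A usage sketch (not part of the original source): a directory-backed factory handing out
// one JSON file per cache, read and written through sjson-new formats. The paths are
// hypothetical.
object CacheStoreExample {
  import sbt.util.CacheImplicits._ // JsonFormat instances for basic types

  val factory: CacheStoreFactory = CacheStoreFactory(new File("target/caches"))
  val store: CacheStore = factory.make("last-run")         // target/caches/last-run
  val subFactory: CacheStoreFactory = factory / "compile"  // nested directory of stores

  def remember(n: Long): Unit = store.write(n)
  def recall(): Long = store.read[Long](0L) // 0L is the fallback when the file is empty
}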
|
||||
|
|
@ -0,0 +1,241 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
import java.io.File
|
||||
|
||||
import scala.util.control.NonFatal
|
||||
import sbt.io.{ Hash, IO }
|
||||
import sjsonnew.{ Builder, DeserializationException, JsonFormat, Unbuilder, deserializationError }
|
||||
import CacheImplicits.{ arrayFormat => _, _ }
|
||||
|
||||
sealed trait FileInfo { def file: File }
|
||||
sealed trait HashFileInfo extends FileInfo {
|
||||
@deprecated("Use hashArray instead", "1.3.0")
|
||||
def hash: List[Byte] = hashArray.toList
|
||||
private[util] def hashArray: Array[Byte]
|
||||
}
|
||||
sealed trait ModifiedFileInfo extends FileInfo { def lastModified: Long }
|
||||
sealed trait PlainFileInfo extends FileInfo { def exists: Boolean }
|
||||
|
||||
sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo
|
||||
|
||||
object HashFileInfo {
|
||||
implicit val format: JsonFormat[HashFileInfo] = FileInfo.hash.format
|
||||
}
|
||||
object ModifiedFileInfo {
|
||||
implicit val format: JsonFormat[ModifiedFileInfo] = FileInfo.lastModified.format
|
||||
}
|
||||
object PlainFileInfo {
|
||||
implicit val format: JsonFormat[PlainFileInfo] = FileInfo.exists.format
|
||||
}
|
||||
object HashModifiedFileInfo {
|
||||
implicit val format: JsonFormat[HashModifiedFileInfo] = FileInfo.full.format
|
||||
}
|
||||
|
||||
private final case class PlainFile(file: File, exists: Boolean) extends PlainFileInfo
|
||||
private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo
|
||||
@deprecated("Kept for plugin compat, but will be removed in sbt 2.0", "1.3.0")
|
||||
private final case class FileHash(file: File, override val hash: List[Byte]) extends HashFileInfo {
|
||||
override val hashArray: Array[Byte] = hash.toArray
|
||||
}
|
||||
private final case class FileHashArrayRepr(file: File, override val hashArray: Array[Byte])
|
||||
extends HashFileInfo {
|
||||
override def hashCode(): Int = (file, java.util.Arrays.hashCode(hashArray)).hashCode()
|
||||
override def equals(obj: Any): Boolean = obj match {
|
||||
case that: FileHashArrayRepr =>
|
||||
this.file == that.file && java.util.Arrays.equals(this.hashArray, that.hashArray)
|
||||
case _ => false
|
||||
}
|
||||
}
|
||||
@deprecated("Kept for plugin compat, but will be removed in sbt 2.0", "1.3.0")
|
||||
private final case class FileHashModified(
|
||||
file: File,
|
||||
override val hash: List[Byte],
|
||||
lastModified: Long
|
||||
) extends HashModifiedFileInfo {
|
||||
override val hashArray: Array[Byte] = hash.toArray
|
||||
}
|
||||
private final case class FileHashModifiedArrayRepr(
|
||||
file: File,
|
||||
override val hashArray: Array[Byte],
|
||||
lastModified: Long
|
||||
) extends HashModifiedFileInfo
|
||||
|
||||
final case class FilesInfo[F <: FileInfo] private (files: Set[F])
|
||||
object FilesInfo {
|
||||
def empty[F <: FileInfo]: FilesInfo[F] = FilesInfo(Set.empty[F])
|
||||
|
||||
implicit def format[F <: FileInfo: JsonFormat]: JsonFormat[FilesInfo[F]] =
|
||||
projectFormat(_.files, (fs: Set[F]) => FilesInfo(fs))
|
||||
|
||||
def full: FileInfo.Style = FileInfo.full
|
||||
def hash: FileInfo.Style = FileInfo.hash
|
||||
def lastModified: FileInfo.Style = FileInfo.lastModified
|
||||
def exists: FileInfo.Style = FileInfo.exists
|
||||
}
|
||||
|
||||
object FileInfo {
|
||||
|
||||
/**
|
||||
* Stores byte arrays as hex encoded strings, but falls back to reading an array of integers,
|
||||
* which is how it used to be stored, if that fails.
|
||||
*/
|
||||
implicit val byteArrayFormat: JsonFormat[Array[Byte]] = new JsonFormat[Array[Byte]] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): Array[Byte] = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
try {
|
||||
Hash.fromHex(unbuilder.readString(js))
|
||||
} catch {
|
||||
case _: DeserializationException =>
|
||||
CacheImplicits.arrayFormat[Byte].read(jsOpt, unbuilder)
|
||||
}
|
||||
case None => Array.empty
|
||||
}
|
||||
}
|
||||
|
||||
override def write[J](obj: Array[Byte], builder: Builder[J]): Unit = {
|
||||
builder.writeString(Hash.toHex(obj))
|
||||
}
|
||||
}
|
||||
|
||||
sealed trait Style {
|
||||
type F <: FileInfo
|
||||
|
||||
implicit def format: JsonFormat[F]
|
||||
implicit def formats: JsonFormat[FilesInfo[F]] =
|
||||
projectFormat(_.files, (fs: Set[F]) => FilesInfo(fs))
|
||||
|
||||
def apply(file: File): F
|
||||
def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files map apply)
|
||||
|
||||
def unapply(info: F): File = info.file
|
||||
def unapply(infos: FilesInfo[F]): Set[File] = infos.files map (_.file)
|
||||
}
|
||||
|
||||
object full extends Style {
|
||||
type F = HashModifiedFileInfo
|
||||
|
||||
implicit val format: JsonFormat[HashModifiedFileInfo] = new JsonFormat[HashModifiedFileInfo] {
|
||||
def write[J](obj: HashModifiedFileInfo, builder: Builder[J]) = {
|
||||
builder.beginObject()
|
||||
builder.addField("file", obj.file)
|
||||
builder.addField("hash", obj.hashArray)
|
||||
builder.addField("lastModified", obj.lastModified)
|
||||
builder.endObject()
|
||||
}
|
||||
|
||||
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val file = unbuilder.readField[File]("file")
|
||||
val hash = unbuilder.readField[Array[Byte]]("hash")
|
||||
val lastModified = unbuilder.readField[Long]("lastModified")
|
||||
unbuilder.endObject()
|
||||
FileHashModifiedArrayRepr(file, hash, lastModified)
|
||||
case None => deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
|
||||
implicit def apply(file: File): HashModifiedFileInfo =
|
||||
FileHashModifiedArrayRepr(file.getAbsoluteFile, Hash(file), IO.getModifiedTimeOrZero(file))
|
||||
def apply(file: File, hash: Array[Byte], lastModified: Long): HashModifiedFileInfo =
|
||||
FileHashModifiedArrayRepr(file.getAbsoluteFile, hash, lastModified)
|
||||
}
|
||||
|
||||
object hash extends Style {
|
||||
type F = HashFileInfo
|
||||
|
||||
implicit val format: JsonFormat[HashFileInfo] = new JsonFormat[HashFileInfo] {
|
||||
def write[J](obj: HashFileInfo, builder: Builder[J]) = {
|
||||
builder.beginObject()
|
||||
builder.addField("file", obj.file)
|
||||
builder.addField("hash", obj.hashArray)
|
||||
builder.endObject()
|
||||
}
|
||||
|
||||
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val file = unbuilder.readField[File]("file")
|
||||
val hash = unbuilder.readField[Array[Byte]]("hash")
|
||||
unbuilder.endObject()
|
||||
FileHashArrayRepr(file, hash)
|
||||
case None => deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
|
||||
implicit def apply(file: File): HashFileInfo =
|
||||
FileHashArrayRepr(file.getAbsoluteFile, computeHash(file))
|
||||
def apply(file: File, bytes: Array[Byte]): HashFileInfo =
|
||||
FileHashArrayRepr(file.getAbsoluteFile, bytes)
|
||||
|
||||
private def computeHash(file: File): Array[Byte] =
|
||||
try Hash(file)
|
||||
catch { case NonFatal(_) => Array.empty }
|
||||
}
|
||||
|
||||
object lastModified extends Style {
|
||||
type F = ModifiedFileInfo
|
||||
|
||||
implicit val format: JsonFormat[ModifiedFileInfo] = new JsonFormat[ModifiedFileInfo] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): ModifiedFileInfo =
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val file = unbuilder.readField[File]("file")
|
||||
val lastModified = unbuilder.readField[Long]("lastModified")
|
||||
unbuilder.endObject()
|
||||
FileModified(file, lastModified)
|
||||
case None =>
|
||||
deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
|
||||
override def write[J](obj: ModifiedFileInfo, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("file", obj.file)
|
||||
builder.addField("lastModified", obj.lastModified)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
|
||||
implicit def apply(file: File): ModifiedFileInfo =
|
||||
FileModified(file.getAbsoluteFile, IO.getModifiedTimeOrZero(file))
|
||||
def apply(file: File, lastModified: Long): ModifiedFileInfo =
|
||||
FileModified(file.getAbsoluteFile, lastModified)
|
||||
}
|
||||
|
||||
object exists extends Style {
|
||||
type F = PlainFileInfo
|
||||
|
||||
implicit val format: JsonFormat[PlainFileInfo] = new JsonFormat[PlainFileInfo] {
|
||||
def write[J](obj: PlainFileInfo, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("file", obj.file)
|
||||
builder.addField("exists", obj.exists)
|
||||
builder.endObject()
|
||||
}
|
||||
|
||||
def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]) = jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val file = unbuilder.readField[File]("file")
|
||||
val exists = unbuilder.readField[Boolean]("exists")
|
||||
unbuilder.endObject()
|
||||
PlainFile(file, exists)
|
||||
case None => deserializationError("Expected JsObject but found None")
|
||||
}
|
||||
}
|
||||
|
||||
implicit def apply(file: File): PlainFileInfo = {
|
||||
val abs = file.getAbsoluteFile
|
||||
PlainFile(abs, abs.exists)
|
||||
}
|
||||
def apply(file: File, exists: Boolean): PlainFileInfo = {
|
||||
val abs = file.getAbsoluteFile
|
||||
PlainFile(abs, exists)
|
||||
}
|
||||
}
|
||||
}
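
// An illustrative sketch (not part of the original source): the three styles applied to the
// same file. The chosen style decides what counts as a change for cache invalidation. The
// path is hypothetical.
object FileInfoExample {
  val f = new File("build.sbt")

  val byStamp: ModifiedFileInfo  = FileInfo.lastModified(f) // file plus last-modified time
  val byHash: HashFileInfo       = FileInfo.hash(f)         // file plus content hash
  val byExistence: PlainFileInfo = FileInfo.exists(f)       // file plus whether it exists

  // A whole set at once, as used by the difference trackers:
  val snapshot: FilesInfo[HashFileInfo] = FileInfo.hash(Set(f))
}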
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
package sbt.util
|
||||
|
||||
import java.io.{ Closeable, InputStream }
|
||||
import scala.util.control.NonFatal
|
||||
import sjsonnew.{ IsoString, JsonReader, SupportConverter }
|
||||
import sbt.io.{ IO, Using }
|
||||
import sbt.internal.util.EmptyCacheError
|
||||
|
||||
trait Input extends Closeable {
|
||||
def read[T: JsonReader](): T
|
||||
def read[T: JsonReader](default: => T): T =
|
||||
try read[T]()
|
||||
catch { case NonFatal(_) => default }
|
||||
}
|
||||
|
||||
class PlainInput[J: IsoString](input: InputStream, converter: SupportConverter[J]) extends Input {
|
||||
val isoFormat: IsoString[J] = implicitly
|
||||
|
||||
private def readFully(): String = {
|
||||
Using.streamReader((input, IO.utf8)) { reader =>
|
||||
val builder = new StringBuilder()
|
||||
val bufferSize = 1024
|
||||
val buffer = new Array[Char](bufferSize)
|
||||
var read = 0
|
||||
while ({ read = reader.read(buffer, 0, bufferSize); read != -1 }) {
|
||||
builder.appendAll(buffer, 0, read)
|
||||
}
|
||||
builder.toString()
|
||||
}
|
||||
}
|
||||
|
||||
def read[T: JsonReader](): T = {
|
||||
val str = readFully()
|
||||
if (str == "") throw new EmptyCacheError()
|
||||
else converter.fromJson(isoFormat.from(str)).get
|
||||
}
|
||||
|
||||
def close() = input.close()
|
||||
}
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
package sbt.util
|
||||
|
||||
import java.io.{ Closeable, OutputStream }
|
||||
import sjsonnew.{ IsoString, JsonWriter, SupportConverter }
|
||||
import sbt.io.Using
|
||||
|
||||
trait Output extends Closeable {
|
||||
def write[T: JsonWriter](value: T): Unit
|
||||
}
|
||||
|
||||
class PlainOutput[J: IsoString](output: OutputStream, converter: SupportConverter[J])
|
||||
extends Output {
|
||||
val isoFormat: IsoString[J] = implicitly
|
||||
|
||||
def write[T: JsonWriter](value: T) = {
|
||||
val js = converter.toJson(value).get
|
||||
val asString = isoFormat.to(js)
|
||||
Using.bufferedOutputStream(output) { writer =>
|
||||
val out = new java.io.PrintWriter(writer)
|
||||
out.print(asString)
|
||||
out.flush()
|
||||
}
|
||||
}
|
||||
|
||||
def close() = output.close()
|
||||
}
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
import scala.util.Try
|
||||
|
||||
import sjsonnew.JsonFormat
|
||||
import sjsonnew.support.murmurhash.Hasher
|
||||
|
||||
import CacheImplicits._
|
||||
|
||||
/**
|
||||
* A cache that stores a single value.
|
||||
*/
|
||||
trait SingletonCache[A] {
|
||||
|
||||
/** Reads the cache from the backing `from`. */
|
||||
def read(from: Input): A
|
||||
|
||||
/** Writes `value` to the backing `to`. */
|
||||
def write(to: Output, value: A): Unit
|
||||
|
||||
}
|
||||
|
||||
object SingletonCache {
|
||||
|
||||
implicit def basicSingletonCache[A: JsonFormat]: SingletonCache[A] =
|
||||
new SingletonCache[A] {
|
||||
override def read(from: Input): A = from.read[A]
|
||||
override def write(to: Output, value: A) = to.write(value)
|
||||
}
|
||||
|
||||
/** A lazy `SingletonCache` */
|
||||
def lzy[A: JsonFormat](mkCache: => SingletonCache[A]): SingletonCache[A] =
|
||||
new SingletonCache[A] {
|
||||
lazy val cache = mkCache
|
||||
override def read(from: Input): A = cache.read(from)
|
||||
override def write(to: Output, value: A) = cache.write(to, value)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple key-value cache.
|
||||
*/
|
||||
class BasicCache[I: JsonFormat, O: JsonFormat] extends Cache[I, O] {
|
||||
private val singletonCache: SingletonCache[(Long, O)] = implicitly
|
||||
val jsonFormat: JsonFormat[I] = implicitly
|
||||
override def apply(store: CacheStore)(key: I): CacheResult[O] = {
|
||||
val keyHash: Long = Hasher.hashUnsafe[I](key).toLong
|
||||
Try {
|
||||
val (previousKeyHash, previousValue) = singletonCache.read(store)
|
||||
if (keyHash == previousKeyHash) Hit(previousValue)
|
||||
else Miss(update(store)(keyHash))
|
||||
} getOrElse Miss(update(store)(keyHash))
|
||||
}
|
||||
|
||||
private def update(store: CacheStore)(keyHash: Long) = (value: O) => {
|
||||
singletonCache.write(store, (keyHash, value))
|
||||
}
|
||||
}
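
// A minimal sketch (not part of the original source): driving a BasicCache by hand to show
// the Hit / Miss protocol that Cache.cached wraps. The store path and the recomputation are
// hypothetical.
object BasicCacheExample {
  import java.io.File
  import sbt.util.CacheImplicits._

  val store = CacheStore(new File("target/basic.cache"))
  val cache = new BasicCache[String, Int]()

  def lookup(key: String): Int =
    cache(store)(key) match {
      case Hit(value) => value // the stored key hash matched this key
      case Miss(update) =>
        val v = key.length // recompute, then record (hash, value) for the next run
        update(v)
        v
    }
}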
|
||||
|
|
@ -0,0 +1,47 @@
|
|||
package sbt.util
|
||||
|
||||
import scala.reflect.Manifest
|
||||
|
||||
import sjsonnew.{ BasicJsonProtocol, Builder, deserializationError, JsonFormat, Unbuilder }
|
||||
|
||||
object StampedFormat extends BasicJsonProtocol {
|
||||
|
||||
def apply[T](format: JsonFormat[T])(implicit mf: Manifest[JsonFormat[T]]): JsonFormat[T] = {
|
||||
withStamp(stamp(format))(format)
|
||||
}
|
||||
|
||||
def withStamp[T, S](stamp: S)(format: JsonFormat[T])(
|
||||
implicit formatStamp: JsonFormat[S],
|
||||
equivStamp: Equiv[S]
|
||||
): JsonFormat[T] =
|
||||
new JsonFormat[T] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): T =
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
val stampedLength = unbuilder.beginArray(js)
|
||||
if (stampedLength != 2)
|
||||
sys.error(s"Expected JsArray of size 2, found JsArray of size $stampedLength.")
|
||||
val readStamp = unbuilder.nextElement
|
||||
val readValue = unbuilder.nextElement
|
||||
val actualStamp = formatStamp.read(Some(readStamp), unbuilder)
|
||||
if (equivStamp.equiv(actualStamp, stamp)) format.read(Some(readValue), unbuilder)
|
||||
else sys.error(s"Incorrect stamp. Expected: $stamp, Found: $readStamp")
|
||||
|
||||
case None =>
|
||||
deserializationError("Expected JsArray but found None.")
|
||||
}
|
||||
|
||||
override def write[J](obj: T, builder: Builder[J]): Unit = {
|
||||
builder.beginArray()
|
||||
formatStamp.write(stamp, builder)
|
||||
format.write(obj, builder)
|
||||
builder.endArray()
|
||||
}
|
||||
}
|
||||
|
||||
private def stamp[T](format: JsonFormat[T])(implicit mf: Manifest[JsonFormat[T]]): Int =
|
||||
typeHash(mf)
|
||||
|
||||
private def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode
|
||||
|
||||
}
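
// A hedged sketch (not part of the original source): wrapping an existing JsonFormat so the
// persisted JSON is a two-element array of [stamp, value], where the stamp is derived from
// the format's type. Reading an entry whose stamp no longer matches fails instead of
// silently returning stale data.
object StampedFormatExample {
  import sbt.util.CacheImplicits._

  val stampedIntList: JsonFormat[List[Int]] =
    StampedFormat(implicitly[JsonFormat[List[Int]]])
}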
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
package sbt.util
|
||||
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
|
||||
import CacheImplicits._
|
||||
|
||||
import sjsonnew.IsoString
|
||||
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
|
||||
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
import org.scalatest.FlatSpec
|
||||
|
||||
class CacheSpec extends FlatSpec {
|
||||
|
||||
implicit val isoString: IsoString[JValue] =
|
||||
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
|
||||
|
||||
"A cache" should "NOT throw an exception if read without being written previously" in {
|
||||
testCache[String, Int] {
|
||||
case (cache, store) =>
|
||||
cache(store)("missing") match {
|
||||
case Hit(_) => fail
|
||||
case Miss(_) => ()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it should "write a very simple value" in {
|
||||
testCache[String, Int] {
|
||||
case (cache, store) =>
|
||||
cache(store)("missing") match {
|
||||
case Hit(_) => fail
|
||||
case Miss(update) => update(5)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it should "be updatable" in {
|
||||
testCache[String, Int] {
|
||||
case (cache, store) =>
|
||||
val value = 5
|
||||
cache(store)("someKey") match {
|
||||
case Hit(_) => fail
|
||||
case Miss(update) => update(value)
|
||||
}
|
||||
|
||||
cache(store)("someKey") match {
|
||||
case Hit(read) => assert(read === value); ()
|
||||
case Miss(_) => fail
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it should "return the value that has been previously written" in {
|
||||
testCache[String, Int] {
|
||||
case (cache, store) =>
|
||||
val key = "someKey"
|
||||
val value = 5
|
||||
cache(store)(key) match {
|
||||
case Hit(_) => fail
|
||||
case Miss(update) => update(value)
|
||||
}
|
||||
|
||||
cache(store)(key) match {
|
||||
case Hit(read) => assert(read === value); ()
|
||||
case Miss(_) => fail
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private def testCache[K, V](f: (Cache[K, V], CacheStore) => Unit)(
|
||||
implicit cache: Cache[K, V]
|
||||
): Unit =
|
||||
IO.withTemporaryDirectory { tmp =>
|
||||
val store = new FileBasedStore(tmp / "cache-store", Converter)
|
||||
f(cache, store)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
package sbt.util
|
||||
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe._
|
||||
import sjsonnew._, support.scalajson.unsafe._
|
||||
import org.scalatest.FlatSpec
|
||||
import sbt.io.IO
|
||||
|
||||
class FileInfoSpec extends FlatSpec {
|
||||
val file = new java.io.File(".").getAbsoluteFile
|
||||
val fileInfo: ModifiedFileInfo = FileModified(file, IO.getModifiedTimeOrZero(file))
|
||||
val filesInfo = FilesInfo(Set(fileInfo))
|
||||
|
||||
it should "round trip" in assertRoundTrip(filesInfo)
|
||||
|
||||
def assertRoundTrip[A: JsonWriter: JsonReader](x: A) = {
|
||||
val jsonString: String = toJsonString(x)
|
||||
val jValue: JValue = Parser.parseUnsafe(jsonString)
|
||||
val y: A = Converter.fromJson[A](jValue).get
|
||||
assert(x === y)
|
||||
}
|
||||
|
||||
def assertJsonString[A: JsonWriter](x: A, s: String) = assert(toJsonString(x) === s)
|
||||
|
||||
def toJsonString[A: JsonWriter](x: A): String = CompactPrinter(Converter.toJson(x).get)
|
||||
}
|
||||
|
|
@ -0,0 +1,95 @@
|
|||
package sbt.util
|
||||
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
|
||||
import CacheImplicits._
|
||||
|
||||
import sjsonnew.{ Builder, deserializationError, IsoString, JsonFormat, Unbuilder }
|
||||
import sjsonnew.support.scalajson.unsafe.{ CompactPrinter, Converter, Parser }
|
||||
|
||||
import sjsonnew.shaded.scalajson.ast.unsafe.JValue
|
||||
import org.scalatest.FlatSpec
|
||||
|
||||
class SingletonCacheSpec extends FlatSpec {
|
||||
|
||||
case class ComplexType(val x: Int, y: String, z: List[Int])
|
||||
object ComplexType {
|
||||
implicit val format: JsonFormat[ComplexType] =
|
||||
new JsonFormat[ComplexType] {
|
||||
override def read[J](jsOpt: Option[J], unbuilder: Unbuilder[J]): ComplexType = {
|
||||
jsOpt match {
|
||||
case Some(js) =>
|
||||
unbuilder.beginObject(js)
|
||||
val x = unbuilder.readField[Int]("x")
|
||||
val y = unbuilder.readField[String]("y")
|
||||
val z = unbuilder.readField[List[Int]]("z")
|
||||
unbuilder.endObject()
|
||||
ComplexType(x, y, z)
|
||||
|
||||
case None =>
|
||||
deserializationError("Exception JObject but found None")
|
||||
}
|
||||
}
|
||||
|
||||
override def write[J](obj: ComplexType, builder: Builder[J]): Unit = {
|
||||
builder.beginObject()
|
||||
builder.addField("x", obj.x)
|
||||
builder.addField("y", obj.y)
|
||||
builder.addField("z", obj.z)
|
||||
builder.endObject()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
implicit val isoString: IsoString[JValue] =
|
||||
IsoString.iso(CompactPrinter.apply, Parser.parseUnsafe)
|
||||
|
||||
"A singleton cache" should "throw an exception if read without being written previously" in {
|
||||
testCache[Int] {
|
||||
case (cache, store) =>
|
||||
intercept[Exception] {
|
||||
cache.read(store)
|
||||
}
|
||||
()
|
||||
}
|
||||
}
|
||||
|
||||
it should "write a very simple value" in {
|
||||
testCache[Int] {
|
||||
case (cache, store) =>
|
||||
cache.write(store, 5)
|
||||
}
|
||||
}
|
||||
|
||||
it should "return the simple value that has been previously written" in {
|
||||
testCache[Int] {
|
||||
case (cache, store) =>
|
||||
val value = 5
|
||||
cache.write(store, value)
|
||||
val read = cache.read(store)
|
||||
|
||||
assert(read === value); ()
|
||||
}
|
||||
}
|
||||
|
||||
it should "write a complex value" in {
|
||||
testCache[ComplexType] {
|
||||
case (cache, store) =>
|
||||
val value = ComplexType(1, "hello, world!", (1 to 10 by 3).toList)
|
||||
cache.write(store, value)
|
||||
val read = cache.read(store)
|
||||
|
||||
assert(read === value); ()
|
||||
}
|
||||
}
|
||||
|
||||
private def testCache[T](f: (SingletonCache[T], CacheStore) => Unit)(
|
||||
implicit cache: SingletonCache[T]
|
||||
): Unit =
|
||||
IO.withTemporaryDirectory { tmp =>
|
||||
val store = new FileBasedStore(tmp / "cache-store", Converter)
|
||||
f(cache, store)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
Simple Build Tool: Tracking Component
|
||||
Copyright 2009, 2010 Mark Harrah
|
||||
Licensed under BSD-style license (see LICENSE)
|
||||
|
|
@ -0,0 +1,82 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
object ChangeReport {
|
||||
def modified[T](files: Set[T]): ChangeReport[T] =
|
||||
new EmptyChangeReport[T] {
|
||||
override def checked = files
|
||||
override def modified = files
|
||||
override def markAllModified = this
|
||||
}
|
||||
|
||||
def unmodified[T](files: Set[T]): ChangeReport[T] =
|
||||
new EmptyChangeReport[T] {
|
||||
override def checked = files
|
||||
override def unmodified = files
|
||||
}
|
||||
}
|
||||
|
||||
/** The result of comparing some current set of objects against a previous set of objects.*/
|
||||
trait ChangeReport[T] {
|
||||
|
||||
/** The set of all of the objects in the current set.*/
|
||||
def checked: Set[T]
|
||||
|
||||
/** All of the objects that are in the same state in the current and reference sets.*/
|
||||
def unmodified: Set[T]
|
||||
|
||||
/**
|
||||
* All checked objects that are not in the same state as the reference. This includes objects that are in both
|
||||
* sets but have changed and files that are only in one set.
|
||||
*/
|
||||
def modified: Set[T] // all changes, including added
|
||||
|
||||
/** All objects that are only in the current set.*/
|
||||
def added: Set[T]
|
||||
|
||||
/** All objects only in the previous set*/
|
||||
def removed: Set[T]
|
||||
def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other)
|
||||
|
||||
/**
|
||||
* Generate a new report with this report's unmodified set included in the new report's modified set. The new report's
|
||||
* unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report.
|
||||
*/
|
||||
def markAllModified: ChangeReport[T] =
|
||||
new ChangeReport[T] {
|
||||
def checked = ChangeReport.this.checked
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = ChangeReport.this.checked
|
||||
def added = ChangeReport.this.added
|
||||
def removed = ChangeReport.this.removed
|
||||
override def markAllModified = this
|
||||
}
|
||||
|
||||
override def toString = {
|
||||
val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed")
|
||||
val sets = List(checked, modified, unmodified, added, removed)
|
||||
val keyValues = labels.zip(sets).map { case (label, set) => label + ": " + set.mkString(", ") }
|
||||
keyValues.mkString("Change report:\n\t", "\n\t", "")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class EmptyChangeReport[T] extends ChangeReport[T] {
|
||||
def checked = Set.empty[T]
|
||||
def unmodified = Set.empty[T]
|
||||
def modified = Set.empty[T]
|
||||
def added = Set.empty[T]
|
||||
def removed = Set.empty[T]
|
||||
override def toString = "No changes"
|
||||
}
|
||||
|
||||
private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T])
|
||||
extends ChangeReport[T] {
|
||||
lazy val checked = a.checked ++ b.checked
|
||||
lazy val unmodified = a.unmodified ++ b.unmodified
|
||||
lazy val modified = a.modified ++ b.modified
|
||||
lazy val added = a.added ++ b.added
|
||||
lazy val removed = a.removed ++ b.removed
|
||||
}
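
// An illustrative sketch (not part of the original source): combining two reports and forcing
// a full-rebuild view of the result. The file names are hypothetical.
object ChangeReportExample {
  import java.io.File

  val sources = ChangeReport.modified(Set(new File("A.scala")))
  val configs = ChangeReport.unmodified(Set(new File("build.sbt")))

  val combined = sources +++ configs      // unions the checked/modified/unmodified sets
  val forced   = combined.markAllModified // everything checked is now reported as modified
}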
|
||||
|
|
@ -0,0 +1,151 @@
|
|||
package sbt.util
|
||||
|
||||
import java.io.File
|
||||
|
||||
object FileFunction {
|
||||
type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File]
|
||||
private val defaultInStyle = FileInfo.lastModified
|
||||
private val defaultOutStyle = FileInfo.exists
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param cacheBaseDirectory The folder in which to store the cache
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(cacheBaseDirectory: File)(action: Set[File] => Set[File]): Set[File] => Set[File] =
|
||||
cached(cacheBaseDirectory, inStyle = defaultInStyle, outStyle = defaultOutStyle)(action)
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param cacheBaseDirectory The folder in which to store the cache
|
||||
* @param inStyle The strategy by which to detect state change in the input files from the previous run
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(cacheBaseDirectory: File, inStyle: FileInfo.Style)(
|
||||
action: Set[File] => Set[File]
|
||||
): Set[File] => Set[File] =
|
||||
cached(cacheBaseDirectory, inStyle = inStyle, outStyle = defaultOutStyle)(action)
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param cacheBaseDirectory The folder in which to store the cache
|
||||
* @param inStyle The strategy by which to detect state change in the input files from the previous run
|
||||
* @param outStyle The strategy by which to detect state change in the output files from the previous run
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(cacheBaseDirectory: File, inStyle: FileInfo.Style, outStyle: FileInfo.Style)(
|
||||
action: Set[File] => Set[File]
|
||||
): Set[File] => Set[File] =
|
||||
cached(CacheStoreFactory(cacheBaseDirectory), inStyle, outStyle)(
|
||||
(in, out) => action(in.checked)
|
||||
)
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param storeFactory The factory to use to get stores for the input and output files.
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(storeFactory: CacheStoreFactory)(action: UpdateFunction): Set[File] => Set[File] =
|
||||
cached(storeFactory, inStyle = defaultInStyle, outStyle = defaultOutStyle)(action)
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param storeFactory The factory to use to get stores for the input and output files.
|
||||
* @param inStyle The strategy by which to detect state change in the input files from the previous run
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(storeFactory: CacheStoreFactory, inStyle: FileInfo.Style)(
|
||||
action: UpdateFunction
|
||||
): Set[File] => Set[File] =
|
||||
cached(storeFactory, inStyle = inStyle, outStyle = defaultOutStyle)(action)
|
||||
|
||||
/**
|
||||
* Generic change-detection helper used to help build / artifact generation /
|
||||
* etc. steps detect whether or not they need to run. Returns a function whose
|
||||
* input is a Set of input files, and subsequently executes the action function
|
||||
* (which does the actual work: compiles, generates resources, etc.), returning
|
||||
* a Set of output files that it generated.
|
||||
*
|
||||
* The input file and resulting output file state is cached in stores issued by
|
||||
* `storeFactory`. On each invocation, the state of the input and output
|
||||
* files from the previous run is compared against the cache, as is the set of
|
||||
* input files. If a change in file state / input files set is detected, the
|
||||
* action function is re-executed.
|
||||
*
|
||||
* @param storeFactory The factory to use to get stores for the input and output files.
|
||||
* @param inStyle The strategy by which to detect state change in the input files from the previous run
|
||||
* @param outStyle The strategy by which to detect state change in the output files from the previous run
|
||||
* @param action The work function, which receives a list of input files and returns a list of output files
|
||||
*/
|
||||
def cached(storeFactory: CacheStoreFactory, inStyle: FileInfo.Style, outStyle: FileInfo.Style)(
|
||||
action: UpdateFunction
|
||||
): Set[File] => Set[File] = {
|
||||
lazy val inCache = Difference.inputs(storeFactory.make("in-cache"), inStyle)
|
||||
lazy val outCache = Difference.outputs(storeFactory.make("out-cache"), outStyle)
|
||||
inputs => {
|
||||
inCache(inputs) { inReport =>
|
||||
outCache { outReport =>
|
||||
if (inReport.modified.isEmpty && outReport.modified.isEmpty)
|
||||
outReport.checked
|
||||
else
|
||||
action(inReport, outReport)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
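
// A usage sketch (not part of the original source): wraps a file-copying step so it only
// reruns when an input's content hash changes or a previously generated output disappears.
// The directories and the `process` step are hypothetical.
object FileFunctionExample {
  import sbt.io.IO

  def process(inputs: Set[File], outDir: File): Set[File] =
    inputs.map { src =>
      val out = new File(outDir, src.getName + ".out")
      IO.copyFile(src, out)
      out
    }

  def cachedProcess(cacheDir: File, outDir: File): Set[File] => Set[File] =
    FileFunction.cached(cacheDir, inStyle = FileInfo.hash, outStyle = FileInfo.exists) {
      inputs => process(inputs, outDir) // receives the full checked input set
    }

  // cachedProcess(new File("target/gen-cache"), new File("target/gen")) returns a function
  // that, applied to the same set of sources, skips `process` while nothing has changed.
}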
|
||||
|
|
@ -0,0 +1,330 @@
|
|||
/* sbt -- Simple Build Tool
|
||||
* Copyright 2009, 2010 Mark Harrah
|
||||
*/
|
||||
package sbt.util
|
||||
|
||||
import scala.util.{ Failure, Try, Success }
|
||||
|
||||
import java.io.File
|
||||
import sbt.io.IO
|
||||
import sbt.io.syntax._
|
||||
import sbt.internal.util.EmptyCacheError
|
||||
|
||||
import sjsonnew.JsonFormat
|
||||
import sjsonnew.support.murmurhash.Hasher
|
||||
|
||||
object Tracked {
|
||||
|
||||
/**
|
||||
* Creates a tracker that provides the last time it was evaluated.
|
||||
* The timestamp is not updated if the function throws an exception.
|
||||
*/
|
||||
def tstamp(store: CacheStore): Timestamp = tstamp(store, true)
|
||||
|
||||
/**
|
||||
* Creates a tracker that provides the last time it was evaluated.
|
||||
* The timestamp is not updated if the function throws an exception.
|
||||
*/
|
||||
def tstamp(cacheFile: File): Timestamp = tstamp(CacheStore(cacheFile))
|
||||
|
||||
/**
|
||||
* Creates a tracker that provides the last time it was evaluated.
|
||||
* If 'useStartTime' is true, the recorded time is the start of the evaluated function.
|
||||
* If 'useStartTime' is false, the recorded time is when the evaluated function completes.
|
||||
* In both cases, the timestamp is not updated if the function throws an exception.
|
||||
*/
|
||||
def tstamp(store: CacheStore, useStartTime: Boolean): Timestamp = {
|
||||
import CacheImplicits.LongJsonFormat
|
||||
new Timestamp(store, useStartTime)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a tracker that provides the last time it was evaluated.
|
||||
* If 'useStartTime' is true, the recorded time is the start of the evaluated function.
|
||||
* If 'useStartTime' is false, the recorded time is when the evaluated function completes.
|
||||
* In both cases, the timestamp is not updated if the function throws an exception.
|
||||
*/
|
||||
def tstamp(cacheFile: File, useStartTime: Boolean): Timestamp =
|
||||
tstamp(CacheStore(cacheFile), useStartTime)

  /** Creates a tracker that provides the difference between a set of input files for successive invocations. */
  def diffInputs(store: CacheStore, style: FileInfo.Style): Difference =
    Difference.inputs(store, style)

  /** Creates a tracker that provides the difference between a set of input files for successive invocations. */
  def diffInputs(cacheFile: File, style: FileInfo.Style): Difference =
    diffInputs(CacheStore(cacheFile), style)

  /** Creates a tracker that provides the difference between a set of output files for successive invocations. */
  def diffOutputs(store: CacheStore, style: FileInfo.Style): Difference =
    Difference.outputs(store, style)

  /** Creates a tracker that provides the difference between a set of output files for successive invocations. */
  def diffOutputs(cacheFile: File, style: FileInfo.Style): Difference =
    diffOutputs(CacheStore(cacheFile), style)
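
  // Editor's note: an illustrative sketch for the diff trackers above, not part of the
  // original source. `cacheDirectory`, `sources`, and `recompile` are hypothetical;
  // the FileInfo.hash style keys the comparison on content hashes rather than timestamps.
  //
  //   val inputDiff = Tracked.diffInputs(cacheDirectory / "input-diff", FileInfo.hash)
  //   inputDiff(sources) { report =>
  //     if (report.modified.nonEmpty) recompile(report.modified)
  //   }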

  /** Creates a tracker that provides the output of the most recent invocation of the function. */
  def lastOutput[I, O: JsonFormat](store: CacheStore)(f: (I, Option[O]) => O): I => O = { in =>
    val previous = Try { store.read[O] }.toOption
    val next = f(in, previous)
    store.write(next)
    next
  }

  /** Creates a tracker that provides the output of the most recent invocation of the function. */
  def lastOutput[I, O: JsonFormat](cacheFile: File)(f: (I, Option[O]) => O): I => O =
    lastOutput(CacheStore(cacheFile))(f)
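
  // Editor's note: an illustrative sketch for lastOutput above, not part of the
  // original source. It reuses the previously stored result when one exists;
  // `cacheDirectory` is hypothetical and the Int format comes from CacheImplicits.
  //
  //   import sbt.util.CacheImplicits._
  //   val countLines: File => Int =
  //     Tracked.lastOutput[File, Int](cacheDirectory / "line-count") {
  //       case (file, None)        => IO.readLines(file).size // first run: compute
  //       case (_, Some(previous)) => previous                // later runs: reuse
  //     }
  //   countLines(new File("build.sbt"))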

  /**
   * Creates a tracker that indicates whether the output returned from `p` has changed or not.
   *
   * {{{
   * val cachedTask = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
   *   Tracked.outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
   *     if (inChanged || outChanged) {
   *       doSomething(label, sources, classpath, outputDirectory, options, log)
   *     }
   *   }
   * }
   * cachedTask(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
   * }}}
   */
  def outputChanged[A1: JsonFormat, A2](store: CacheStore)(
      f: (Boolean, A1) => A2
  ): (() => A1) => A2 = p => {
    val cache: SingletonCache[Long] = {
      import CacheImplicits.LongJsonFormat
      implicitly
    }
    val initial = p()
    val help = new CacheHelp(cache)
    val changed = help.changed(store, initial)
    val result = f(changed, initial)
    if (changed) {
      help.save(store, p())
    }
    result
  }

  /**
   * Creates a tracker that indicates whether the output returned from `p` has changed or not.
   *
   * {{{
   * val cachedTask = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
   *   Tracked.outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
   *     if (inChanged || outChanged) {
   *       doSomething(label, sources, classpath, outputDirectory, options, log)
   *     }
   *   }
   * }
   * cachedTask(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
   * }}}
   */
  def outputChanged[A1: JsonFormat, A2](cacheFile: File)(f: (Boolean, A1) => A2): (() => A1) => A2 =
    outputChanged[A1, A2](CacheStore(cacheFile))(f)
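
  // Editor's note: a concrete sketch for outputChanged above, not part of the original
  // source. It mirrors TrackedSpec: the String formats come from CacheImplicits, while
  // `cacheDirectory`, `outputDirectory`, and `rebuildAt` are hypothetical.
  //
  //   import sbt.util.CacheImplicits._
  //   val rebuildIfMoved =
  //     Tracked.outputChanged[String, String](cacheDirectory / "output-path") { (changed, path) =>
  //       if (changed) rebuildAt(path) else path
  //     }
  //   rebuildIfMoved(() => outputDirectory.getAbsolutePath)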

  /**
   * Creates a tracker that indicates whether the arguments given to f have changed since the most
   * recent invocation.
   *
   * {{{
   * val cachedTask = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
   *   Tracked.outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
   *     if (inChanged || outChanged) {
   *       doSomething(label, sources, classpath, outputDirectory, options, log)
   *     }
   *   }
   * }
   * cachedTask(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
   * }}}
   */
  def inputChanged[I: JsonFormat: SingletonCache, O](store: CacheStore)(
      f: (Boolean, I) => O
  ): I => O = { in =>
    val cache: SingletonCache[Long] = {
      import CacheImplicits.LongJsonFormat
      implicitly
    }
    val help = new CacheHelp(cache)
    val changed = help.changed(store, in)
    val result = f(changed, in)
    if (changed)
      help.save(store, in)
    result
  }

  /**
   * Creates a tracker that indicates whether the arguments given to f have changed since the most
   * recent invocation.
   *
   * {{{
   * val cachedTask = inputChanged(cache / "inputs") { (inChanged, in: Inputs) =>
   *   Tracked.outputChanged(cache / "output") { (outChanged, outputs: FilesInfo[PlainFileInfo]) =>
   *     if (inChanged || outChanged) {
   *       doSomething(label, sources, classpath, outputDirectory, options, log)
   *     }
   *   }
   * }
   * cachedTask(inputs)(() => exists(outputDirectory.allPaths.get.toSet))
   * }}}
   */
  def inputChanged[I: JsonFormat: SingletonCache, O](cacheFile: File)(
      f: (Boolean, I) => O
  ): I => O =
    inputChanged(CacheStore(cacheFile))(f)
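
  // Editor's note: a concrete sketch for inputChanged above, not part of the original
  // source. It re-runs a hypothetical `recompile` step only when the serialized options
  // differ from the previous invocation; the String instances come from CacheImplicits.
  //
  //   import sbt.util.CacheImplicits._
  //   val compileIfOptionsChanged: String => Unit =
  //     Tracked.inputChanged[String, Unit](cacheDirectory / "scalac-options") { (changed, opts) =>
  //       if (changed) recompile(opts.split(" ").toList)
  //     }
  //   compileIfOptionsChanged(scalacOptions.mkString(" "))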

  private final class CacheHelp[I: JsonFormat](val sc: SingletonCache[Long]) {
    import CacheImplicits.implicitHashWriter
    import CacheImplicits.LongJsonFormat
    def save(store: CacheStore, value: I): Unit = {
      Hasher.hash(value) match {
        case Success(keyHash) => store.write[Long](keyHash.toLong)
        case Failure(e) =>
          if (isStrictMode) throw e
          else ()
      }
    }

    def changed(store: CacheStore, value: I): Boolean =
      Try { store.read[Long] } match {
        case Success(prev: Long) =>
          Hasher.hash(value) match {
            case Success(keyHash: Int) => keyHash.toLong != prev
            case Failure(e) =>
              if (isStrictMode) throw e
              else true
          }
        case Failure(_: EmptyCacheError) => true
        case Failure(e) =>
          if (isStrictMode) throw e
          else true
      }
  }

  private[sbt] def isStrictMode: Boolean =
    java.lang.Boolean.getBoolean("sbt.strict")
}

trait Tracked {

  /** Cleans outputs and clears the cache. */
  def clean(): Unit

}

class Timestamp(val store: CacheStore, useStartTime: Boolean)(implicit format: JsonFormat[Long])
    extends Tracked {
  def clean() = store.delete()

  /**
   * Reads the previous timestamp, evaluates the provided function,
   * and then updates the timestamp if the function completes normally.
   */
  def apply[T](f: Long => T): T = {
    val start = now()
    val result = f(readTimestamp)
    store.write(if (useStartTime) start else now())
    result
  }

  private def now() = System.currentTimeMillis

  def readTimestamp: Long =
    Try { store.read[Long] } getOrElse 0
}

@deprecated("Use Tracked.inputChanged and Tracked.outputChanged instead", "1.0.1")
class Changed[O: Equiv: JsonFormat](val store: CacheStore) extends Tracked {
  def clean() = store.delete()

  def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value => {
    if (uptodate(value))
      ifUnchanged(value)
    else {
      update(value)
      ifChanged(value)
    }
  }

  def update(value: O): Unit =
    store.write(value) // Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value))

  def uptodate(value: O): Boolean = {
    val equiv: Equiv[O] = implicitly
    equiv.equiv(value, store.read[O])
  }
}

object Difference {
  def constructor(
      defineClean: Boolean,
      filesAreOutputs: Boolean
  ): (CacheStore, FileInfo.Style) => Difference =
    (store, style) => new Difference(store, style, defineClean, filesAreOutputs)

  /**
   * Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the
   * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice:
   * before and after running the function.
   */
  val outputs = constructor(true, true)

  /**
   * Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the
   * hash/last modified time of the files as they were prior to running the function.
   */
  val inputs = constructor(false, false)

}

class Difference(
    val store: CacheStore,
    val style: FileInfo.Style,
    val defineClean: Boolean,
    val filesAreOutputs: Boolean
) extends Tracked {
  def clean() = {
    if (defineClean) IO.delete(raw(cachedFilesInfo)) else ()
    clearCache()
  }

  private def clearCache() = store.delete()

  private def cachedFilesInfo = store.read(default = FilesInfo.empty[style.F])(style.formats).files
  private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file)

  def apply[T](files: Set[File])(f: ChangeReport[File] => T): T = {
    val lastFilesInfo = cachedFilesInfo
    apply(files, lastFilesInfo)(f)(_ => files)
  }

  def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = {
    val lastFilesInfo = cachedFilesInfo
    apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles)
  }

  private def abs(files: Set[File]) = files.map(_.getAbsoluteFile)

  private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(
      f: ChangeReport[File] => T
  )(extractFiles: T => Set[File]): T = {
    val lastFiles = raw(lastFilesInfo)
    val currentFiles = abs(files)
    val currentFilesInfo = style(currentFiles)

    val report = new ChangeReport[File] {
      lazy val checked = currentFiles
      lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist.
      lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist.
      lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added
      lazy val unmodified = checked -- modified
    }

    val result = f(report)
    val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo

    store.write(info)(style.formats)

    result
  }
}
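
// Editor's note, not part of the original source: a worked illustration of the report
// built above. If the previous run recorded files {A, B} and the current invocation is
// given {B, C} with B's content unchanged, then checked = {B, C}, removed = {A},
// added = {C}, modified = {A, C} (stale previous entries plus new files), and
// unmodified = {B}.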
@ -0,0 +1,180 @@
package sbt.util

import org.scalatest.FlatSpec
import sbt.io.IO
import sbt.io.syntax._
import sbt.util.CacheImplicits._

import scala.concurrent.Promise

class TrackedSpec extends FlatSpec {
  "lastOutput" should "store the last output" in {
    withStore { store =>
      val value = 5
      val otherValue = 10

      val res0 =
        Tracked.lastOutput[Int, Int](store) {
          case (in, None) =>
            assert(in === value)
            in
          case (_, Some(_)) =>
            fail()
        }(implicitly)(value)
      assert(res0 === value)

      val res1 =
        Tracked.lastOutput[Int, Int](store) {
          case (_, None) =>
            fail()
          case (in, Some(read)) =>
            assert(in === otherValue)
            assert(read === value)
            read
        }(implicitly)(otherValue)
      assert(res1 === value)

      val res2 =
        Tracked.lastOutput[Int, Int](store) {
          case (_, None) =>
            fail()
          case (in, Some(read)) =>
            assert(in === otherValue)
            assert(read === value)
            read
        }(implicitly)(otherValue)
      assert(res2 === value)

      ()
    }
  }

  "inputChanged" should "detect that the input has not changed" in {
    withStore { store =>
      val input0 = "foo"

      val res0 =
        Tracked.inputChanged[String, String](store) {
          case (true, in) =>
            assert(in === input0)
            in
          case (false, _) =>
            fail()
        }(implicitly, implicitly)(input0)
      assert(res0 === input0)

      val res1 =
        Tracked.inputChanged[String, String](store) {
          case (true, _) =>
            fail()
          case (false, in) =>
            assert(in === input0)
            in
        }(implicitly, implicitly)(input0)
      assert(res1 === input0)

      ()
    }
  }

  it should "detect that the input has changed" in {
    withStore { store =>
      val input0 = 0
      val input1 = 1

      val res0 =
        Tracked.inputChanged[Int, Int](store) {
          case (true, in) =>
            assert(in === input0)
            in
          case (false, _) =>
            fail()
        }(implicitly, implicitly)(input0)
      assert(res0 === input0)

      val res1 =
        Tracked.inputChanged[Int, Int](store) {
          case (true, in) =>
            assert(in === input1)
            in
          case (false, _) =>
            fail()
        }(implicitly, implicitly)(input1)
      assert(res1 === input1)

      ()
    }
  }

  "outputChanged" should "detect that the output has not changed" in {
    withStore { store =>
      val beforeCompletion: String = "before-completion"
      val afterCompletion: String = "after-completion"
      val sideEffectCompleted = Promise[Unit]
      val p0: () => String = () => {
        if (sideEffectCompleted.isCompleted) {
          afterCompletion
        } else {
          sideEffectCompleted.success(())
          beforeCompletion
        }
      }
      val firstExpectedResult = "first-result"
      val secondExpectedResult = "second-result"

      val res0 =
        Tracked.outputChanged[String, String](store) {
          case (true, in) =>
            assert(in === beforeCompletion)
            firstExpectedResult
          case (false, _) =>
            fail()
        }(implicitly)(p0)
      assert(res0 === firstExpectedResult)

      val res1 =
        Tracked.outputChanged[String, String](store) {
          case (true, _) =>
            fail()
          case (false, in) =>
            assert(in === afterCompletion)
            secondExpectedResult
        }(implicitly)(p0)
      assert(res1 === secondExpectedResult)

      ()
    }
  }

  "tstamp tracker" should "have a timestamp of 0 on first invocation" in {
    withStore { store =>
      Tracked.tstamp(store) { last =>
        assert(last === 0)
      }

      ()
    }
  }

  it should "provide the last time a function has been evaluated" in {
    withStore { store =>
      Tracked.tstamp(store) { last =>
        assert(last === 0)
      }

      Tracked.tstamp(store) { last =>
        val difference = System.currentTimeMillis - last
        assert(difference < 1000)
      }

      ()
    }
  }

  private def withStore(f: CacheStore => Unit): Unit =
    IO.withTemporaryDirectory { tmp =>
      val store = CacheStore(tmp / "cache-store")
      f(store)
    }

}