From 4258189951a4fc42d08b51ee012c700290b6278f Mon Sep 17 00:00:00 2001
From: Eugene Yokota
Date: Thu, 1 May 2014 12:50:07 -0400
Subject: [PATCH] added scalariform

---
 cache/src/main/scala/sbt/Cache.scala | 459 +++---
 cache/src/main/scala/sbt/CacheIO.scala | 73 +-
 cache/src/main/scala/sbt/FileInfo.scala | 176 +-
 cache/src/main/scala/sbt/SeparatedCache.scala | 105 +-
 .../src/main/scala/sbt/ChangeReport.scala | 123 +-
 .../tracking/src/main/scala/sbt/Tracked.scala | 352 ++--
 .../main/scala/sbt/appmacro/ContextUtil.scala | 423 ++---
 .../src/main/scala/sbt/appmacro/Convert.scala | 57 +-
 .../main/scala/sbt/appmacro/Instance.scala | 370 +++--
 .../scala/sbt/appmacro/KListBuilder.scala | 113 +-
 .../scala/sbt/appmacro/MixedBuilder.scala | 23 +-
 .../scala/sbt/appmacro/TupleBuilder.scala | 81 +-
 .../scala/sbt/appmacro/TupleNBuilder.scala | 91 +-
 .../collection/src/main/scala/sbt/AList.scala | 391 +++--
 .../src/main/scala/sbt/Attributes.scala | 293 ++--
 .../src/main/scala/sbt/Classes.scala | 45 +-
 util/collection/src/main/scala/sbt/Dag.scala | 218 ++-
 .../collection/src/main/scala/sbt/HList.scala | 36 +-
 .../collection/src/main/scala/sbt/IDSet.scala | 72 +-
 .../collection/src/main/scala/sbt/INode.scala | 318 ++--
 .../collection/src/main/scala/sbt/KList.scala | 79 +-
 util/collection/src/main/scala/sbt/PMap.scala | 170 +-
 .../collection/src/main/scala/sbt/Param.scala | 39 +-
 .../src/main/scala/sbt/Positions.scala | 8 +-
 .../src/main/scala/sbt/Settings.scala | 1124 +++++++------
 util/collection/src/main/scala/sbt/Show.scala | 7 +-
 .../src/main/scala/sbt/Signal.scala | 148 +-
 .../src/main/scala/sbt/TypeFunctions.scala | 71 +-
 .../collection/src/main/scala/sbt/Types.scala | 9 +-
 util/collection/src/main/scala/sbt/Util.scala | 58 +-
 .../src/main/scala/sbt/LineReader.scala | 235 ++-
 .../main/scala/sbt/complete/Completions.scala | 243 ++-
 .../scala/sbt/complete/EditDistance.scala | 58 +-
 .../scala/sbt/complete/ExampleSource.scala | 61 +-
 .../src/main/scala/sbt/complete/History.scala | 63 +-
 .../scala/sbt/complete/HistoryCommands.scala | 115 +-
 .../scala/sbt/complete/JLineCompletion.scala | 281 ++--
 .../src/main/scala/sbt/complete/Parser.scala | 1421 ++++++++---------
 .../src/main/scala/sbt/complete/Parsers.scala | 372 +++--
 .../scala/sbt/complete/ProcessError.scala | 53 +-
 .../scala/sbt/complete/TokenCompletions.scala | 57 +-
 .../main/scala/sbt/complete/TypeString.scala | 136 +-
 .../main/scala/sbt/complete/UpperBound.scala | 72 +-
 .../src/main/scala/sbt/ErrorHandling.scala | 59 +-
 .../control/src/main/scala/sbt/ExitHook.scala | 25 +-
 .../main/scala/sbt/MessageOnlyException.scala | 18 +-
 util/log/src/main/scala/sbt/BasicLogger.scala | 21 +-
 .../src/main/scala/sbt/BufferedLogger.scala | 169 +-
 .../src/main/scala/sbt/ConsoleLogger.scala | 305 ++--
 util/log/src/main/scala/sbt/ConsoleOut.scala | 104 +-
 .../log/src/main/scala/sbt/FilterLogger.scala | 58 +-
 util/log/src/main/scala/sbt/FullLogger.scala | 49 +-
 .../src/main/scala/sbt/GlobalLogging.scala | 61 +-
 util/log/src/main/scala/sbt/Level.scala | 39 +-
 util/log/src/main/scala/sbt/LogEvent.scala | 7 +-
 util/log/src/main/scala/sbt/Logger.scala | 233 ++-
 .../log/src/main/scala/sbt/LoggerWriter.scala | 84 +-
 util/log/src/main/scala/sbt/MainLogging.scala | 81 +-
 util/log/src/main/scala/sbt/MultiLogger.scala | 83 +-
 util/log/src/main/scala/sbt/StackTrace.scala | 97 +-
 .../src/main/scala/sbt/logic/Logic.scala | 509 +++---
 .../src/main/scala/sbt/InheritInput.scala | 19 +-
 util/process/src/main/scala/sbt/Process.scala | 349 ++--
 .../src/main/scala/sbt/ProcessImpl.scala |
751 ++++----- util/process/src/main/scala/sbt/SyncVar.scala | 59 +- .../src/main/scala/sbt/Relation.scala | 255 +-- 66 files changed, 6007 insertions(+), 6127 deletions(-) diff --git a/cache/src/main/scala/sbt/Cache.scala b/cache/src/main/scala/sbt/Cache.scala index 725a103a8..c241394ba 100644 --- a/cache/src/main/scala/sbt/Cache.scala +++ b/cache/src/main/scala/sbt/Cache.scala @@ -3,271 +3,246 @@ */ package sbt -import sbinary.{CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out} -import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream} -import java.net.{URI, URL} +import sbinary.{ CollectionTypes, DefaultProtocol, Format, Input, JavaFormats, Output => Out } +import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, File, InputStream, OutputStream } +import java.net.{ URI, URL } import Types.:+: -import DefaultProtocol.{asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap} +import DefaultProtocol.{ asProduct2, asSingleton, BooleanFormat, ByteFormat, IntFormat, wrap } import scala.xml.NodeSeq -trait Cache[I,O] -{ - def apply(file: File)(i: I): Either[O, O => Unit] +trait Cache[I, O] { + def apply(file: File)(i: I): Either[O, O => Unit] } -trait SBinaryFormats extends CollectionTypes with JavaFormats -{ - implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat - implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat +trait SBinaryFormats extends CollectionTypes with JavaFormats { + implicit def urlFormat: Format[URL] = DefaultProtocol.UrlFormat + implicit def uriFormat: Format[URI] = DefaultProtocol.UriFormat } -object Cache extends CacheImplicits -{ - def cache[I,O](implicit c: Cache[I,O]): Cache[I,O] = c +object Cache extends CacheImplicits { + def cache[I, O](implicit c: Cache[I, O]): Cache[I, O] = c - def cached[I,O](file: File)(f: I => O)(implicit cache: Cache[I,O]): I => O = - in => - cache(file)(in) match - { - case Left(value) => value - case Right(store) => - val out = f(in) - store(out) - out - } + def cached[I, O](file: File)(f: I => O)(implicit cache: Cache[I, O]): I => O = + in => + cache(file)(in) match { + case Left(value) => value + case Right(store) => + val out = f(in) + store(out) + out + } - def debug[I](label: String, c: InputCache[I]): InputCache[I] = - new InputCache[I] - { - type Internal = c.Internal - def convert(i: I) = c.convert(i) - def read(from: Input) = - { - val v = c.read(from) - println(label + ".read: " + v) - v - } - def write(to: Out, v: Internal) - { - println(label + ".write: " + v) - c.write(to, v) - } - def equiv: Equiv[Internal] = new Equiv[Internal] { - def equiv(a: Internal, b: Internal)= - { - val equ = c.equiv.equiv(a,b) - println(label + ".equiv(" + a + ", " + b +"): " + equ) - equ - } - } - } + def debug[I](label: String, c: InputCache[I]): InputCache[I] = + new InputCache[I] { + type Internal = c.Internal + def convert(i: I) = c.convert(i) + def read(from: Input) = + { + val v = c.read(from) + println(label + ".read: " + v) + v + } + def write(to: Out, v: Internal) { + println(label + ".write: " + v) + c.write(to, v) + } + def equiv: Equiv[Internal] = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + { + val equ = c.equiv.equiv(a, b) + println(label + ".equiv(" + a + ", " + b + "): " + equ) + equ + } + } + } } trait CacheImplicits extends BasicCacheImplicits with SBinaryFormats with HListCacheImplicits with UnionImplicits -trait BasicCacheImplicits -{ - implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): 
Cache[I,O] = - new BasicCache()(in, outFormat) - def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq) +trait BasicCacheImplicits { + implicit def basicCache[I, O](implicit in: InputCache[I], outFormat: Format[O]): Cache[I, O] = + new BasicCache()(in, outFormat) + def basicInput[I](implicit eq: Equiv[I], fmt: Format[I]): InputCache[I] = InputCache.basicInputCache(fmt, eq) - def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b } - - implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] = - new InputCache[Option[T]] - { - type Internal = Option[t.Internal] - def convert(v: Option[T]): Internal = v.map(x => t.convert(x)) - def read(from: Input) = - { - val isDefined = BooleanFormat.reads(from) - if(isDefined) Some(t.read(from)) else None - } - def write(to: Out, j: Internal): Unit = - { - BooleanFormat.writes(to, j.isDefined) - j foreach { x => t.write(to, x) } - } - def equiv = optEquiv(t.equiv) - } - - def wrapEquiv[S,T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] = - new Equiv[S] { - def equiv(a: S, b: S) = - eqT.equiv( f(a), f(b) ) - } + def defaultEquiv[T]: Equiv[T] = new Equiv[T] { def equiv(a: T, b: T) = a == b } - implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] = - new Equiv[Option[T]] { - def equiv(a: Option[T], b: Option[T]) = - (a,b) match - { - case (None, None) => true - case (Some(va), Some(vb)) => t.equiv(va, vb) - case _ => false - } - } - implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq) - implicit def uriEquiv: Equiv[URI] = defaultEquiv - implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv - implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv + implicit def optInputCache[T](implicit t: InputCache[T]): InputCache[Option[T]] = + new InputCache[Option[T]] { + type Internal = Option[t.Internal] + def convert(v: Option[T]): Internal = v.map(x => t.convert(x)) + def read(from: Input) = + { + val isDefined = BooleanFormat.reads(from) + if (isDefined) Some(t.read(from)) else None + } + def write(to: Out, j: Internal): Unit = + { + BooleanFormat.writes(to, j.isDefined) + j foreach { x => t.write(to, x) } + } + def equiv = optEquiv(t.equiv) + } - def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] = - { - val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray } - val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs)) - wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat) - } - - implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq) + def wrapEquiv[S, T](f: S => T)(implicit eqT: Equiv[T]): Equiv[S] = + new Equiv[S] { + def equiv(a: S, b: S) = + eqT.equiv(f(a), f(b)) + } - implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] = - new InputCache[Seq[T]] - { - type Internal = Seq[t.Internal] - def convert(v: Seq[T]) = v.map(x => t.convert(x)) - def read(from: Input) = - { - val size = IntFormat.reads(from) - def next(left: Int, acc: List[t.Internal]): Internal = - if(left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc) - next(size, Nil) - } - def write(to: Out, vs: Internal) - { - val size = vs.length - IntFormat.writes(to, size) - for(v <- vs) t.write(to, v) - } - def equiv: Equiv[Internal] = seqEquiv(t.equiv) - } + implicit def optEquiv[T](implicit t: Equiv[T]): Equiv[Option[T]] = + new 
Equiv[Option[T]] { + def equiv(a: Option[T], b: Option[T]) = + (a, b) match { + case (None, None) => true + case (Some(va), Some(vb)) => t.equiv(va, vb) + case _ => false + } + } + implicit def urlEquiv(implicit uriEq: Equiv[URI]): Equiv[URL] = wrapEquiv[URL, URI](_.toURI)(uriEq) + implicit def uriEquiv: Equiv[URI] = defaultEquiv + implicit def stringSetEquiv: Equiv[Set[String]] = defaultEquiv + implicit def stringMapEquiv: Equiv[Map[String, String]] = defaultEquiv - implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = - wrapEquiv( (x: Array[T]) => x :Seq[T] )(seqEquiv[T](t)) + def streamFormat[T](write: (T, OutputStream) => Unit, f: InputStream => T): Format[T] = + { + val toBytes = (t: T) => { val bos = new ByteArrayOutputStream; write(t, bos); bos.toByteArray } + val fromBytes = (bs: Array[Byte]) => f(new ByteArrayInputStream(bs)) + wrap(toBytes, fromBytes)(DefaultProtocol.ByteArrayFormat) + } - implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] = - new Equiv[Seq[T]] - { - def equiv(a: Seq[T], b: Seq[T]) = - a.length == b.length && - ((a,b).zipped forall t.equiv) - } - implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] = - wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat) - - def wrapIn[I,J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] = - new InputCache[I] - { - type Internal = jCache.Internal - def convert(i: I) = jCache.convert(f(i)) - def read(from: Input) = jCache.read(from) - def write(to: Out, j: Internal) = jCache.write(to, j) - def equiv = jCache.equiv - } + implicit def xmlInputCache(implicit strEq: InputCache[String]): InputCache[NodeSeq] = wrapIn[NodeSeq, String](_.toString, strEq) - def singleton[T](t: T): InputCache[T] = - basicInput(trueEquiv, asSingleton(t)) + implicit def seqCache[T](implicit t: InputCache[T]): InputCache[Seq[T]] = + new InputCache[Seq[T]] { + type Internal = Seq[t.Internal] + def convert(v: Seq[T]) = v.map(x => t.convert(x)) + def read(from: Input) = + { + val size = IntFormat.reads(from) + def next(left: Int, acc: List[t.Internal]): Internal = + if (left <= 0) acc.reverse else next(left - 1, t.read(from) :: acc) + next(size, Nil) + } + def write(to: Out, vs: Internal) { + val size = vs.length + IntFormat.writes(to, size) + for (v <- vs) t.write(to, v) + } + def equiv: Equiv[Internal] = seqEquiv(t.equiv) + } - def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true } + implicit def arrEquiv[T](implicit t: Equiv[T]): Equiv[Array[T]] = + wrapEquiv((x: Array[T]) => x: Seq[T])(seqEquiv[T](t)) + + implicit def seqEquiv[T](implicit t: Equiv[T]): Equiv[Seq[T]] = + new Equiv[Seq[T]] { + def equiv(a: Seq[T], b: Seq[T]) = + a.length == b.length && + ((a, b).zipped forall t.equiv) + } + implicit def seqFormat[T](implicit t: Format[T]): Format[Seq[T]] = + wrap[Seq[T], List[T]](_.toList, _.toSeq)(DefaultProtocol.listFormat) + + def wrapIn[I, J](implicit f: I => J, jCache: InputCache[J]): InputCache[I] = + new InputCache[I] { + type Internal = jCache.Internal + def convert(i: I) = jCache.convert(f(i)) + def read(from: Input) = jCache.read(from) + def write(to: Out, j: Internal) = jCache.write(to, j) + def equiv = jCache.equiv + } + + def singleton[T](t: T): InputCache[T] = + basicInput(trueEquiv, asSingleton(t)) + + def trueEquiv[T] = new Equiv[T] { def equiv(a: T, b: T) = true } } -trait HListCacheImplicits -{ - implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = - new InputCache[H :+: T] - { - type Internal = (head.Internal, 
tail.Internal) - def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail)) - def read(from: Input) = - { - val h = head.read(from) - val t = tail.read(from) - (h, t) - } - def write(to: Out, j: Internal) - { - head.write(to, j._1) - tail.write(to, j._2) - } - def equiv = new Equiv[Internal] - { - def equiv(a: Internal, b: Internal) = - head.equiv.equiv(a._1, b._1) && - tail.equiv.equiv(a._2, b._2) - } - } - - implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil : HNil) +trait HListCacheImplicits { + implicit def hConsCache[H, T <: HList](implicit head: InputCache[H], tail: InputCache[T]): InputCache[H :+: T] = + new InputCache[H :+: T] { + type Internal = (head.Internal, tail.Internal) + def convert(in: H :+: T) = (head.convert(in.head), tail.convert(in.tail)) + def read(from: Input) = + { + val h = head.read(from) + val t = tail.read(from) + (h, t) + } + def write(to: Out, j: Internal) { + head.write(to, j._1) + tail.write(to, j._2) + } + def equiv = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + head.equiv.equiv(a._1, b._1) && + tail.equiv.equiv(a._2, b._2) + } + } - implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] { - def reads(from: Input) = - { - val h = head.reads(from) - val t = tail.reads(from) - HCons(h, t) - } - def writes(to: Out, hc: H :+: T) - { - head.writes(to, hc.head) - tail.writes(to, hc.tail) - } - } + implicit def hNilCache: InputCache[HNil] = Cache.singleton(HNil: HNil) - implicit def hNilFormat: Format[HNil] = asSingleton(HNil) + implicit def hConsFormat[H, T <: HList](implicit head: Format[H], tail: Format[T]): Format[H :+: T] = new Format[H :+: T] { + def reads(from: Input) = + { + val h = head.reads(from) + val t = tail.reads(from) + HCons(h, t) + } + def writes(to: Out, hc: H :+: T) { + head.writes(to, hc.head) + tail.writes(to, hc.tail) + } + } + + implicit def hNilFormat: Format[HNil] = asSingleton(HNil) } -trait UnionImplicits -{ - def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] = - new InputCache[UB] - { - type Internal = Found[_] - def convert(in: UB) = uc.find(in) - def read(in: Input) = - { - val index = ByteFormat.reads(in) - val (cache, clazz) = uc.at(index) - val value = cache.read(in) - new Found[cache.Internal](cache, clazz, value, index) - } - def write(to: Out, i: Internal) - { - def write0[I](f: Found[I]) - { - ByteFormat.writes(to, f.index.toByte) - f.cache.write(to, f.value) - } - write0(i) - } - def equiv: Equiv[Internal] = new Equiv[Internal] - { - def equiv(a: Internal, b: Internal) = - { - if(a.clazz == b.clazz) - force(a.cache.equiv, a.value, b.value) - else - false - } - def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T]) - } - } +trait UnionImplicits { + def unionInputCache[UB, HL <: HList](implicit uc: UnionCache[HL, UB]): InputCache[UB] = + new InputCache[UB] { + type Internal = Found[_] + def convert(in: UB) = uc.find(in) + def read(in: Input) = + { + val index = ByteFormat.reads(in) + val (cache, clazz) = uc.at(index) + val value = cache.read(in) + new Found[cache.Internal](cache, clazz, value, index) + } + def write(to: Out, i: Internal) { + def write0[I](f: Found[I]) { + ByteFormat.writes(to, f.index.toByte) + f.cache.write(to, f.value) + } + write0(i) + } + def equiv: Equiv[Internal] = new Equiv[Internal] { + def equiv(a: Internal, b: Internal) = + { + if (a.clazz == b.clazz) + force(a.cache.equiv, a.value, b.value) + else + false + } + 
def force[T <: UB, UB](e: Equiv[T], a: UB, b: UB) = e.equiv(a.asInstanceOf[T], b.asInstanceOf[T]) + } + } - implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] = - new UnionCache[H :+: T, UB] - { - val size = 1 + t.size - def c = mf.runtimeClass - def find(value: UB): Found[_] = - if(c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value) - def at(i: Int): (InputCache[_ <: UB], Class[_]) = if(size == i + 1) (head, c) else t.at(i) - } + implicit def unionCons[H <: UB, UB, T <: HList](implicit head: InputCache[H], mf: Manifest[H], t: UnionCache[T, UB]): UnionCache[H :+: T, UB] = + new UnionCache[H :+: T, UB] { + val size = 1 + t.size + def c = mf.runtimeClass + def find(value: UB): Found[_] = + if (c.isInstance(value)) new Found[head.Internal](head, c, head.convert(value.asInstanceOf[H]), size - 1) else t.find(value) + def at(i: Int): (InputCache[_ <: UB], Class[_]) = if (size == i + 1) (head, c) else t.at(i) + } - implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] { - def size = 0 - def find(value: UB) = sys.error("No valid sum type for " + value) - def at(i: Int) = sys.error("Invalid union index " + i) - } + implicit def unionNil[UB]: UnionCache[HNil, UB] = new UnionCache[HNil, UB] { + def size = 0 + def find(value: UB) = sys.error("No valid sum type for " + value) + def at(i: Int) = sys.error("Invalid union index " + i) + } - final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int) - sealed trait UnionCache[HL <: HList, UB] - { - def size: Int - def at(i: Int): (InputCache[_ <: UB], Class[_]) - def find(forValue: UB): Found[_] - } + final class Found[I](val cache: InputCache[_] { type Internal = I }, val clazz: Class[_], val value: I, val index: Int) + sealed trait UnionCache[HL <: HList, UB] { + def size: Int + def at(i: Int): (InputCache[_ <: UB], Class[_]) + def find(forValue: UB): Found[_] + } } \ No newline at end of file diff --git a/cache/src/main/scala/sbt/CacheIO.scala b/cache/src/main/scala/sbt/CacheIO.scala index ac698c24e..a50da7ee7 100644 --- a/cache/src/main/scala/sbt/CacheIO.scala +++ b/cache/src/main/scala/sbt/CacheIO.scala @@ -3,43 +3,42 @@ */ package sbt -import java.io.{File, FileNotFoundException} -import sbinary.{DefaultProtocol, Format, Operations} +import java.io.{ File, FileNotFoundException } +import sbinary.{ DefaultProtocol, Format, Operations } import scala.reflect.Manifest -object CacheIO -{ - def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] = - toBytes[T](value)(format, mf) - def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] = - Operations.toByteArray(value)(stampedFormat(format)) - def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T = - fromBytes(default)(bytes)(format, mf) - def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T = - if(bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format)) - - def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T = - fromFile(file, default)(format, mf) - def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T = - fromFile[T](file) getOrElse default - def fromFile[T](file: 
File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] = - try { Some( Operations.fromFile(file)(stampedFormat(format)) ) } - catch { case e: Exception => None } - - def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit = - toFile(value)(file)(format, mf) - def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit = - { - IO.createDirectory(file.getParentFile) - Operations.toFile(value)(file)(stampedFormat(format)) - } - def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] = - { - import DefaultProtocol._ - withStamp(stamp(format))(format) - } - def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf) - def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode - def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf - def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf +object CacheIO { + def toBytes[T](format: Format[T])(value: T)(implicit mf: Manifest[Format[T]]): Array[Byte] = + toBytes[T](value)(format, mf) + def toBytes[T](value: T)(implicit format: Format[T], mf: Manifest[Format[T]]): Array[Byte] = + Operations.toByteArray(value)(stampedFormat(format)) + def fromBytes[T](format: Format[T], default: => T)(bytes: Array[Byte])(implicit mf: Manifest[Format[T]]): T = + fromBytes(default)(bytes)(format, mf) + def fromBytes[T](default: => T)(bytes: Array[Byte])(implicit format: Format[T], mf: Manifest[Format[T]]): T = + if (bytes.isEmpty) default else Operations.fromByteArray(bytes)(stampedFormat(format)) + + def fromFile[T](format: Format[T], default: => T)(file: File)(implicit mf: Manifest[Format[T]]): T = + fromFile(file, default)(format, mf) + def fromFile[T](file: File, default: => T)(implicit format: Format[T], mf: Manifest[Format[T]]): T = + fromFile[T](file) getOrElse default + def fromFile[T](file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Option[T] = + try { Some(Operations.fromFile(file)(stampedFormat(format))) } + catch { case e: Exception => None } + + def toFile[T](format: Format[T])(value: T)(file: File)(implicit mf: Manifest[Format[T]]): Unit = + toFile(value)(file)(format, mf) + def toFile[T](value: T)(file: File)(implicit format: Format[T], mf: Manifest[Format[T]]): Unit = + { + IO.createDirectory(file.getParentFile) + Operations.toFile(value)(file)(stampedFormat(format)) + } + def stampedFormat[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Format[T] = + { + import DefaultProtocol._ + withStamp(stamp(format))(format) + } + def stamp[T](format: Format[T])(implicit mf: Manifest[Format[T]]): Int = typeHash(mf) + def typeHash[T](implicit mf: Manifest[T]) = mf.toString.hashCode + def manifest[T](implicit mf: Manifest[T]): Manifest[T] = mf + def objManifest[T](t: T)(implicit mf: Manifest[T]): Manifest[T] = mf } \ No newline at end of file diff --git a/cache/src/main/scala/sbt/FileInfo.scala b/cache/src/main/scala/sbt/FileInfo.scala index e4706c1fa..c735adcb0 100644 --- a/cache/src/main/scala/sbt/FileInfo.scala +++ b/cache/src/main/scala/sbt/FileInfo.scala @@ -3,26 +3,22 @@ */ package sbt -import java.io.{File, IOException} -import sbinary.{DefaultProtocol, Format} +import java.io.{ File, IOException } +import sbinary.{ DefaultProtocol, Format } import DefaultProtocol._ import scala.reflect.Manifest -sealed trait FileInfo extends NotNull -{ - val file: File +sealed trait FileInfo extends NotNull { + val file: File } -sealed trait HashFileInfo extends FileInfo -{ - val 
hash: List[Byte] +sealed trait HashFileInfo extends FileInfo { + val hash: List[Byte] } -sealed trait ModifiedFileInfo extends FileInfo -{ - val lastModified: Long +sealed trait ModifiedFileInfo extends FileInfo { + val lastModified: Long } -sealed trait PlainFileInfo extends FileInfo -{ - def exists: Boolean +sealed trait PlainFileInfo extends FileInfo { + def exists: Boolean } sealed trait HashModifiedFileInfo extends HashFileInfo with ModifiedFileInfo @@ -31,90 +27,80 @@ private final case class FileHash(file: File, hash: List[Byte]) extends HashFile private final case class FileModified(file: File, lastModified: Long) extends ModifiedFileInfo private final case class FileHashModified(file: File, hash: List[Byte], lastModified: Long) extends HashModifiedFileInfo -object FileInfo -{ - implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache - implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache - implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache - implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache +object FileInfo { + implicit def existsInputCache: InputCache[PlainFileInfo] = exists.infoInputCache + implicit def modifiedInputCache: InputCache[ModifiedFileInfo] = lastModified.infoInputCache + implicit def hashInputCache: InputCache[HashFileInfo] = hash.infoInputCache + implicit def fullInputCache: InputCache[HashModifiedFileInfo] = full.infoInputCache - sealed trait Style - { - type F <: FileInfo - implicit def apply(file: File): F - implicit def unapply(info: F): File = info.file - implicit val format: Format[F] - import Cache._ - implicit def fileInfoEquiv: Equiv[F] = defaultEquiv - def infoInputCache: InputCache[F] = basicInput - implicit def fileInputCache: InputCache[File] = wrapIn[File,F] - } - object full extends Style - { - type F = HashModifiedFileInfo - implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified) - def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified) - implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled) - } - object hash extends Style - { - type F = HashFileInfo - implicit def apply(file: File): HashFileInfo = make(file, computeHash(file)) - def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash) - implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled) - private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil } - } - object lastModified extends Style - { - type F = ModifiedFileInfo - implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified) - def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified) - implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled) - } - object exists extends Style - { - type F = PlainFileInfo - implicit def apply(file: File): PlainFileInfo = make(file) - def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) } - implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists)) - } + sealed trait Style { + type F <: FileInfo + implicit def apply(file: File): F + implicit def 
unapply(info: F): File = info.file + implicit val format: Format[F] + import Cache._ + implicit def fileInfoEquiv: Equiv[F] = defaultEquiv + def infoInputCache: InputCache[F] = basicInput + implicit def fileInputCache: InputCache[File] = wrapIn[File, F] + } + object full extends Style { + type F = HashModifiedFileInfo + implicit def apply(file: File): HashModifiedFileInfo = make(file, Hash(file).toList, file.lastModified) + def make(file: File, hash: List[Byte], lastModified: Long): HashModifiedFileInfo = FileHashModified(file.getAbsoluteFile, hash, lastModified) + implicit val format: Format[HashModifiedFileInfo] = wrap(f => (f.file, f.hash, f.lastModified), (make _).tupled) + } + object hash extends Style { + type F = HashFileInfo + implicit def apply(file: File): HashFileInfo = make(file, computeHash(file)) + def make(file: File, hash: List[Byte]): HashFileInfo = FileHash(file.getAbsoluteFile, hash) + implicit val format: Format[HashFileInfo] = wrap(f => (f.file, f.hash), (make _).tupled) + private def computeHash(file: File): List[Byte] = try { Hash(file).toList } catch { case e: Exception => Nil } + } + object lastModified extends Style { + type F = ModifiedFileInfo + implicit def apply(file: File): ModifiedFileInfo = make(file, file.lastModified) + def make(file: File, lastModified: Long): ModifiedFileInfo = FileModified(file.getAbsoluteFile, lastModified) + implicit val format: Format[ModifiedFileInfo] = wrap(f => (f.file, f.lastModified), (make _).tupled) + } + object exists extends Style { + type F = PlainFileInfo + implicit def apply(file: File): PlainFileInfo = make(file) + def make(file: File): PlainFileInfo = { val abs = file.getAbsoluteFile; PlainFile(abs, abs.exists) } + implicit val format: Format[PlainFileInfo] = asProduct2[PlainFileInfo, File, Boolean](PlainFile.apply)(x => (x.file, x.exists)) + } } -final case class FilesInfo[F <: FileInfo] private(files: Set[F]) -object FilesInfo -{ - sealed abstract class Style - { - type F <: FileInfo - val fileStyle: FileInfo.Style { type F = Style.this.F } +final case class FilesInfo[F <: FileInfo] private (files: Set[F]) +object FilesInfo { + sealed abstract class Style { + type F <: FileInfo + val fileStyle: FileInfo.Style { type F = Style.this.F } - //def manifest: Manifest[F] = fileStyle.manifest - implicit def apply(files: Set[File]): FilesInfo[F] - implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file) - implicit val formats: Format[FilesInfo[F]] - val manifest: Manifest[Format[FilesInfo[F]]] - def empty: FilesInfo[F] = new FilesInfo[F](Set.empty) - import Cache._ - def infosInputCache: InputCache[FilesInfo[F]] = basicInput - implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File],FilesInfo[F]] - implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv - } - private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI }) - (implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style - { - type F = FI - val fileStyle: FileInfo.Style { type F = FI } = style - private implicit val infoFormat: Format[FI] = fileStyle.format - implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo( files.map(_.getAbsoluteFile).map(fileStyle.apply) ) - implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs)) - } - lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full) - lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash) - lazy val lastModified: Style { type F = 
ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified) - lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists) + //def manifest: Manifest[F] = fileStyle.manifest + implicit def apply(files: Set[File]): FilesInfo[F] + implicit def unapply(info: FilesInfo[F]): Set[File] = info.files.map(_.file) + implicit val formats: Format[FilesInfo[F]] + val manifest: Manifest[Format[FilesInfo[F]]] + def empty: FilesInfo[F] = new FilesInfo[F](Set.empty) + import Cache._ + def infosInputCache: InputCache[FilesInfo[F]] = basicInput + implicit def filesInputCache: InputCache[Set[File]] = wrapIn[Set[File], FilesInfo[F]] + implicit def filesInfoEquiv: Equiv[FilesInfo[F]] = defaultEquiv + } + private final class BasicStyle[FI <: FileInfo](style: FileInfo.Style { type F = FI })(implicit val manifest: Manifest[Format[FilesInfo[FI]]]) extends Style { + type F = FI + val fileStyle: FileInfo.Style { type F = FI } = style + private implicit val infoFormat: Format[FI] = fileStyle.format + implicit def apply(files: Set[File]): FilesInfo[F] = FilesInfo(files.map(_.getAbsoluteFile).map(fileStyle.apply)) + implicit val formats: Format[FilesInfo[F]] = wrap(_.files, (fs: Set[F]) => new FilesInfo(fs)) + } + lazy val full: Style { type F = HashModifiedFileInfo } = new BasicStyle(FileInfo.full) + lazy val hash: Style { type F = HashFileInfo } = new BasicStyle(FileInfo.hash) + lazy val lastModified: Style { type F = ModifiedFileInfo } = new BasicStyle(FileInfo.lastModified) + lazy val exists: Style { type F = PlainFileInfo } = new BasicStyle(FileInfo.exists) - implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache - implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache - implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache - implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache + implicit def existsInputsCache: InputCache[FilesInfo[PlainFileInfo]] = exists.infosInputCache + implicit def hashInputsCache: InputCache[FilesInfo[HashFileInfo]] = hash.infosInputCache + implicit def modifiedInputsCache: InputCache[FilesInfo[ModifiedFileInfo]] = lastModified.infosInputCache + implicit def fullInputsCache: InputCache[FilesInfo[HashModifiedFileInfo]] = full.infosInputCache } \ No newline at end of file diff --git a/cache/src/main/scala/sbt/SeparatedCache.scala b/cache/src/main/scala/sbt/SeparatedCache.scala index a126229bd..9d11f1f3c 100644 --- a/cache/src/main/scala/sbt/SeparatedCache.scala +++ b/cache/src/main/scala/sbt/SeparatedCache.scala @@ -4,64 +4,59 @@ package sbt import Types.:+: -import sbinary.{DefaultProtocol, Format, Input, Output => Out} +import sbinary.{ DefaultProtocol, Format, Input, Output => Out } import DefaultProtocol.ByteFormat -import java.io.{File, InputStream, OutputStream} +import java.io.{ File, InputStream, OutputStream } -trait InputCache[I] -{ - type Internal - def convert(i: I): Internal - def read(from: Input): Internal - def write(to: Out, j: Internal): Unit - def equiv: Equiv[Internal] +trait InputCache[I] { + type Internal + def convert(i: I): Internal + def read(from: Input): Internal + def write(to: Out, j: Internal): Unit + def equiv: Equiv[Internal] } -object InputCache -{ - implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = - new InputCache[I] - { - type Internal = I - def convert(i: I) = i - def read(from: Input): I = fmt.reads(from) - def write(to: Out, i: I) 
= fmt.writes(to, i) - def equiv = eqv - } - def lzy[I](mkIn: => InputCache[I]): InputCache[I] = - new InputCache[I] - { - lazy val ic = mkIn - type Internal = ic.Internal - def convert(i: I) = ic convert i - def read(from: Input): ic.Internal = ic.read(from) - def write(to: Out, i: ic.Internal) = ic.write(to, i) - def equiv = ic.equiv - } +object InputCache { + implicit def basicInputCache[I](implicit fmt: Format[I], eqv: Equiv[I]): InputCache[I] = + new InputCache[I] { + type Internal = I + def convert(i: I) = i + def read(from: Input): I = fmt.reads(from) + def write(to: Out, i: I) = fmt.writes(to, i) + def equiv = eqv + } + def lzy[I](mkIn: => InputCache[I]): InputCache[I] = + new InputCache[I] { + lazy val ic = mkIn + type Internal = ic.Internal + def convert(i: I) = ic convert i + def read(from: Input): ic.Internal = ic.read(from) + def write(to: Out, i: ic.Internal) = ic.write(to, i) + def equiv = ic.equiv + } } -class BasicCache[I,O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I,O] -{ - def apply(file: File)(in: I) = - { - val j = input.convert(in) - try { applyImpl(file, j) } - catch { case e: Exception => Right(update(file)(j)) } - } - protected def applyImpl(file: File, in: input.Internal) = - { - Using.fileInputStream(file) { stream => - val previousIn = input.read(stream) - if(input.equiv.equiv(in, previousIn)) - Left(outFormat.reads(stream)) - else - Right(update(file)(in)) - } - } - protected def update(file: File)(in: input.Internal) = (out: O) => - { - Using.fileOutputStream(false)(file) { stream => - input.write(stream, in) - outFormat.writes(stream, out) - } - } +class BasicCache[I, O](implicit input: InputCache[I], outFormat: Format[O]) extends Cache[I, O] { + def apply(file: File)(in: I) = + { + val j = input.convert(in) + try { applyImpl(file, j) } + catch { case e: Exception => Right(update(file)(j)) } + } + protected def applyImpl(file: File, in: input.Internal) = + { + Using.fileInputStream(file) { stream => + val previousIn = input.read(stream) + if (input.equiv.equiv(in, previousIn)) + Left(outFormat.reads(stream)) + else + Right(update(file)(in)) + } + } + protected def update(file: File)(in: input.Internal) = (out: O) => + { + Using.fileOutputStream(false)(file) { stream => + input.write(stream, in) + outFormat.writes(stream, out) + } + } } \ No newline at end of file diff --git a/cache/tracking/src/main/scala/sbt/ChangeReport.scala b/cache/tracking/src/main/scala/sbt/ChangeReport.scala index 634650f20..8502f9d3f 100644 --- a/cache/tracking/src/main/scala/sbt/ChangeReport.scala +++ b/cache/tracking/src/main/scala/sbt/ChangeReport.scala @@ -3,71 +3,68 @@ */ package sbt -object ChangeReport -{ - def modified[T](files: Set[T]) = - new EmptyChangeReport[T] - { - override def checked = files - override def modified = files - override def markAllModified = this - } - def unmodified[T](files: Set[T]) = - new EmptyChangeReport[T] - { - override def checked = files - override def unmodified = files - } +object ChangeReport { + def modified[T](files: Set[T]) = + new EmptyChangeReport[T] { + override def checked = files + override def modified = files + override def markAllModified = this + } + def unmodified[T](files: Set[T]) = + new EmptyChangeReport[T] { + override def checked = files + override def unmodified = files + } } /** The result of comparing some current set of objects against a previous set of objects.*/ -trait ChangeReport[T] extends NotNull -{ - /** The set of all of the objects in the current set.*/ - def checked: Set[T] - /** All of 
the objects that are in the same state in the current and reference sets.*/ - def unmodified: Set[T] - /** All checked objects that are not in the same state as the reference. This includes objects that are in both - * sets but have changed and files that are only in one set.*/ - def modified: Set[T] // all changes, including added - /** All objects that are only in the current set.*/ - def added: Set[T] - /** All objects only in the previous set*/ - def removed: Set[T] - def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other) - /** Generate a new report with this report's unmodified set included in the new report's modified set. The new report's - * unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report. */ - def markAllModified: ChangeReport[T] = - new ChangeReport[T] - { - def checked = ChangeReport.this.checked - def unmodified = Set.empty[T] - def modified = ChangeReport.this.checked - def added = ChangeReport.this.added - def removed = ChangeReport.this.removed - override def markAllModified = this - } - override def toString = - { - val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed") - val sets = List(checked, modified, unmodified, added, removed) - val keyValues = labels.zip(sets).map{ case (label, set) => label + ": " + set.mkString(", ") } - keyValues.mkString("Change report:\n\t", "\n\t", "") - } +trait ChangeReport[T] extends NotNull { + /** The set of all of the objects in the current set.*/ + def checked: Set[T] + /** All of the objects that are in the same state in the current and reference sets.*/ + def unmodified: Set[T] + /** + * All checked objects that are not in the same state as the reference. This includes objects that are in both + * sets but have changed and files that are only in one set. + */ + def modified: Set[T] // all changes, including added + /** All objects that are only in the current set.*/ + def added: Set[T] + /** All objects only in the previous set*/ + def removed: Set[T] + def +++(other: ChangeReport[T]): ChangeReport[T] = new CompoundChangeReport(this, other) + /** + * Generate a new report with this report's unmodified set included in the new report's modified set. The new report's + * unmodified set is empty. The new report's added, removed, and checked sets are the same as in this report. 
+ */ + def markAllModified: ChangeReport[T] = + new ChangeReport[T] { + def checked = ChangeReport.this.checked + def unmodified = Set.empty[T] + def modified = ChangeReport.this.checked + def added = ChangeReport.this.added + def removed = ChangeReport.this.removed + override def markAllModified = this + } + override def toString = + { + val labels = List("Checked", "Modified", "Unmodified", "Added", "Removed") + val sets = List(checked, modified, unmodified, added, removed) + val keyValues = labels.zip(sets).map { case (label, set) => label + ": " + set.mkString(", ") } + keyValues.mkString("Change report:\n\t", "\n\t", "") + } } -class EmptyChangeReport[T] extends ChangeReport[T] -{ - def checked = Set.empty[T] - def unmodified = Set.empty[T] - def modified = Set.empty[T] - def added = Set.empty[T] - def removed = Set.empty[T] - override def toString = "No changes" +class EmptyChangeReport[T] extends ChangeReport[T] { + def checked = Set.empty[T] + def unmodified = Set.empty[T] + def modified = Set.empty[T] + def added = Set.empty[T] + def removed = Set.empty[T] + override def toString = "No changes" } -private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] -{ - lazy val checked = a.checked ++ b.checked - lazy val unmodified = a.unmodified ++ b.unmodified - lazy val modified = a.modified ++ b.modified - lazy val added = a.added ++ b.added - lazy val removed = a.removed ++ b.removed +private class CompoundChangeReport[T](a: ChangeReport[T], b: ChangeReport[T]) extends ChangeReport[T] { + lazy val checked = a.checked ++ b.checked + lazy val unmodified = a.unmodified ++ b.unmodified + lazy val modified = a.modified ++ b.modified + lazy val added = a.added ++ b.added + lazy val removed = a.removed ++ b.removed } \ No newline at end of file diff --git a/cache/tracking/src/main/scala/sbt/Tracked.scala b/cache/tracking/src/main/scala/sbt/Tracked.scala index fb0747ed9..c851ef9a5 100644 --- a/cache/tracking/src/main/scala/sbt/Tracked.scala +++ b/cache/tracking/src/main/scala/sbt/Tracked.scala @@ -4,204 +4,202 @@ package sbt import java.io.File -import CacheIO.{fromFile, toFile} +import CacheIO.{ fromFile, toFile } import sbinary.Format import scala.reflect.Manifest import scala.collection.mutable -import IO.{delete, read, write} +import IO.{ delete, read, write } +object Tracked { + /** + * Creates a tracker that provides the last time it was evaluated. + * If 'useStartTime' is true, the recorded time is the start of the evaluated function. + * If 'useStartTime' is false, the recorded time is when the evaluated function completes. + * In both cases, the timestamp is not updated if the function throws an exception. + */ + def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime) + /** Creates a tracker that only evaluates a function when the input has changed.*/ + //def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] = + // new Changed[O](cacheFile) -object Tracked -{ - /** Creates a tracker that provides the last time it was evaluated. - * If 'useStartTime' is true, the recorded time is the start of the evaluated function. - * If 'useStartTime' is false, the recorded time is when the evaluated function completes. 
- * In both cases, the timestamp is not updated if the function throws an exception.*/ - def tstamp(cacheFile: File, useStartTime: Boolean = true): Timestamp = new Timestamp(cacheFile, useStartTime) - /** Creates a tracker that only evaluates a function when the input has changed.*/ - //def changed[O](cacheFile: File)(implicit format: Format[O], equiv: Equiv[O]): Changed[O] = - // new Changed[O](cacheFile) - - /** Creates a tracker that provides the difference between a set of input files for successive invocations.*/ - def diffInputs(cache: File, style: FilesInfo.Style): Difference = - Difference.inputs(cache, style) - /** Creates a tracker that provides the difference between a set of output files for successive invocations.*/ - def diffOutputs(cache: File, style: FilesInfo.Style): Difference = - Difference.outputs(cache, style) + /** Creates a tracker that provides the difference between a set of input files for successive invocations.*/ + def diffInputs(cache: File, style: FilesInfo.Style): Difference = + Difference.inputs(cache, style) + /** Creates a tracker that provides the difference between a set of output files for successive invocations.*/ + def diffOutputs(cache: File, style: FilesInfo.Style): Difference = + Difference.outputs(cache, style) - def lastOutput[I,O](cacheFile: File)(f: (I,Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in => - { - val previous: Option[O] = fromFile[O](cacheFile) - val next = f(in, previous) - toFile(next)(cacheFile) - next - } + def lastOutput[I, O](cacheFile: File)(f: (I, Option[O]) => O)(implicit o: Format[O], mf: Manifest[Format[O]]): I => O = in => + { + val previous: Option[O] = fromFile[O](cacheFile) + val next = f(in, previous) + toFile(next)(cacheFile) + next + } - def inputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in => - { - val help = new CacheHelp(ic) - val conv = help.convert(in) - val changed = help.changed(cacheFile, conv) - val result = f(changed, in) - - if(changed) - help.save(cacheFile, conv) + def inputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): I => O = in => + { + val help = new CacheHelp(ic) + val conv = help.convert(in) + val changed = help.changed(cacheFile, conv) + val result = f(changed, in) - result - } - def outputChanged[I,O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in => - { - val initial = in() - val help = new CacheHelp(ic) - val changed = help.changed(cacheFile, help.convert(initial)) - val result = f(changed, initial) - - if(changed) - help.save(cacheFile, help.convert(in())) + if (changed) + help.save(cacheFile, conv) - result - } - final class CacheHelp[I](val ic: InputCache[I]) - { - def convert(i: I): ic.Internal = ic.convert(i) - def save(cacheFile: File, value: ic.Internal): Unit = - Using.fileOutputStream()(cacheFile)(out => ic.write(out, value) ) - def changed(cacheFile: File, converted: ic.Internal): Boolean = - try { - val prev = Using.fileInputStream(cacheFile)(x => ic.read(x)) - !ic.equiv.equiv(converted, prev) - } catch { case e: Exception => true } - } + result + } + def outputChanged[I, O](cacheFile: File)(f: (Boolean, I) => O)(implicit ic: InputCache[I]): (() => I) => O = in => + { + val initial = in() + val help = new CacheHelp(ic) + val changed = help.changed(cacheFile, help.convert(initial)) + val result = f(changed, initial) + + if (changed) + help.save(cacheFile, help.convert(in())) + + result + } + final class CacheHelp[I](val ic: 
InputCache[I]) { + def convert(i: I): ic.Internal = ic.convert(i) + def save(cacheFile: File, value: ic.Internal): Unit = + Using.fileOutputStream()(cacheFile)(out => ic.write(out, value)) + def changed(cacheFile: File, converted: ic.Internal): Boolean = + try { + val prev = Using.fileInputStream(cacheFile)(x => ic.read(x)) + !ic.equiv.equiv(converted, prev) + } catch { case e: Exception => true } + } } -trait Tracked -{ - /** Cleans outputs and clears the cache.*/ - def clean(): Unit +trait Tracked { + /** Cleans outputs and clears the cache.*/ + def clean(): Unit } -class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked -{ - def clean() = delete(cacheFile) - /** Reads the previous timestamp, evaluates the provided function, - * and then updates the timestamp if the function completes normally.*/ - def apply[T](f: Long => T): T = - { - val start = now() - val result = f(readTimestamp) - write(cacheFile, (if(useStartTime) start else now()).toString) - result - } - private def now() = System.currentTimeMillis - def readTimestamp: Long = - try { read(cacheFile).toLong } - catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 } +class Timestamp(val cacheFile: File, useStartTime: Boolean) extends Tracked { + def clean() = delete(cacheFile) + /** + * Reads the previous timestamp, evaluates the provided function, + * and then updates the timestamp if the function completes normally. + */ + def apply[T](f: Long => T): T = + { + val start = now() + val result = f(readTimestamp) + write(cacheFile, (if (useStartTime) start else now()).toString) + result + } + private def now() = System.currentTimeMillis + def readTimestamp: Long = + try { read(cacheFile).toLong } + catch { case _: NumberFormatException | _: java.io.FileNotFoundException => 0 } } -class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked -{ - def clean() = delete(cacheFile) - def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value => - { - if(uptodate(value)) - ifUnchanged(value) - else - { - update(value) - ifChanged(value) - } - } +class Changed[O](val cacheFile: File)(implicit equiv: Equiv[O], format: Format[O]) extends Tracked { + def clean() = delete(cacheFile) + def apply[O2](ifChanged: O => O2, ifUnchanged: O => O2): O => O2 = value => + { + if (uptodate(value)) + ifUnchanged(value) + else { + update(value) + ifChanged(value) + } + } - def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value)) - def uptodate(value: O): Boolean = - try { - Using.fileInputStream(cacheFile) { - stream => equiv.equiv(value, format.reads(stream)) - } - } catch { - case _: Exception => false - } + def update(value: O): Unit = Using.fileOutputStream(false)(cacheFile)(stream => format.writes(stream, value)) + def uptodate(value: O): Boolean = + try { + Using.fileInputStream(cacheFile) { + stream => equiv.equiv(value, format.reads(stream)) + } + } catch { + case _: Exception => false + } } -object Difference -{ - def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference = - (cache, style) => new Difference(cache, style, defineClean, filesAreOutputs) +object Difference { + def constructor(defineClean: Boolean, filesAreOutputs: Boolean): (File, FilesInfo.Style) => Difference = + (cache, style) => new Difference(cache, style, defineClean, filesAreOutputs) - /** Provides a constructor for a Difference that removes the files from the previous run on a call to 
'clean' and saves the - * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice: - * before and after running the function.*/ - val outputs = constructor(true, true) - /** Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the - * hash/last modified time of the files as they were prior to running the function.*/ - val inputs = constructor(false, false) + /** + * Provides a constructor for a Difference that removes the files from the previous run on a call to 'clean' and saves the + * hash/last modified time of the files as they are after running the function. This means that this information must be evaluated twice: + * before and after running the function. + */ + val outputs = constructor(true, true) + /** + * Provides a constructor for a Difference that does nothing on a call to 'clean' and saves the + * hash/last modified time of the files as they were prior to running the function. + */ + val inputs = constructor(false, false) } -class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked -{ - def clean() = - { - if(defineClean) delete(raw(cachedFilesInfo)) else () - clearCache() - } - private def clearCache() = delete(cache) - - private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files - private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file) - - def apply[T](files: Set[File])(f: ChangeReport[File] => T): T = - { - val lastFilesInfo = cachedFilesInfo - apply(files, lastFilesInfo)(f)(_ => files) - } - - def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = - { - val lastFilesInfo = cachedFilesInfo - apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles) - } - - private def abs(files: Set[File]) = files.map(_.getAbsoluteFile) - private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T = - { - val lastFiles = raw(lastFilesInfo) - val currentFiles = abs(files) - val currentFilesInfo = style(currentFiles) +class Difference(val cache: File, val style: FilesInfo.Style, val defineClean: Boolean, val filesAreOutputs: Boolean) extends Tracked { + def clean() = + { + if (defineClean) delete(raw(cachedFilesInfo)) else () + clearCache() + } + private def clearCache() = delete(cache) - val report = new ChangeReport[File] - { - lazy val checked = currentFiles - lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist. - lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist. 
- lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added - lazy val unmodified = checked -- modified - } + private def cachedFilesInfo = fromFile(style.formats, style.empty)(cache)(style.manifest).files + private def raw(fs: Set[style.F]): Set[File] = fs.map(_.file) - val result = f(report) - val info = if(filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo - toFile(style.formats)(info)(cache)(style.manifest) - result - } + def apply[T](files: Set[File])(f: ChangeReport[File] => T): T = + { + val lastFilesInfo = cachedFilesInfo + apply(files, lastFilesInfo)(f)(_ => files) + } + + def apply[T](f: ChangeReport[File] => T)(implicit toFiles: T => Set[File]): T = + { + val lastFilesInfo = cachedFilesInfo + apply(raw(lastFilesInfo), lastFilesInfo)(f)(toFiles) + } + + private def abs(files: Set[File]) = files.map(_.getAbsoluteFile) + private[this] def apply[T](files: Set[File], lastFilesInfo: Set[style.F])(f: ChangeReport[File] => T)(extractFiles: T => Set[File]): T = + { + val lastFiles = raw(lastFilesInfo) + val currentFiles = abs(files) + val currentFilesInfo = style(currentFiles) + + val report = new ChangeReport[File] { + lazy val checked = currentFiles + lazy val removed = lastFiles -- checked // all files that were included previously but not this time. This is independent of whether the files exist. + lazy val added = checked -- lastFiles // all files included now but not previously. This is independent of whether the files exist. + lazy val modified = raw(lastFilesInfo -- currentFilesInfo.files) ++ added + lazy val unmodified = checked -- modified + } + + val result = f(report) + val info = if (filesAreOutputs) style(abs(extractFiles(result))) else currentFilesInfo + toFile(style.formats)(info)(cache)(style.manifest) + result + } } object FileFunction { - type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File] - - def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] = - cached(cacheBaseDirectory)(inStyle, outStyle)( (in, out) => action(in.checked) ) - - def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] = - { - import Path._ - lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle) - lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle) - inputs => - { - inCache(inputs) { inReport => - outCache { outReport => - if(inReport.modified.isEmpty && outReport.modified.isEmpty) - outReport.checked - else - action(inReport, outReport) - } - } - } - } + type UpdateFunction = (ChangeReport[File], ChangeReport[File]) => Set[File] + + def cached(cacheBaseDirectory: File, inStyle: FilesInfo.Style = FilesInfo.lastModified, outStyle: FilesInfo.Style = FilesInfo.exists)(action: Set[File] => Set[File]): Set[File] => Set[File] = + cached(cacheBaseDirectory)(inStyle, outStyle)((in, out) => action(in.checked)) + + def cached(cacheBaseDirectory: File)(inStyle: FilesInfo.Style, outStyle: FilesInfo.Style)(action: UpdateFunction): Set[File] => Set[File] = + { + import Path._ + lazy val inCache = Difference.inputs(cacheBaseDirectory / "in-cache", inStyle) + lazy val outCache = Difference.outputs(cacheBaseDirectory / "out-cache", outStyle) + inputs => + { + inCache(inputs) { inReport => + outCache { outReport => + if (inReport.modified.isEmpty && outReport.modified.isEmpty) + 
outReport.checked + else + action(inReport, outReport) + } + } + } + } } \ No newline at end of file diff --git a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala index fe1baa696..29a962de7 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/ContextUtil.scala @@ -1,245 +1,260 @@ package sbt package appmacro - import scala.reflect._ - import macros._ - import scala.tools.nsc.Global - import ContextUtil.{DynamicDependencyError, DynamicReferenceError} +import scala.reflect._ +import macros._ +import scala.tools.nsc.Global +import ContextUtil.{ DynamicDependencyError, DynamicReferenceError } object ContextUtil { - final val DynamicDependencyError = "Illegal dynamic dependency" - final val DynamicReferenceError = "Illegal dynamic reference" + final val DynamicDependencyError = "Illegal dynamic dependency" + final val DynamicReferenceError = "Illegal dynamic reference" - /** Constructs an object with utility methods for operating in the provided macro context `c`. - * Callers should explicitly specify the type parameter as `c.type` in order to preserve the path dependent types. */ - def apply[C <: Context with Singleton](c: C): ContextUtil[C] = new ContextUtil(c) + /** + * Constructs an object with utility methods for operating in the provided macro context `c`. + * Callers should explicitly specify the type parameter as `c.type` in order to preserve the path dependent types. + */ + def apply[C <: Context with Singleton](c: C): ContextUtil[C] = new ContextUtil(c) + /** + * Helper for implementing a no-argument macro that is introduced via an implicit. + * This method removes the implicit conversion and evaluates the function `f` on the target of the conversion. + * + * Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this + * method, the result of this method is `f()`. + */ + def selectMacroImpl[T: c.WeakTypeTag](c: Context)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = + { + import c.universe._ + c.macroApplication match { + case s @ Select(Apply(_, t :: Nil), tp) => f(c.Expr[Any](t), s.pos) + case x => unexpectedTree(x) + } + } - /** Helper for implementing a no-argument macro that is introduced via an implicit. - * This method removes the implicit conversion and evaluates the function `f` on the target of the conversion. - * - * Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this - * method, the result of this method is `f()`. */ - def selectMacroImpl[T: c.WeakTypeTag](c: Context)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = - { - import c.universe._ - c.macroApplication match { - case s @ Select(Apply(_, t :: Nil), tp) => f( c.Expr[Any](t), s.pos ) - case x => unexpectedTree(x) - } - } - - def unexpectedTree[C <: Context](tree: C#Tree): Nothing = sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree) + def unexpectedTree[C <: Context](tree: C#Tree): Nothing = sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree) } // TODO 2.11 Remove this after dropping 2.10.x support. private object HasCompat { val compat = ??? }; import HasCompat._ -/** Utility methods for macros. Several methods assume that the context's universe is a full compiler (`scala.tools.nsc.Global`). -* This is not thread safe due to the underlying Context and related data structures not being thread safe. 
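For reference, a minimal usage sketch of the `FileFunction.cached` API reformatted above (the `cacheDir`, `outDir`, and `generate` names are hypothetical and not part of this patch): the returned function re-runs `generate` only when the tracked inputs or the previously recorded outputs have changed between invocations.

  import java.io.File
  import sbt.{ FileFunction, FilesInfo }

  // stand-in for real work: derive one output file per source file (hypothetical)
  def generate(sources: Set[File], outDir: File): Set[File] =
    sources.map(src => new File(outDir, src.getName + ".out"))

  // wraps `generate` with input/output change tracking under `cacheDir`
  def cachedGenerate(cacheDir: File, outDir: File): Set[File] => Set[File] =
    FileFunction.cached(cacheDir, inStyle = FilesInfo.lastModified, outStyle = FilesInfo.exists) { in =>
      generate(in, outDir)
    }
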
-* Use `ContextUtil[c.type](c)` to construct. */ -final class ContextUtil[C <: Context](val ctx: C) -{ - import ctx.universe.{Apply=>ApplyTree,_} - import compat._ +/** + * Utility methods for macros. Several methods assume that the context's universe is a full compiler (`scala.tools.nsc.Global`). + * This is not thread safe due to the underlying Context and related data structures not being thread safe. + * Use `ContextUtil[c.type](c)` to construct. + */ +final class ContextUtil[C <: Context](val ctx: C) { + import ctx.universe.{ Apply => ApplyTree, _ } + import compat._ - val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context] - val global: powerContext.universe.type = powerContext.universe - def callsiteTyper: global.analyzer.Typer = powerContext.callsiteTyper - val initialOwner: Symbol = callsiteTyper.context.owner.asInstanceOf[ctx.universe.Symbol] + val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context] + val global: powerContext.universe.type = powerContext.universe + def callsiteTyper: global.analyzer.Typer = powerContext.callsiteTyper + val initialOwner: Symbol = callsiteTyper.context.owner.asInstanceOf[ctx.universe.Symbol] - lazy val alistType = ctx.typeOf[AList[KList]] - lazy val alist: Symbol = alistType.typeSymbol.companionSymbol - lazy val alistTC: Type = alistType.typeConstructor + lazy val alistType = ctx.typeOf[AList[KList]] + lazy val alist: Symbol = alistType.typeSymbol.companionSymbol + lazy val alistTC: Type = alistType.typeConstructor - /** Modifiers for a local val.*/ - lazy val localModifiers = Modifiers(NoFlags) + /** Modifiers for a local val.*/ + lazy val localModifiers = Modifiers(NoFlags) - def getPos(sym: Symbol) = if(sym eq null) NoPosition else sym.pos + def getPos(sym: Symbol) = if (sym eq null) NoPosition else sym.pos - /** Constructs a unique term name with the given prefix within this Context. - * (The current implementation uses Context.fresh, which increments*/ - def freshTermName(prefix: String) = newTermName(ctx.fresh("$" + prefix)) + /** + * Constructs a unique term name with the given prefix within this Context. + * (The current implementation uses Context.fresh, which increments + */ + def freshTermName(prefix: String) = newTermName(ctx.fresh("$" + prefix)) - /** Constructs a new, synthetic, local ValDef Type `tpe`, a unique name, - * Position `pos`, an empty implementation (no rhs), and owned by `owner`. */ - def freshValDef(tpe: Type, pos: Position, owner: Symbol): ValDef = - { - val SYNTHETIC = (1 << 21).toLong.asInstanceOf[FlagSet] - val sym = owner.newTermSymbol(freshTermName("q"), pos, SYNTHETIC) - setInfo(sym, tpe) - val vd = ValDef(sym, EmptyTree) - vd.setPos(pos) - vd - } + /** + * Constructs a new, synthetic, local ValDef Type `tpe`, a unique name, + * Position `pos`, an empty implementation (no rhs), and owned by `owner`. + */ + def freshValDef(tpe: Type, pos: Position, owner: Symbol): ValDef = + { + val SYNTHETIC = (1 << 21).toLong.asInstanceOf[FlagSet] + val sym = owner.newTermSymbol(freshTermName("q"), pos, SYNTHETIC) + setInfo(sym, tpe) + val vd = ValDef(sym, EmptyTree) + vd.setPos(pos) + vd + } - lazy val parameterModifiers = Modifiers(Flag.PARAM) + lazy val parameterModifiers = Modifiers(Flag.PARAM) - /** Collects all definitions in the tree for use in checkReferences. - * This excludes definitions in wrapped expressions because checkReferences won't allow nested dereferencing anyway. 
*/ - def collectDefs(tree: Tree, isWrapper: (String, Type, Tree) => Boolean): collection.Set[Symbol] = - { - val defs = new collection.mutable.HashSet[Symbol] - // adds the symbols for all non-Ident subtrees to `defs`. - val process = new Traverser { - override def traverse(t: Tree) = t match { - case _: Ident => () - case ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) if isWrapper(nme.decoded, tpe.tpe, qual) => () - case tree => - if(tree.symbol ne null) defs += tree.symbol; - super.traverse(tree) - } - } - process.traverse(tree) - defs - } + /** + * Collects all definitions in the tree for use in checkReferences. + * This excludes definitions in wrapped expressions because checkReferences won't allow nested dereferencing anyway. + */ + def collectDefs(tree: Tree, isWrapper: (String, Type, Tree) => Boolean): collection.Set[Symbol] = + { + val defs = new collection.mutable.HashSet[Symbol] + // adds the symbols for all non-Ident subtrees to `defs`. + val process = new Traverser { + override def traverse(t: Tree) = t match { + case _: Ident => () + case ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) if isWrapper(nme.decoded, tpe.tpe, qual) => () + case tree => + if (tree.symbol ne null) defs += tree.symbol; + super.traverse(tree) + } + } + process.traverse(tree) + defs + } - /** A reference is illegal if it is to an M instance defined within the scope of the macro call. - * As an approximation, disallow referenced to any local definitions `defs`. */ - def illegalReference(defs: collection.Set[Symbol], sym: Symbol): Boolean = - sym != null && sym != NoSymbol && defs.contains(sym) + /** + * A reference is illegal if it is to an M instance defined within the scope of the macro call. + * As an approximation, disallow referenced to any local definitions `defs`. + */ + def illegalReference(defs: collection.Set[Symbol], sym: Symbol): Boolean = + sym != null && sym != NoSymbol && defs.contains(sym) - /** A function that checks the provided tree for illegal references to M instances defined in the - * expression passed to the macro and for illegal dereferencing of M instances. */ - def checkReferences(defs: collection.Set[Symbol], isWrapper: (String, Type, Tree) => Boolean): Tree => Unit = { - case s @ ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) => - if(isWrapper(nme.decoded, tpe.tpe, qual)) ctx.error(s.pos, DynamicDependencyError) - case id @ Ident(name) if illegalReference(defs, id.symbol) => ctx.error(id.pos, DynamicReferenceError + ": " + name) - case _ => () - } + /** + * A function that checks the provided tree for illegal references to M instances defined in the + * expression passed to the macro and for illegal dereferencing of M instances. + */ + def checkReferences(defs: collection.Set[Symbol], isWrapper: (String, Type, Tree) => Boolean): Tree => Unit = { + case s @ ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) => + if (isWrapper(nme.decoded, tpe.tpe, qual)) ctx.error(s.pos, DynamicDependencyError) + case id @ Ident(name) if illegalReference(defs, id.symbol) => ctx.error(id.pos, DynamicReferenceError + ": " + name) + case _ => () + } - /** Constructs a ValDef with a parameter modifier, a unique name, with the provided Type and with an empty rhs. */ - def freshMethodParameter(tpe: Type): ValDef = - ValDef(parameterModifiers, freshTermName("p"), TypeTree(tpe), EmptyTree) + /** Constructs a ValDef with a parameter modifier, a unique name, with the provided Type and with an empty rhs. 
*/ + def freshMethodParameter(tpe: Type): ValDef = + ValDef(parameterModifiers, freshTermName("p"), TypeTree(tpe), EmptyTree) - /** Constructs a ValDef with local modifiers and a unique name. */ - def localValDef(tpt: Tree, rhs: Tree): ValDef = - ValDef(localModifiers, freshTermName("q"), tpt, rhs) + /** Constructs a ValDef with local modifiers and a unique name. */ + def localValDef(tpt: Tree, rhs: Tree): ValDef = + ValDef(localModifiers, freshTermName("q"), tpt, rhs) - /** Constructs a tuple value of the right TupleN type from the provided inputs.*/ - def mkTuple(args: List[Tree]): Tree = - global.gen.mkTuple(args.asInstanceOf[List[global.Tree]]).asInstanceOf[ctx.universe.Tree] + /** Constructs a tuple value of the right TupleN type from the provided inputs.*/ + def mkTuple(args: List[Tree]): Tree = + global.gen.mkTuple(args.asInstanceOf[List[global.Tree]]).asInstanceOf[ctx.universe.Tree] - def setSymbol[Tree](t: Tree, sym: Symbol): Unit = - t.asInstanceOf[global.Tree].setSymbol(sym.asInstanceOf[global.Symbol]) - def setInfo[Tree](sym: Symbol, tpe: Type): Unit = - sym.asInstanceOf[global.Symbol].setInfo(tpe.asInstanceOf[global.Type]) + def setSymbol[Tree](t: Tree, sym: Symbol): Unit = + t.asInstanceOf[global.Tree].setSymbol(sym.asInstanceOf[global.Symbol]) + def setInfo[Tree](sym: Symbol, tpe: Type): Unit = + sym.asInstanceOf[global.Symbol].setInfo(tpe.asInstanceOf[global.Type]) - /** Creates a new, synthetic type variable with the specified `owner`. */ - def newTypeVariable(owner: Symbol, prefix: String = "T0"): TypeSymbol = - owner.asInstanceOf[global.Symbol].newSyntheticTypeParam(prefix, 0L).asInstanceOf[ctx.universe.TypeSymbol] + /** Creates a new, synthetic type variable with the specified `owner`. */ + def newTypeVariable(owner: Symbol, prefix: String = "T0"): TypeSymbol = + owner.asInstanceOf[global.Symbol].newSyntheticTypeParam(prefix, 0L).asInstanceOf[ctx.universe.TypeSymbol] - /** The type representing the type constructor `[X] X` */ - lazy val idTC: Type = - { - val tvar = newTypeVariable(NoSymbol) - polyType(tvar :: Nil, refVar(tvar)) - } - /** A Type that references the given type variable. */ - def refVar(variable: TypeSymbol): Type = variable.toTypeConstructor - /** Constructs a new, synthetic type variable that is a type constructor. For example, in type Y[L[x]], L is such a type variable. */ - def newTCVariable(owner: Symbol): TypeSymbol = - { - val tc = newTypeVariable(owner) - val arg = newTypeVariable(tc, "x") - tc.setTypeSignature(PolyType(arg :: Nil, emptyTypeBounds)) - tc - } - /** >: Nothing <: Any */ - def emptyTypeBounds: TypeBounds = TypeBounds(definitions.NothingClass.toType, definitions.AnyClass.toType) + /** The type representing the type constructor `[X] X` */ + lazy val idTC: Type = + { + val tvar = newTypeVariable(NoSymbol) + polyType(tvar :: Nil, refVar(tvar)) + } + /** A Type that references the given type variable. */ + def refVar(variable: TypeSymbol): Type = variable.toTypeConstructor + /** Constructs a new, synthetic type variable that is a type constructor. For example, in type Y[L[x]], L is such a type variable. */ + def newTCVariable(owner: Symbol): TypeSymbol = + { + val tc = newTypeVariable(owner) + val arg = newTypeVariable(tc, "x") + tc.setTypeSignature(PolyType(arg :: Nil, emptyTypeBounds)) + tc + } + /** >: Nothing <: Any */ + def emptyTypeBounds: TypeBounds = TypeBounds(definitions.NothingClass.toType, definitions.AnyClass.toType) - /** Creates a new anonymous function symbol with Position `pos`. 
*/ - def functionSymbol(pos: Position): Symbol = - callsiteTyper.context.owner.newAnonymousFunctionValue(pos.asInstanceOf[global.Position]).asInstanceOf[ctx.universe.Symbol] + /** Creates a new anonymous function symbol with Position `pos`. */ + def functionSymbol(pos: Position): Symbol = + callsiteTyper.context.owner.newAnonymousFunctionValue(pos.asInstanceOf[global.Position]).asInstanceOf[ctx.universe.Symbol] - def functionType(args: List[Type], result: Type): Type = - { - val tpe = global.definitions.functionType(args.asInstanceOf[List[global.Type]], result.asInstanceOf[global.Type]) - tpe.asInstanceOf[Type] - } + def functionType(args: List[Type], result: Type): Type = + { + val tpe = global.definitions.functionType(args.asInstanceOf[List[global.Type]], result.asInstanceOf[global.Type]) + tpe.asInstanceOf[Type] + } - /** Create a Tree that references the `val` represented by `vd`, copying attributes from `replaced`. */ - def refVal(replaced: Tree, vd: ValDef): Tree = - treeCopy.Ident(replaced, vd.name).setSymbol(vd.symbol) + /** Create a Tree that references the `val` represented by `vd`, copying attributes from `replaced`. */ + def refVal(replaced: Tree, vd: ValDef): Tree = + treeCopy.Ident(replaced, vd.name).setSymbol(vd.symbol) - /** Creates a Function tree using `functionSym` as the Symbol and changing `initialOwner` to `functionSym` in `body`.*/ - def createFunction(params: List[ValDef], body: Tree, functionSym: Symbol): Tree = - { - changeOwner(body, initialOwner, functionSym) - val f = Function(params, body) - setSymbol(f, functionSym) - f - } + /** Creates a Function tree using `functionSym` as the Symbol and changing `initialOwner` to `functionSym` in `body`.*/ + def createFunction(params: List[ValDef], body: Tree, functionSym: Symbol): Tree = + { + changeOwner(body, initialOwner, functionSym) + val f = Function(params, body) + setSymbol(f, functionSym) + f + } - def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit = - new ChangeOwnerAndModuleClassTraverser(prev.asInstanceOf[global.Symbol], next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree]) + def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit = + new ChangeOwnerAndModuleClassTraverser(prev.asInstanceOf[global.Symbol], next.asInstanceOf[global.Symbol]).traverse(tree.asInstanceOf[global.Tree]) - // Workaround copied from scala/async:can be removed once https://github.com/scala/scala/pull/3179 is merged. - private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol, newowner: global.Symbol) extends global.ChangeOwnerTraverser(oldowner, newowner) - { - override def traverse(tree: global.Tree) { - tree match { - case _: global.DefTree => change(tree.symbol.moduleClass) - case _ => - } - super.traverse(tree) - } - } + // Workaround copied from scala/async:can be removed once https://github.com/scala/scala/pull/3179 is merged. + private[this] class ChangeOwnerAndModuleClassTraverser(oldowner: global.Symbol, newowner: global.Symbol) extends global.ChangeOwnerTraverser(oldowner, newowner) { + override def traverse(tree: global.Tree) { + tree match { + case _: global.DefTree => change(tree.symbol.moduleClass) + case _ => + } + super.traverse(tree) + } + } - /** Returns the Symbol that references the statically accessible singleton `i`. 
*/ - def singleton[T <: AnyRef with Singleton](i: T)(implicit it: ctx.TypeTag[i.type]): Symbol = - it.tpe match { - case SingleType(_, sym) if !sym.isFreeTerm && sym.isStatic => sym - case x => sys.error("Instance must be static (was " + x + ").") - } + /** Returns the Symbol that references the statically accessible singleton `i`. */ + def singleton[T <: AnyRef with Singleton](i: T)(implicit it: ctx.TypeTag[i.type]): Symbol = + it.tpe match { + case SingleType(_, sym) if !sym.isFreeTerm && sym.isStatic => sym + case x => sys.error("Instance must be static (was " + x + ").") + } - def select(t: Tree, name: String): Tree = Select(t, newTermName(name)) + def select(t: Tree, name: String): Tree = Select(t, newTermName(name)) - /** Returns the symbol for the non-private method named `name` for the class/module `obj`. */ - def method(obj: Symbol, name: String): Symbol = { - val ts: Type = obj.typeSignature - val m: global.Symbol = ts.asInstanceOf[global.Type].nonPrivateMember(global.newTermName(name)) - m.asInstanceOf[Symbol] - } + /** Returns the symbol for the non-private method named `name` for the class/module `obj`. */ + def method(obj: Symbol, name: String): Symbol = { + val ts: Type = obj.typeSignature + val m: global.Symbol = ts.asInstanceOf[global.Type].nonPrivateMember(global.newTermName(name)) + m.asInstanceOf[Symbol] + } - /** Returns a Type representing the type constructor tcp.. For example, given - * `object Demo { type M[x] = List[x] }`, the call `extractTC(Demo, "M")` will return a type representing - * the type constructor `[x] List[x]`. - **/ - def extractTC(tcp: AnyRef with Singleton, name: String)(implicit it: ctx.TypeTag[tcp.type]): ctx.Type = - { - val itTpe = it.tpe.asInstanceOf[global.Type] - val m = itTpe.nonPrivateMember(global.newTypeName(name)) - val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type] - assert(tc != NoType && tc.takesTypeArgs, "Invalid type constructor: " + tc) - tc - } + /** + * Returns a Type representing the type constructor tcp.. For example, given + * `object Demo { type M[x] = List[x] }`, the call `extractTC(Demo, "M")` will return a type representing + * the type constructor `[x] List[x]`. + */ + def extractTC(tcp: AnyRef with Singleton, name: String)(implicit it: ctx.TypeTag[tcp.type]): ctx.Type = + { + val itTpe = it.tpe.asInstanceOf[global.Type] + val m = itTpe.nonPrivateMember(global.newTypeName(name)) + val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type] + assert(tc != NoType && tc.takesTypeArgs, "Invalid type constructor: " + tc) + tc + } - /** Substitutes wrappers in tree `t` with the result of `subWrapper`. - * A wrapper is a Tree of the form `f[T](v)` for which isWrapper(, , .target) returns true. - * Typically, `f` is a `Select` or `Ident`. 
- * The wrapper is replaced with the result of `subWrapper(, , )` */ - def transformWrappers(t: Tree, subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = - { - // the main tree transformer that replaces calls to InputWrapper.wrap(x) with - // plain Idents that reference the actual input value - object appTransformer extends Transformer - { - override def transform(tree: Tree): Tree = - tree match { - case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => - subWrapper(nme.decoded, targ.tpe, qual, tree) match { - case Converted.Success(t, finalTx) => - changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 - finalTx(t) - case Converted.Failure(p,m) => ctx.abort(p, m) - case _: Converted.NotApplicable[_] => super.transform(tree) - } - case _ => super.transform(tree) - } - } - appTransformer.atOwner(initialOwner) { - appTransformer.transform(t) - } - } + /** + * Substitutes wrappers in tree `t` with the result of `subWrapper`. + * A wrapper is a Tree of the form `f[T](v)` for which isWrapper(, , .target) returns true. + * Typically, `f` is a `Select` or `Ident`. + * The wrapper is replaced with the result of `subWrapper(, , )` + */ + def transformWrappers(t: Tree, subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]): Tree = + { + // the main tree transformer that replaces calls to InputWrapper.wrap(x) with + // plain Idents that reference the actual input value + object appTransformer extends Transformer { + override def transform(tree: Tree): Tree = + tree match { + case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) => + subWrapper(nme.decoded, targ.tpe, qual, tree) match { + case Converted.Success(t, finalTx) => + changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150 + finalTx(t) + case Converted.Failure(p, m) => ctx.abort(p, m) + case _: Converted.NotApplicable[_] => super.transform(tree) + } + case _ => super.transform(tree) + } + } + appTransformer.atOwner(initialOwner) { + appTransformer.transform(t) + } + } } diff --git a/util/appmacro/src/main/scala/sbt/appmacro/Convert.scala b/util/appmacro/src/main/scala/sbt/appmacro/Convert.scala index 6dedf776b..3a2e562a6 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/Convert.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/Convert.scala @@ -1,38 +1,37 @@ package sbt package appmacro - import scala.reflect._ - import macros._ - import Types.idFun +import scala.reflect._ +import macros._ +import Types.idFun -abstract class Convert -{ - def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] - def asPredicate(c: Context): (String, c.Type, c.Tree) => Boolean = - (n,tpe,tree) => { - val tag = c.WeakTypeTag(tpe) - apply(c)(n,tree)(tag).isSuccess - } +abstract class Convert { + def apply[T: c.WeakTypeTag](c: Context)(nme: String, in: c.Tree): Converted[c.type] + def asPredicate(c: Context): (String, c.Type, c.Tree) => Boolean = + (n, tpe, tree) => { + val tag = c.WeakTypeTag(tpe) + apply(c)(n, tree)(tag).isSuccess + } } sealed trait Converted[C <: Context with Singleton] { - def isSuccess: Boolean - def transform(f: C#Tree => C#Tree): Converted[C] + def isSuccess: Boolean + def transform(f: C#Tree => C#Tree): Converted[C] } object Converted { - def NotApplicable[C <: Context with Singleton] = new NotApplicable[C] - final case class Failure[C <: Context with Singleton](position: C#Position, message: String) extends Converted[C] { - def isSuccess = false - def transform(f: 
C#Tree => C#Tree): Converted[C] = new Failure(position, message) - } - final class NotApplicable[C <: Context with Singleton] extends Converted[C] { - def isSuccess = false - def transform(f: C#Tree => C#Tree): Converted[C] = this - } - final case class Success[C <: Context with Singleton](tree: C#Tree, finalTransform: C#Tree => C#Tree) extends Converted[C] { - def isSuccess = true - def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform) - } - object Success { - def apply[C <: Context with Singleton](tree: C#Tree): Success[C] = Success(tree, idFun) - } + def NotApplicable[C <: Context with Singleton] = new NotApplicable[C] + final case class Failure[C <: Context with Singleton](position: C#Position, message: String) extends Converted[C] { + def isSuccess = false + def transform(f: C#Tree => C#Tree): Converted[C] = new Failure(position, message) + } + final class NotApplicable[C <: Context with Singleton] extends Converted[C] { + def isSuccess = false + def transform(f: C#Tree => C#Tree): Converted[C] = this + } + final case class Success[C <: Context with Singleton](tree: C#Tree, finalTransform: C#Tree => C#Tree) extends Converted[C] { + def isSuccess = true + def transform(f: C#Tree => C#Tree): Converted[C] = Success(f(tree), finalTransform) + } + object Success { + def apply[C <: Context with Singleton](tree: C#Tree): Success[C] = Success(tree, idFun) + } } \ No newline at end of file diff --git a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala index 043ad8731..7a63feca5 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/Instance.scala @@ -1,214 +1,210 @@ package sbt package appmacro - import Classes.Applicative - import Types.Id +import Classes.Applicative +import Types.Id -/** The separate hierarchy from Applicative/Monad is for two reasons. -* -* 1. The type constructor is represented as an abstract type because a TypeTag cannot represent a type constructor directly. -* 2. The applicative interface is uncurried. -*/ -trait Instance -{ - type M[x] - def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z] - def map[S,T](in: M[S], f: S => T): M[T] - def pure[T](t: () => T): M[T] +/** + * The separate hierarchy from Applicative/Monad is for two reasons. + * + * 1. The type constructor is represented as an abstract type because a TypeTag cannot represent a type constructor directly. + * 2. The applicative interface is uncurried. 
+ */ +trait Instance { + type M[x] + def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit a: AList[K]): M[Z] + def map[S, T](in: M[S], f: S => T): M[T] + def pure[T](t: () => T): M[T] } -trait MonadInstance extends Instance -{ - def flatten[T](in: M[M[T]]): M[T] +trait MonadInstance extends Instance { + def flatten[T](in: M[M[T]]): M[T] } - import scala.reflect._ - import macros._ - import reflect.internal.annotations.compileTimeOnly +import scala.reflect._ +import macros._ +import reflect.internal.annotations.compileTimeOnly -object Instance -{ - final val ApplyName = "app" - final val FlattenName = "flatten" - final val PureName = "pure" - final val MapName = "map" - final val InstanceTCName = "M" +object Instance { + final val ApplyName = "app" + final val FlattenName = "flatten" + final val PureName = "pure" + final val MapName = "map" + final val InstanceTCName = "M" - final class Input[U <: Universe with Singleton](val tpe: U#Type, val expr: U#Tree, val local: U#ValDef) - trait Transform[C <: Context with Singleton, N[_]] { - def apply(in: C#Tree): C#Tree - } - def idTransform[C <: Context with Singleton]: Transform[C,Id] = new Transform[C,Id] { - def apply(in: C#Tree): C#Tree = in - } + final class Input[U <: Universe with Singleton](val tpe: U#Type, val expr: U#Tree, val local: U#ValDef) + trait Transform[C <: Context with Singleton, N[_]] { + def apply(in: C#Tree): C#Tree + } + def idTransform[C <: Context with Singleton]: Transform[C, Id] = new Transform[C, Id] { + def apply(in: C#Tree): C#Tree = in + } - /** Implementation of a macro that provides a direct syntax for applicative functors and monads. - * It is intended to be used in conjunction with another macro that conditions the inputs. - * - * This method processes the Tree `t` to find inputs of the form `wrap[T]( input )` - * This form is typically constructed by another macro that pretends to be able to get a value of type `T` - * from a value convertible to `M[T]`. This `wrap(input)` form has two main purposes. - * First, it identifies the inputs that should be transformed. - * Second, it allows the input trees to be wrapped for later conversion into the appropriate `M[T]` type by `convert`. - * This wrapping is necessary because applying the first macro must preserve the original type, - * but it is useful to delay conversion until the outer, second macro is called. The `wrap` method accomplishes this by - * allowing the original `Tree` and `Type` to be hidden behind the raw `T` type. This method will remove the call to `wrap` - * so that it is not actually called at runtime. - * - * Each `input` in each expression of the form `wrap[T]( input )` is transformed by `convert`. - * This transformation converts the input Tree to a Tree of type `M[T]`. - * The original wrapped expression `wrap(input)` is replaced by a reference to a new local `val $x: T`, where `$x` is a fresh name. - * These converted inputs are passed to `builder` as well as the list of these synthetic `ValDef`s. - * The `TupleBuilder` instance constructs a tuple (Tree) from the inputs and defines the right hand side of the vals - * that unpacks the tuple containing the results of the inputs. - * - * The constructed tuple of inputs and the code that unpacks the results of the inputs are then passed to the `i`, - * which is an implementation of `Instance` that is statically accessible. - * An Instance defines a applicative functor associated with a specific type constructor and, if it implements MonadInstance as well, a monad. 
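As a concrete illustration of the `Instance` trait above, the sketch below lifts `Option` into an `Instance` via `Instance.applicativeInstance`. The member shape assumed for `Classes.Applicative` (`pure`, `map`, `apply`) is an assumption inferred from how `ap` is used in this file, not something the patch shows in full.

  import sbt.Classes
  import sbt.appmacro.Instance

  // Assumed Applicative shape (pure/map/apply); an assumption, not taken from this patch.
  implicit val optionApplicative: Classes.Applicative[Option] =
    new Classes.Applicative[Option] {
      def pure[S](s: => S): Option[S] = Some(s)
      def map[S, T](f: S => T, v: Option[S]): Option[T] = v map f
      def apply[S, T](f: Option[S => T], v: Option[S]): Option[T] =
        for (g <- f; s <- v) yield g(s)
    }

  // An Instance whose abstract type constructor M is fixed to Option.
  val optionInstance: Instance { type M[x] = Option[x] } =
    Instance.applicativeInstance[Option]
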
- * Typically, it will be either a top-level module or a stable member of a top-level module (such as a val or a nested module). - * The `with Singleton` part of the type verifies some cases at macro compilation time, - * while the full check for static accessibility is done at macro expansion time. - * Note: Ideally, the types would verify that `i: MonadInstance` when `t.isRight`. - * With the various dependent types involved, this is not worth it. - * - * The `t` argument is the argument of the macro that will be transformed as described above. - * If the macro that calls this method is for a multi-input map (app followed by map), - * `t` should be the argument wrapped in Left. - * If this is for multi-input flatMap (app followed by flatMap), - * this should be the argument wrapped in Right. - */ - def contImpl[T,N[_]](c: Context, i: Instance with Singleton, convert: Convert, builder: TupleBuilder)(t: Either[c.Expr[T], c.Expr[i.M[T]]], inner: Transform[c.type,N])( - implicit tt: c.WeakTypeTag[T], nt: c.WeakTypeTag[N[T]], it: c.TypeTag[i.type]): c.Expr[i.M[N[T]]] = - { - import c.universe.{Apply=>ApplyTree,_} + /** + * Implementation of a macro that provides a direct syntax for applicative functors and monads. + * It is intended to be used in conjunction with another macro that conditions the inputs. + * + * This method processes the Tree `t` to find inputs of the form `wrap[T]( input )` + * This form is typically constructed by another macro that pretends to be able to get a value of type `T` + * from a value convertible to `M[T]`. This `wrap(input)` form has two main purposes. + * First, it identifies the inputs that should be transformed. + * Second, it allows the input trees to be wrapped for later conversion into the appropriate `M[T]` type by `convert`. + * This wrapping is necessary because applying the first macro must preserve the original type, + * but it is useful to delay conversion until the outer, second macro is called. The `wrap` method accomplishes this by + * allowing the original `Tree` and `Type` to be hidden behind the raw `T` type. This method will remove the call to `wrap` + * so that it is not actually called at runtime. + * + * Each `input` in each expression of the form `wrap[T]( input )` is transformed by `convert`. + * This transformation converts the input Tree to a Tree of type `M[T]`. + * The original wrapped expression `wrap(input)` is replaced by a reference to a new local `val $x: T`, where `$x` is a fresh name. + * These converted inputs are passed to `builder` as well as the list of these synthetic `ValDef`s. + * The `TupleBuilder` instance constructs a tuple (Tree) from the inputs and defines the right hand side of the vals + * that unpacks the tuple containing the results of the inputs. + * + * The constructed tuple of inputs and the code that unpacks the results of the inputs are then passed to the `i`, + * which is an implementation of `Instance` that is statically accessible. + * An Instance defines a applicative functor associated with a specific type constructor and, if it implements MonadInstance as well, a monad. + * Typically, it will be either a top-level module or a stable member of a top-level module (such as a val or a nested module). + * The `with Singleton` part of the type verifies some cases at macro compilation time, + * while the full check for static accessibility is done at macro expansion time. + * Note: Ideally, the types would verify that `i: MonadInstance` when `t.isRight`. 
+ * With the various dependent types involved, this is not worth it. + * + * The `t` argument is the argument of the macro that will be transformed as described above. + * If the macro that calls this method is for a multi-input map (app followed by map), + * `t` should be the argument wrapped in Left. + * If this is for multi-input flatMap (app followed by flatMap), + * this should be the argument wrapped in Right. + */ + def contImpl[T, N[_]](c: Context, i: Instance with Singleton, convert: Convert, builder: TupleBuilder)(t: Either[c.Expr[T], c.Expr[i.M[T]]], inner: Transform[c.type, N])( + implicit tt: c.WeakTypeTag[T], nt: c.WeakTypeTag[N[T]], it: c.TypeTag[i.type]): c.Expr[i.M[N[T]]] = + { + import c.universe.{ Apply => ApplyTree, _ } - val util = ContextUtil[c.type](c) - val mTC: Type = util.extractTC(i, InstanceTCName) - val mttpe: Type = appliedType(mTC, nt.tpe :: Nil).normalize + val util = ContextUtil[c.type](c) + val mTC: Type = util.extractTC(i, InstanceTCName) + val mttpe: Type = appliedType(mTC, nt.tpe :: Nil).normalize - // the tree for the macro argument - val (tree, treeType) = t match { - case Left(l) => (l.tree, nt.tpe.normalize) - case Right(r) => (r.tree, mttpe) - } - // the Symbol for the anonymous function passed to the appropriate Instance.map/flatMap/pure method - // this Symbol needs to be known up front so that it can be used as the owner of synthetic vals - val functionSym = util.functionSymbol(tree.pos) + // the tree for the macro argument + val (tree, treeType) = t match { + case Left(l) => (l.tree, nt.tpe.normalize) + case Right(r) => (r.tree, mttpe) + } + // the Symbol for the anonymous function passed to the appropriate Instance.map/flatMap/pure method + // this Symbol needs to be known up front so that it can be used as the owner of synthetic vals + val functionSym = util.functionSymbol(tree.pos) - val instanceSym = util.singleton(i) - // A Tree that references the statically accessible Instance that provides the actual implementations of map, flatMap, ... - val instance = Ident(instanceSym) + val instanceSym = util.singleton(i) + // A Tree that references the statically accessible Instance that provides the actual implementations of map, flatMap, ... + val instance = Ident(instanceSym) - val isWrapper: (String, Type, Tree) => Boolean = convert.asPredicate(c) + val isWrapper: (String, Type, Tree) => Boolean = convert.asPredicate(c) - // Local definitions `defs` in the macro. This is used to ensure references are to M instances defined outside of the macro call. - // Also `refCount` is the number of references, which is used to create the private, synthetic method containing the body - val defs = util.collectDefs(tree, isWrapper) - val checkQual: Tree => Unit = util.checkReferences(defs, isWrapper) + // Local definitions `defs` in the macro. This is used to ensure references are to M instances defined outside of the macro call. 
+ // Also `refCount` is the number of references, which is used to create the private, synthetic method containing the body + val defs = util.collectDefs(tree, isWrapper) + val checkQual: Tree => Unit = util.checkReferences(defs, isWrapper) - type In = Input[c.universe.type] - var inputs = List[In]() + type In = Input[c.universe.type] + var inputs = List[In]() + // transforms the original tree into calls to the Instance functions pure, map, ..., + // resulting in a value of type M[T] + def makeApp(body: Tree): Tree = + inputs match { + case Nil => pure(body) + case x :: Nil => single(body, x) + case xs => arbArity(body, xs) + } - // transforms the original tree into calls to the Instance functions pure, map, ..., - // resulting in a value of type M[T] - def makeApp(body: Tree): Tree = - inputs match { - case Nil => pure(body) - case x :: Nil => single(body, x) - case xs => arbArity(body, xs) - } + // no inputs, so construct M[T] via Instance.pure or pure+flatten + def pure(body: Tree): Tree = + { + val typeApplied = TypeApply(util.select(instance, PureName), TypeTree(treeType) :: Nil) + val f = util.createFunction(Nil, body, functionSym) + val p = ApplyTree(typeApplied, f :: Nil) + if (t.isLeft) p else flatten(p) + } + // m should have type M[M[T]] + // the returned Tree will have type M[T] + def flatten(m: Tree): Tree = + { + val typedFlatten = TypeApply(util.select(instance, FlattenName), TypeTree(tt.tpe) :: Nil) + ApplyTree(typedFlatten, m :: Nil) + } - // no inputs, so construct M[T] via Instance.pure or pure+flatten - def pure(body: Tree): Tree = - { - val typeApplied = TypeApply(util.select(instance, PureName), TypeTree(treeType) :: Nil) - val f = util.createFunction(Nil, body, functionSym) - val p = ApplyTree(typeApplied, f :: Nil) - if(t.isLeft) p else flatten(p) - } - // m should have type M[M[T]] - // the returned Tree will have type M[T] - def flatten(m: Tree): Tree = - { - val typedFlatten = TypeApply(util.select(instance, FlattenName), TypeTree(tt.tpe) :: Nil) - ApplyTree(typedFlatten, m :: Nil) - } + // calls Instance.map or flatmap directly, skipping the intermediate Instance.app that is unnecessary for a single input + def single(body: Tree, input: In): Tree = + { + val variable = input.local + val param = treeCopy.ValDef(variable, util.parameterModifiers, variable.name, variable.tpt, EmptyTree) + val typeApplied = TypeApply(util.select(instance, MapName), variable.tpt :: TypeTree(treeType) :: Nil) + val f = util.createFunction(param :: Nil, body, functionSym) + val mapped = ApplyTree(typeApplied, input.expr :: f :: Nil) + if (t.isLeft) mapped else flatten(mapped) + } - // calls Instance.map or flatmap directly, skipping the intermediate Instance.app that is unnecessary for a single input - def single(body: Tree, input: In): Tree = - { - val variable = input.local - val param = treeCopy.ValDef(variable, util.parameterModifiers, variable.name, variable.tpt, EmptyTree) - val typeApplied = TypeApply(util.select(instance, MapName), variable.tpt :: TypeTree(treeType) :: Nil) - val f = util.createFunction(param :: Nil, body, functionSym) - val mapped = ApplyTree(typeApplied, input.expr :: f :: Nil) - if(t.isLeft) mapped else flatten(mapped) - } + // calls Instance.app to get the values for all inputs and then calls Instance.map or flatMap to evaluate the body + def arbArity(body: Tree, inputs: List[In]): Tree = + { + val result = builder.make(c)(mTC, inputs) + val param = util.freshMethodParameter(appliedType(result.representationC, util.idTC :: Nil)) + val bindings = 
result.extract(param) + val f = util.createFunction(param :: Nil, Block(bindings, body), functionSym) + val ttt = TypeTree(treeType) + val typedApp = TypeApply(util.select(instance, ApplyName), TypeTree(result.representationC) :: ttt :: Nil) + val app = ApplyTree(ApplyTree(typedApp, result.input :: f :: Nil), result.alistInstance :: Nil) + if (t.isLeft) app else flatten(app) + } - // calls Instance.app to get the values for all inputs and then calls Instance.map or flatMap to evaluate the body - def arbArity(body: Tree, inputs: List[In]): Tree = - { - val result = builder.make(c)(mTC, inputs) - val param = util.freshMethodParameter( appliedType(result.representationC, util.idTC :: Nil) ) - val bindings = result.extract(param) - val f = util.createFunction(param :: Nil, Block(bindings, body), functionSym) - val ttt = TypeTree(treeType) - val typedApp = TypeApply(util.select(instance, ApplyName), TypeTree(result.representationC) :: ttt :: Nil) - val app = ApplyTree(ApplyTree(typedApp, result.input :: f :: Nil), result.alistInstance :: Nil) - if(t.isLeft) app else flatten(app) - } + // Called when transforming the tree to add an input. + // For `qual` of type M[A], and a `selection` qual.value, + // the call is addType(Type A, Tree qual) + // The result is a Tree representing a reference to + // the bound value of the input. + def addType(tpe: Type, qual: Tree, selection: Tree): Tree = + { + qual.foreach(checkQual) + val vd = util.freshValDef(tpe, qual.pos, functionSym) + inputs ::= new Input(tpe, qual, vd) + util.refVal(selection, vd) + } + def sub(name: String, tpe: Type, qual: Tree, replace: Tree): Converted[c.type] = + { + val tag = c.WeakTypeTag[T](tpe) + convert[T](c)(name, qual)(tag) transform { tree => + addType(tpe, tree, replace) + } + } - // Called when transforming the tree to add an input. - // For `qual` of type M[A], and a `selection` qual.value, - // the call is addType(Type A, Tree qual) - // The result is a Tree representing a reference to - // the bound value of the input. 
- def addType(tpe: Type, qual: Tree, selection: Tree): Tree = - { - qual.foreach(checkQual) - val vd = util.freshValDef(tpe, qual.pos, functionSym) - inputs ::= new Input(tpe, qual, vd) - util.refVal(selection, vd) - } - def sub(name: String, tpe: Type, qual: Tree, replace: Tree): Converted[c.type] = - { - val tag = c.WeakTypeTag[T](tpe) - convert[T](c)(name, qual)(tag) transform { tree => - addType(tpe, tree, replace) - } - } + // applies the transformation + val tx = util.transformWrappers(tree, (n, tpe, t, replace) => sub(n, tpe, t, replace)) + // resetting attributes must be: a) local b) done here and not wider or else there are obscure errors + val tr = makeApp(inner(tx)) + c.Expr[i.M[N[T]]](tr) + } - // applies the transformation - val tx = util.transformWrappers(tree, (n,tpe,t,replace) => sub(n,tpe,t,replace)) - // resetting attributes must be: a) local b) done here and not wider or else there are obscure errors - val tr = makeApp( inner(tx) ) - c.Expr[i.M[N[T]]](tr) - } + import Types._ - import Types._ + implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance { type M[x] = A[x] } = new Instance { + type M[x] = A[x] + def app[K[L[x]], Z](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A, Z](in, f) + def map[S, T](in: A[S], f: S => T) = ap.map(f, in) + def pure[S](s: () => S): M[S] = ap.pure(s()) + } - implicit def applicativeInstance[A[_]](implicit ap: Applicative[A]): Instance { type M[x] = A[x] } = new Instance - { - type M[x] = A[x] - def app[ K[L[x]], Z ](in: K[A], f: K[Id] => Z)(implicit a: AList[K]) = a.apply[A,Z](in, f) - def map[S,T](in: A[S], f: S => T) = ap.map(f, in) - def pure[S](s: () => S): M[S] = ap.pure(s()) - } - - type AI[A[_]] = Instance { type M[x] = A[x] } - def compose[A[_], B[_]](implicit a: AI[A], b: AI[B]): Instance { type M[x] = A[B[x]] } = new Composed[A,B](a,b) - // made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object - class Composed[A[_], B[_]](a: AI[A], b: AI[B]) extends Instance - { - type M[x] = A[B[x]] - def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s)) - def map[S,T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f)) - def app[ K[L[x]], Z ](in: K[M], f: K[Id] => Z)(implicit alist: AList[K]): A[B[Z]] = - { - val g: K[B] => B[Z] = in => b.app[K, Z](in, f) - type Split[ L[x] ] = K[ (L ∙ B)#l ] - a.app[Split, B[Z]](in, g)(AList.asplit(alist)) - } - } + type AI[A[_]] = Instance { type M[x] = A[x] } + def compose[A[_], B[_]](implicit a: AI[A], b: AI[B]): Instance { type M[x] = A[B[x]] } = new Composed[A, B](a, b) + // made a public, named, unsealed class because of trouble with macros and inference when the Instance is not an object + class Composed[A[_], B[_]](a: AI[A], b: AI[B]) extends Instance { + type M[x] = A[B[x]] + def pure[S](s: () => S): A[B[S]] = a.pure(() => b.pure(s)) + def map[S, T](in: M[S], f: S => T): M[T] = a.map(in, (bv: B[S]) => b.map(bv, f)) + def app[K[L[x]], Z](in: K[M], f: K[Id] => Z)(implicit alist: AList[K]): A[B[Z]] = + { + val g: K[B] => B[Z] = in => b.app[K, Z](in, f) + type Split[L[x]] = K[(L ∙ B)#l] + a.app[Split, B[Z]](in, g)(AList.asplit(alist)) + } + } } diff --git a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala index d9dbebe42..b5c2878f3 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/KListBuilder.scala @@ -1,72 +1,71 @@ package sbt package appmacro - 
import Types.Id - import scala.tools.nsc.Global - import scala.reflect._ - import macros._ +import Types.Id +import scala.tools.nsc.Global +import scala.reflect._ +import macros._ /** A `TupleBuilder` that uses a KList as the tuple representation.*/ -object KListBuilder extends TupleBuilder -{ - // TODO 2.11 Remove this after dropping 2.10.x support. - private object HasCompat { val compat = ??? }; import HasCompat._ +object KListBuilder extends TupleBuilder { + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? }; import HasCompat._ - def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] - { - val ctx: c.type = c - val util = ContextUtil[c.type](c) - import c.universe.{Apply=>ApplyTree,_} - import compat._ - import util._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { + val ctx: c.type = c + val util = ContextUtil[c.type](c) + import c.universe.{ Apply => ApplyTree, _ } + import compat._ + import util._ - val knilType = c.typeOf[KNil] - val knil = Ident(knilType.typeSymbol.companionSymbol) - val kconsTpe = c.typeOf[KCons[Int,KNil,List]] - val kcons = kconsTpe.typeSymbol.companionSymbol - val mTC: Type = mt.asInstanceOf[c.universe.Type] - val kconsTC: Type = kconsTpe.typeConstructor + val knilType = c.typeOf[KNil] + val knil = Ident(knilType.typeSymbol.companionSymbol) + val kconsTpe = c.typeOf[KCons[Int, KNil, List]] + val kcons = kconsTpe.typeSymbol.companionSymbol + val mTC: Type = mt.asInstanceOf[c.universe.Type] + val kconsTC: Type = kconsTpe.typeConstructor - /** This is the L in the type function [L[x]] ... */ - val tcVariable: TypeSymbol = newTCVariable(util.initialOwner) + /** This is the L in the type function [L[x]] ... 
*/ + val tcVariable: TypeSymbol = newTCVariable(util.initialOwner) - /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */ - def kconsType(h: Type, t: Type): Type = - appliedType(kconsTC, h :: t :: refVar(tcVariable) :: Nil) + /** Instantiates KCons[h, t <: KList[L], L], where L is the type constructor variable */ + def kconsType(h: Type, t: Type): Type = + appliedType(kconsTC, h :: t :: refVar(tcVariable) :: Nil) - def bindKList(prev: ValDef, revBindings: List[ValDef], params: List[ValDef]): List[ValDef] = - params match - { - case (x @ ValDef(mods, name, tpt, _)) :: xs => - val rhs = select(Ident(prev.name), "head") - val head = treeCopy.ValDef(x, mods, name, tpt, rhs) - util.setSymbol(head, x.symbol) - val tail = localValDef(TypeTree(), select(Ident(prev.name), "tail")) - val base = head :: revBindings - bindKList(tail, if(xs.isEmpty) base else tail :: base, xs) - case Nil => revBindings.reverse - } + def bindKList(prev: ValDef, revBindings: List[ValDef], params: List[ValDef]): List[ValDef] = + params match { + case (x @ ValDef(mods, name, tpt, _)) :: xs => + val rhs = select(Ident(prev.name), "head") + val head = treeCopy.ValDef(x, mods, name, tpt, rhs) + util.setSymbol(head, x.symbol) + val tail = localValDef(TypeTree(), select(Ident(prev.name), "tail")) + val base = head :: revBindings + bindKList(tail, if (xs.isEmpty) base else tail :: base, xs) + case Nil => revBindings.reverse + } - private[this] def makeKList(revInputs: Inputs[c.universe.type], klist: Tree, klistType: Type): Tree = - revInputs match { - case in :: tail => - val next = ApplyTree(TypeApply(Ident(kcons), TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil), in.expr :: klist :: Nil) - makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil)) - case Nil => klist - } + private[this] def makeKList(revInputs: Inputs[c.universe.type], klist: Tree, klistType: Type): Tree = + revInputs match { + case in :: tail => + val next = ApplyTree(TypeApply(Ident(kcons), TypeTree(in.tpe) :: TypeTree(klistType) :: TypeTree(mTC) :: Nil), in.expr :: klist :: Nil) + makeKList(tail, next, appliedType(kconsTC, in.tpe :: klistType :: mTC :: Nil)) + case Nil => klist + } - /** The input trees combined in a KList */ - val klist = makeKList(inputs.reverse, knil, knilType) + /** The input trees combined in a KList */ + val klist = makeKList(inputs.reverse, knil, knilType) - /** The input types combined in a KList type. The main concern is tracking the heterogeneous types. - * The type constructor is tcVariable, so that it can be applied to [X] X or M later. - * When applied to `M`, this type gives the type of the `input` KList. */ - val klistType: Type = (inputs :\ knilType)( (in, klist) => kconsType(in.tpe, klist) ) + /** + * The input types combined in a KList type. The main concern is tracking the heterogeneous types. + * The type constructor is tcVariable, so that it can be applied to [X] X or M later. + * When applied to `M`, this type gives the type of the `input` KList. 
+ */ + val klistType: Type = (inputs :\ knilType)((in, klist) => kconsType(in.tpe, klist)) - val representationC = PolyType(tcVariable :: Nil, klistType) - val resultType = appliedType(representationC, idTC :: Nil) - val input = klist - val alistInstance: ctx.universe.Tree = TypeApply(select(Ident(alist), "klist"), TypeTree(representationC) :: Nil) - def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local)) - } + val representationC = PolyType(tcVariable :: Nil, klistType) + val resultType = appliedType(representationC, idTC :: Nil) + val input = klist + val alistInstance: ctx.universe.Tree = TypeApply(select(Ident(alist), "klist"), TypeTree(representationC) :: Nil) + def extract(param: ValDef) = bindKList(param, Nil, inputs.map(_.local)) + } } diff --git a/util/appmacro/src/main/scala/sbt/appmacro/MixedBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/MixedBuilder.scala index e58adb2b0..019dc8b20 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/MixedBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/MixedBuilder.scala @@ -1,16 +1,17 @@ package sbt package appmacro - import scala.reflect._ - import macros._ +import scala.reflect._ +import macros._ -/** A builder that uses `TupleN` as the representation for small numbers of inputs (up to `TupleNBuilder.MaxInputs`) -* and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs.*/ -object MixedBuilder extends TupleBuilder -{ - def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = - { - val delegate = if(inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder - delegate.make(c)(mt, inputs) - } +/** + * A builder that uses `TupleN` as the representation for small numbers of inputs (up to `TupleNBuilder.MaxInputs`) + * and `KList` for larger numbers of inputs. This builder cannot handle fewer than 2 inputs. + */ +object MixedBuilder extends TupleBuilder { + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = + { + val delegate = if (inputs.size > TupleNBuilder.MaxInputs) KListBuilder else TupleNBuilder + delegate.make(c)(mt, inputs) + } } \ No newline at end of file diff --git a/util/appmacro/src/main/scala/sbt/appmacro/TupleBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/TupleBuilder.scala index f6442cb02..a6ea2d84c 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/TupleBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/TupleBuilder.scala @@ -1,56 +1,57 @@ package sbt package appmacro - import Types.Id - import scala.tools.nsc.Global - import scala.reflect._ - import macros._ +import Types.Id +import scala.tools.nsc.Global +import scala.reflect._ +import macros._ -/** -* A `TupleBuilder` abstracts the work of constructing a tuple data structure such as a `TupleN` or `KList` -* and extracting values from it. The `Instance` macro implementation will (roughly) traverse the tree of its argument -* and ultimately obtain a list of expressions with type `M[T]` for different types `T`. -* The macro constructs an `Input` value for each of these expressions that contains the `Type` for `T`, -* the `Tree` for the expression, and a `ValDef` that will hold the value for the input. -* -* `TupleBuilder.apply` is provided with the list of `Input`s and is expected to provide three values in the returned BuilderResult. -* First, it returns the constructed tuple data structure Tree in `input`. 
-* Next, it provides the type constructor `representationC` that, when applied to M, gives the type of tuple data structure. -* For example, a builder that constructs a `Tuple3` for inputs `M[Int]`, `M[Boolean]`, and `M[String]` -* would provide a Type representing `[L[x]] (L[Int], L[Boolean], L[String])`. The `input` method -* would return a value whose type is that type constructor applied to M, or `(M[Int], M[Boolean], M[String])`. -* -* Finally, the `extract` method provides a list of vals that extract information from the applied input. -* The type of the applied input is the type constructor applied to `Id` (`[X] X`). -* The returned list of ValDefs should be the ValDefs from `inputs`, but with non-empty right-hand sides. -*/ +/** + * A `TupleBuilder` abstracts the work of constructing a tuple data structure such as a `TupleN` or `KList` + * and extracting values from it. The `Instance` macro implementation will (roughly) traverse the tree of its argument + * and ultimately obtain a list of expressions with type `M[T]` for different types `T`. + * The macro constructs an `Input` value for each of these expressions that contains the `Type` for `T`, + * the `Tree` for the expression, and a `ValDef` that will hold the value for the input. + * + * `TupleBuilder.apply` is provided with the list of `Input`s and is expected to provide three values in the returned BuilderResult. + * First, it returns the constructed tuple data structure Tree in `input`. + * Next, it provides the type constructor `representationC` that, when applied to M, gives the type of tuple data structure. + * For example, a builder that constructs a `Tuple3` for inputs `M[Int]`, `M[Boolean]`, and `M[String]` + * would provide a Type representing `[L[x]] (L[Int], L[Boolean], L[String])`. The `input` method + * would return a value whose type is that type constructor applied to M, or `(M[Int], M[Boolean], M[String])`. + * + * Finally, the `extract` method provides a list of vals that extract information from the applied input. + * The type of the applied input is the type constructor applied to `Id` (`[X] X`). + * The returned list of ValDefs should be the ValDefs from `inputs`, but with non-empty right-hand sides. + */ trait TupleBuilder { - /** A convenience alias for a list of inputs (associated with a Universe of type U). */ - type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]] + /** A convenience alias for a list of inputs (associated with a Universe of type U). */ + type Inputs[U <: Universe with Singleton] = List[Instance.Input[U]] - /** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */ - def make(c: Context)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] + /** Constructs a one-time use Builder for Context `c` and type constructor `tcType`. */ + def make(c: Context)(tcType: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] } -trait BuilderResult[C <: Context with Singleton] -{ - val ctx: C - import ctx.universe._ +trait BuilderResult[C <: Context with Singleton] { + val ctx: C + import ctx.universe._ - /** Represents the higher-order type constructor `[L[x]] ...` where `...` is the - * type of the data structure containing the added expressions, - * except that it is abstracted over the type constructor applied to each heterogeneous part of the type . 
*/ - def representationC: PolyType + /** + * Represents the higher-order type constructor `[L[x]] ...` where `...` is the + * type of the data structure containing the added expressions, + * except that it is abstracted over the type constructor applied to each heterogeneous part of the type . + */ + def representationC: PolyType - /** The instance of AList for the input. For a `representationC` of `[L[x]]`, this `Tree` should have a `Type` of `AList[L]`*/ - def alistInstance: Tree + /** The instance of AList for the input. For a `representationC` of `[L[x]]`, this `Tree` should have a `Type` of `AList[L]`*/ + def alistInstance: Tree - /** Returns the completed value containing all expressions added to the builder. */ - def input: Tree + /** Returns the completed value containing all expressions added to the builder. */ + def input: Tree - /* The list of definitions that extract values from a value of type `$representationC[Id]`. + /* The list of definitions that extract values from a value of type `$representationC[Id]`. * The returned value should be identical to the `ValDef`s provided to the `TupleBuilder.make` method but with * non-empty right hand sides. Each `ValDef` may refer to `param` and previous `ValDef`s in the list.*/ - def extract(param: ValDef): List[ValDef] + def extract(param: ValDef): List[ValDef] } diff --git a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala index 28fa581a4..232174c81 100644 --- a/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala +++ b/util/appmacro/src/main/scala/sbt/appmacro/TupleNBuilder.scala @@ -1,57 +1,56 @@ package sbt package appmacro - import Types.Id - import scala.tools.nsc.Global - import scala.reflect._ - import macros._ +import Types.Id +import scala.tools.nsc.Global +import scala.reflect._ +import macros._ -/** A builder that uses a TupleN as the tuple representation. -* It is limited to tuples of size 2 to `MaxInputs`. */ -object TupleNBuilder extends TupleBuilder -{ - /** The largest number of inputs that this builder can handle. */ - final val MaxInputs = 11 - final val TupleMethodName = "tuple" +/** + * A builder that uses a TupleN as the tuple representation. + * It is limited to tuples of size 2 to `MaxInputs`. + */ +object TupleNBuilder extends TupleBuilder { + /** The largest number of inputs that this builder can handle. */ + final val MaxInputs = 11 + final val TupleMethodName = "tuple" - // TODO 2.11 Remove this after dropping 2.10.x support. - private object HasCompat { val compat = ??? }; import HasCompat._ + // TODO 2.11 Remove this after dropping 2.10.x support. + private object HasCompat { val compat = ??? 
}; import HasCompat._ - def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] - { - val util = ContextUtil[c.type](c) - import c.universe.{Apply=>ApplyTree,_} - import compat._ - import util._ + def make(c: Context)(mt: c.Type, inputs: Inputs[c.universe.type]): BuilderResult[c.type] = new BuilderResult[c.type] { + val util = ContextUtil[c.type](c) + import c.universe.{ Apply => ApplyTree, _ } + import compat._ + import util._ - val global: Global = c.universe.asInstanceOf[Global] - val mTC: Type = mt.asInstanceOf[c.universe.Type] + val global: Global = c.universe.asInstanceOf[Global] + val mTC: Type = mt.asInstanceOf[c.universe.Type] - val ctx: c.type = c - val representationC: PolyType = { - val tcVariable: Symbol = newTCVariable(util.initialOwner) - val tupleTypeArgs = inputs.map(in => typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) - val tuple = global.definitions.tupleType(tupleTypeArgs) - PolyType(tcVariable :: Nil, tuple.asInstanceOf[Type] ) - } - val resultType = appliedType(representationC, idTC :: Nil) + val ctx: c.type = c + val representationC: PolyType = { + val tcVariable: Symbol = newTCVariable(util.initialOwner) + val tupleTypeArgs = inputs.map(in => typeRef(NoPrefix, tcVariable, in.tpe :: Nil).asInstanceOf[global.Type]) + val tuple = global.definitions.tupleType(tupleTypeArgs) + PolyType(tcVariable :: Nil, tuple.asInstanceOf[Type]) + } + val resultType = appliedType(representationC, idTC :: Nil) - val input: Tree = mkTuple(inputs.map(_.expr)) - val alistInstance: Tree = { - val selectTree = select(Ident(alist), TupleMethodName + inputs.size.toString) - TypeApply(selectTree, inputs.map(in => TypeTree(in.tpe))) - } - def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1) + val input: Tree = mkTuple(inputs.map(_.expr)) + val alistInstance: Tree = { + val selectTree = select(Ident(alist), TupleMethodName + inputs.size.toString) + TypeApply(selectTree, inputs.map(in => TypeTree(in.tpe))) + } + def extract(param: ValDef): List[ValDef] = bindTuple(param, Nil, inputs.map(_.local), 1) - def bindTuple(param: ValDef, revBindings: List[ValDef], params: List[ValDef], i: Int): List[ValDef] = - params match - { - case (x @ ValDef(mods, name, tpt, _)) :: xs => - val rhs = select(Ident(param.name), "_" + i.toString) - val newVal = treeCopy.ValDef(x, mods, name, tpt, rhs) - util.setSymbol(newVal, x.symbol) - bindTuple(param, newVal :: revBindings, xs, i+1) - case Nil => revBindings.reverse - } - } + def bindTuple(param: ValDef, revBindings: List[ValDef], params: List[ValDef], i: Int): List[ValDef] = + params match { + case (x @ ValDef(mods, name, tpt, _)) :: xs => + val rhs = select(Ident(param.name), "_" + i.toString) + val newVal = treeCopy.ValDef(x, mods, name, tpt, rhs) + util.setSymbol(newVal, x.symbol) + bindTuple(param, newVal :: revBindings, xs, i + 1) + case Nil => revBindings.reverse + } + } } diff --git a/util/collection/src/main/scala/sbt/AList.scala b/util/collection/src/main/scala/sbt/AList.scala index 1bc361e0d..10e1454e7 100644 --- a/util/collection/src/main/scala/sbt/AList.scala +++ b/util/collection/src/main/scala/sbt/AList.scala @@ -1,217 +1,212 @@ package sbt - import Classes.Applicative - import Types._ +import Classes.Applicative +import Types._ -/** An abstraction over a higher-order type constructor `K[x[y]]` with the purpose of abstracting -* over heterogeneous sequences like `KList` and `TupleN` with elements with a common type -* constructor as 
well as homogeneous sequences `Seq[M[T]]`. */ -trait AList[K[L[x]] ] -{ - def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N] - def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[K[P]] - def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A +/** + * An abstraction over a higher-order type constructor `K[x[y]]` with the purpose of abstracting + * over heterogeneous sequences like `KList` and `TupleN` with elements with a common type + * constructor as well as homogeneous sequences `Seq[M[T]]`. + */ +trait AList[K[L[x]]] { + def transform[M[_], N[_]](value: K[M], f: M ~> N): K[N] + def traverse[M[_], N[_], P[_]](value: K[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[K[P]] + def foldr[M[_], A](value: K[M], f: (M[_], A) => A, init: A): A - def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil) - def apply[M[_], C](value: K[M], f: K[Id] => C)(implicit a: Applicative[M]): M[C] = - a.map(f, traverse[M, M, Id](value, idK[M])(a)) + def toList[M[_]](value: K[M]): List[M[_]] = foldr[M, List[M[_]]](value, _ :: _, Nil) + def apply[M[_], C](value: K[M], f: K[Id] => C)(implicit a: Applicative[M]): M[C] = + a.map(f, traverse[M, M, Id](value, idK[M])(a)) } -object AList -{ - type Empty = AList[({ type l[L[x]] = Unit})#l] - /** AList for Unit, which represents a sequence that is always empty.*/ - val empty: Empty = new Empty { - def transform[M[_], N[_]](in: Unit, f: M ~> N) = () - def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init - override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure( f( () ) ) - def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure( () ) - } +object AList { + type Empty = AList[({ type l[L[x]] = Unit })#l] + /** AList for Unit, which represents a sequence that is always empty.*/ + val empty: Empty = new Empty { + def transform[M[_], N[_]](in: Unit, f: M ~> N) = () + def foldr[M[_], T](in: Unit, f: (M[_], T) => T, init: T) = init + override def apply[M[_], C](in: Unit, f: Unit => C)(implicit app: Applicative[M]): M[C] = app.pure(f(())) + def traverse[M[_], N[_], P[_]](in: Unit, f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Unit] = np.pure(()) + } - type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l] - /** AList for a homogeneous sequence. */ - def seq[T]: SeqList[T] = new SeqList[T] - { - def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T]) - def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)( (t, m) => f(m,t)) - override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = - { - def loop[V](in: List[M[T]], g: List[T] => V): M[V] = - in match { - case Nil => ap.pure(g(Nil)) - case x :: xs => - val h = (ts: List[T]) => (t: T) => g(t :: ts) - ap.apply( loop(xs, h), x ) - } - loop(s, f) - } - def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ??? - } - - /** AList for the abitrary arity data structure KList. 
*/ - def klist[KL[M[_]] <: KList[M] { type Transform[N[_]] = KL[N] }]: AList[KL] = new AList[KL] { - def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f) - def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init) - override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app) - def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N,P](f)(np) - override def toList[M[_]](k: KL[M]) = k.toList - } - - /** AList for a single value. */ - type Single[A] = AList[({ type l[L[x]] = L[A]})#l] - def single[A]: Single[A] = new Single[A] { - def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a) - def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init) - def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a) - } - - type ASplit[K[L[x]], B[x]] = AList[ ({ type l[L[x]] = K[ (L ∙ B)#l] })#l ] - /** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/ - def asplit[ K[L[x]], B[x] ](base: AList[K]): ASplit[K,B] = new ASplit[K, B] - { - type Split[ L[x] ] = K[ (L ∙ B)#l ] - def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] = - base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M,N,B](f)) - - def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = + type SeqList[T] = AList[({ type l[L[x]] = List[L[T]] })#l] + /** AList for a homogeneous sequence. */ + def seq[T]: SeqList[T] = new SeqList[T] { + def transform[M[_], N[_]](s: List[M[T]], f: M ~> N) = s.map(f.fn[T]) + def foldr[M[_], A](s: List[M[T]], f: (M[_], A) => A, init: A): A = (init /: s.reverse)((t, m) => f(m, t)) + override def apply[M[_], C](s: List[M[T]], f: List[T] => C)(implicit ap: Applicative[M]): M[C] = { - val g = nestCon[M, (N ∙ P)#l, B](f) - base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np) + def loop[V](in: List[M[T]], g: List[T] => V): M[V] = + in match { + case Nil => ap.pure(g(Nil)) + case x :: xs => + val h = (ts: List[T]) => (t: T) => g(t :: ts) + ap.apply(loop(xs, h), x) + } + loop(s, f) + } + def traverse[M[_], N[_], P[_]](s: List[M[T]], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[List[P[T]]] = ??? + } + + /** AList for the abitrary arity data structure KList. */ + def klist[KL[M[_]] <: KList[M] { type Transform[N[_]] = KL[N] }]: AList[KL] = new AList[KL] { + def transform[M[_], N[_]](k: KL[M], f: M ~> N) = k.transform(f) + def foldr[M[_], T](k: KL[M], f: (M[_], T) => T, init: T): T = k.foldr(f, init) + override def apply[M[_], C](k: KL[M], f: KL[Id] => C)(implicit app: Applicative[M]): M[C] = k.apply(f)(app) + def traverse[M[_], N[_], P[_]](k: KL[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KL[P]] = k.traverse[N, P](f)(np) + override def toList[M[_]](k: KL[M]) = k.toList + } + + /** AList for a single value. 
*/ + type Single[A] = AList[({ type l[L[x]] = L[A] })#l] + def single[A]: Single[A] = new Single[A] { + def transform[M[_], N[_]](a: M[A], f: M ~> N) = f(a) + def foldr[M[_], T](a: M[A], f: (M[_], T) => T, init: T): T = f(a, init) + def traverse[M[_], N[_], P[_]](a: M[A], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[P[A]] = f(a) + } + + type ASplit[K[L[x]], B[x]] = AList[({ type l[L[x]] = K[(L ∙ B)#l] })#l] + /** AList that operates on the outer type constructor `A` of a composition `[x] A[B[x]]` for type constructors `A` and `B`*/ + def asplit[K[L[x]], B[x]](base: AList[K]): ASplit[K, B] = new ASplit[K, B] { + type Split[L[x]] = K[(L ∙ B)#l] + def transform[M[_], N[_]](value: Split[M], f: M ~> N): Split[N] = + base.transform[(M ∙ B)#l, (N ∙ B)#l](value, nestCon[M, N, B](f)) + + def traverse[M[_], N[_], P[_]](value: Split[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Split[P]] = + { + val g = nestCon[M, (N ∙ P)#l, B](f) + base.traverse[(M ∙ B)#l, N, (P ∙ B)#l](value, g)(np) } - def foldr[M[_], A](value: Split[M], f: (M[_], A) => A, init: A): A = - base.foldr[(M ∙ B)#l, A](value, f, init) - } + def foldr[M[_], A](value: Split[M], f: (M[_], A) => A, init: A): A = + base.foldr[(M ∙ B)#l, A](value, f, init) + } - // TODO: auto-generate - sealed trait T2K[A,B] { type l[L[x]] = (L[A], L[B]) } - type T2List[A,B] = AList[T2K[A,B]#l] - def tuple2[A, B]: T2List[A,B] = new T2List[A,B] - { - type T2[M[_]] = (M[A], M[B]) - def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2)) - def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init)) - def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] = - { - val g = (Tuple2.apply[P[A], P[B]] _).curried - np.apply( np.map(g, f(t._1)), f(t._2) ) - } - } + // TODO: auto-generate + sealed trait T2K[A, B] { type l[L[x]] = (L[A], L[B]) } + type T2List[A, B] = AList[T2K[A, B]#l] + def tuple2[A, B]: T2List[A, B] = new T2List[A, B] { + type T2[M[_]] = (M[A], M[B]) + def transform[M[_], N[_]](t: T2[M], f: M ~> N): T2[N] = (f(t._1), f(t._2)) + def foldr[M[_], T](t: T2[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, init)) + def traverse[M[_], N[_], P[_]](t: T2[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T2[P]] = + { + val g = (Tuple2.apply[P[A], P[B]] _).curried + np.apply(np.map(g, f(t._1)), f(t._2)) + } + } - sealed trait T3K[A,B,C] { type l[L[x]] = (L[A], L[B], L[C]) } - type T3List[A,B,C] = AList[T3K[A,B,C]#l] - def tuple3[A, B, C]: T3List[A,B,C] = new T3List[A,B,C] - { - type T3[M[_]] = (M[A], M[B], M[C]) - def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3)) - def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init))) - def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = - { - val g = (Tuple3.apply[P[A],P[B],P[C]] _).curried - np.apply( np.apply( np.map(g, f(t._1)), f(t._2) ), f(t._3) ) - } - } + sealed trait T3K[A, B, C] { type l[L[x]] = (L[A], L[B], L[C]) } + type T3List[A, B, C] = AList[T3K[A, B, C]#l] + def tuple3[A, B, C]: T3List[A, B, C] = new T3List[A, B, C] { + type T3[M[_]] = (M[A], M[B], M[C]) + def transform[M[_], N[_]](t: T3[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3)) + def foldr[M[_], T](t: T3[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, init))) + def traverse[M[_], N[_], P[_]](t: T3[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T3[P]] = + { + val g = (Tuple3.apply[P[A], P[B], P[C]] _).curried + 
np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)) + } + } - sealed trait T4K[A,B,C,D] { type l[L[x]] = (L[A], L[B], L[C], L[D]) } - type T4List[A,B,C,D] = AList[T4K[A,B,C,D]#l] - def tuple4[A, B, C, D]: T4List[A,B,C,D] = new T4List[A,B,C,D] - { - type T4[M[_]] = (M[A], M[B], M[C], M[D]) - def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4)) - def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init)))) - def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = - { - val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried - np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)) - } - } + sealed trait T4K[A, B, C, D] { type l[L[x]] = (L[A], L[B], L[C], L[D]) } + type T4List[A, B, C, D] = AList[T4K[A, B, C, D]#l] + def tuple4[A, B, C, D]: T4List[A, B, C, D] = new T4List[A, B, C, D] { + type T4[M[_]] = (M[A], M[B], M[C], M[D]) + def transform[M[_], N[_]](t: T4[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4)) + def foldr[M[_], T](t: T4[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, init)))) + def traverse[M[_], N[_], P[_]](t: T4[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T4[P]] = + { + val g = (Tuple4.apply[P[A], P[B], P[C], P[D]] _).curried + np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)) + } + } - sealed trait T5K[A,B,C,D,E] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E]) } - type T5List[A,B,C,D,E] = AList[T5K[A,B,C,D,E]#l] - def tuple5[A, B, C, D, E]: T5List[A,B,C,D,E] = new T5List[A,B,C,D,E] { - type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E]) - def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5)) - def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init))))) - def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = - { - val g = (Tuple5.apply[P[A],P[B],P[C],P[D],P[E]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5) ) - } - } + sealed trait T5K[A, B, C, D, E] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E]) } + type T5List[A, B, C, D, E] = AList[T5K[A, B, C, D, E]#l] + def tuple5[A, B, C, D, E]: T5List[A, B, C, D, E] = new T5List[A, B, C, D, E] { + type T5[M[_]] = (M[A], M[B], M[C], M[D], M[E]) + def transform[M[_], N[_]](t: T5[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5)) + def foldr[M[_], T](t: T5[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, init))))) + def traverse[M[_], N[_], P[_]](t: T5[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T5[P]] = + { + val g = (Tuple5.apply[P[A], P[B], P[C], P[D], P[E]] _).curried + np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)) + } + } - sealed trait T6K[A,B,C,D,E,F] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F]) } - type T6List[A,B,C,D,E,F] = AList[T6K[A,B,C,D,E,F]#l] - def tuple6[A, B, C, D, E, F]: T6List[A,B,C,D,E,F] = new T6List[A,B,C,D,E,F] { - type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F]) - def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6)) - def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init)))))) - def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = - { - val 
g = (Tuple6.apply[P[A],P[B],P[C],P[D],P[E],P[F]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)) - } - } + sealed trait T6K[A, B, C, D, E, F] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F]) } + type T6List[A, B, C, D, E, F] = AList[T6K[A, B, C, D, E, F]#l] + def tuple6[A, B, C, D, E, F]: T6List[A, B, C, D, E, F] = new T6List[A, B, C, D, E, F] { + type T6[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F]) + def transform[M[_], N[_]](t: T6[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6)) + def foldr[M[_], T](t: T6[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, init)))))) + def traverse[M[_], N[_], P[_]](t: T6[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T6[P]] = + { + val g = (Tuple6.apply[P[A], P[B], P[C], P[D], P[E], P[F]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)) + } + } - sealed trait T7K[A,B,C,D,E,F,G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) } - type T7List[A,B,C,D,E,F,G] = AList[T7K[A,B,C,D,E,F,G]#l] - def tuple7[A,B,C,D,E,F,G]: T7List[A,B,C,D,E,F,G] = new T7List[A,B,C,D,E,F,G] { - type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G]) - def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7)) - def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init))))))) - def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = - { - val g = (Tuple7.apply[P[A],P[B],P[C],P[D],P[E],P[F],P[G]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)) - } - } - sealed trait T8K[A,B,C,D,E,F,G,H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) } - type T8List[A,B,C,D,E,F,G,H] = AList[T8K[A,B,C,D,E,F,G,H]#l] - def tuple8[A,B,C,D,E,F,G,H]: T8List[A,B,C,D,E,F,G,H] = new T8List[A,B,C,D,E,F,G,H] { - type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H]) - def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8)) - def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init)))))))) - def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = - { - val g = (Tuple8.apply[P[A],P[B],P[C],P[D],P[E],P[F],P[G],P[H]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)) - } - } + sealed trait T7K[A, B, C, D, E, F, G] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G]) } + type T7List[A, B, C, D, E, F, G] = AList[T7K[A, B, C, D, E, F, G]#l] + def tuple7[A, B, C, D, E, F, G]: T7List[A, B, C, D, E, F, G] = new T7List[A, B, C, D, E, F, G] { + type T7[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G]) + def transform[M[_], N[_]](t: T7[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7)) + def foldr[M[_], T](t: T7[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, init))))))) + def traverse[M[_], N[_], P[_]](t: T7[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T7[P]] = + { + val g = (Tuple7.apply[P[A], 
P[B], P[C], P[D], P[E], P[F], P[G]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)) + } + } + sealed trait T8K[A, B, C, D, E, F, G, H] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H]) } + type T8List[A, B, C, D, E, F, G, H] = AList[T8K[A, B, C, D, E, F, G, H]#l] + def tuple8[A, B, C, D, E, F, G, H]: T8List[A, B, C, D, E, F, G, H] = new T8List[A, B, C, D, E, F, G, H] { + type T8[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H]) + def transform[M[_], N[_]](t: T8[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8)) + def foldr[M[_], T](t: T8[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, init)))))))) + def traverse[M[_], N[_], P[_]](t: T8[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T8[P]] = + { + val g = (Tuple8.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)) + } + } - sealed trait T9K[A,B,C,D,E,F,G,H,I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) } - type T9List[A,B,C,D,E,F,G,H,I] = AList[T9K[A,B,C,D,E,F,G,H,I]#l] - def tuple9[A,B,C,D,E,F,G,H,I]: T9List[A,B,C,D,E,F,G,H,I] = new T9List[A,B,C,D,E,F,G,H,I] { - type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I]) - def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9)) - def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init))))))))) - def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = - { - val g = (Tuple9.apply[P[A],P[B],P[C],P[D],P[E],P[F],P[G],P[H],P[I]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)) - } - } + sealed trait T9K[A, B, C, D, E, F, G, H, I] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I]) } + type T9List[A, B, C, D, E, F, G, H, I] = AList[T9K[A, B, C, D, E, F, G, H, I]#l] + def tuple9[A, B, C, D, E, F, G, H, I]: T9List[A, B, C, D, E, F, G, H, I] = new T9List[A, B, C, D, E, F, G, H, I] { + type T9[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I]) + def transform[M[_], N[_]](t: T9[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9)) + def foldr[M[_], T](t: T9[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, init))))))))) + def traverse[M[_], N[_], P[_]](t: T9[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T9[P]] = + { + val g = (Tuple9.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)) + } + } - sealed trait T10K[A,B,C,D,E,F,G,H,I,J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) } - type T10List[A,B,C,D,E,F,G,H,I,J] = AList[T10K[A,B,C,D,E,F,G,H,I,J]#l] - def tuple10[A,B,C,D,E,F,G,H,I,J]: T10List[A,B,C,D,E,F,G,H,I,J] = new T10List[A,B,C,D,E,F,G,H,I,J] { - type T10[M[_]] = (M[A], 
M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) - def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10)) - def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))) - def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = - { - val g = (Tuple10.apply[P[A],P[B],P[C],P[D],P[E],P[F],P[G],P[H],P[I],P[J]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)) - } - } + sealed trait T10K[A, B, C, D, E, F, G, H, I, J] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J]) } + type T10List[A, B, C, D, E, F, G, H, I, J] = AList[T10K[A, B, C, D, E, F, G, H, I, J]#l] + def tuple10[A, B, C, D, E, F, G, H, I, J]: T10List[A, B, C, D, E, F, G, H, I, J] = new T10List[A, B, C, D, E, F, G, H, I, J] { + type T10[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J]) + def transform[M[_], N[_]](t: T10[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10)) + def foldr[M[_], T](t: T10[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, init)))))))))) + def traverse[M[_], N[_], P[_]](t: T10[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T10[P]] = + { + val g = (Tuple10.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)) + } + } - sealed trait T11K[A,B,C,D,E,F,G,H,I,J,K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) } - type T11List[A,B,C,D,E,F,G,H,I,J,K] = AList[T11K[A,B,C,D,E,F,G,H,I,J,K]#l] - def tuple11[A,B,C,D,E,F,G,H,I,J,K]: T11List[A,B,C,D,E,F,G,H,I,J,K] = new T11List[A,B,C,D,E,F,G,H,I,J,K] { - type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) - def transform[M[_], N[_]](t: T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11)) - def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11,init))))))))))) - def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = - { - val g = (Tuple11.apply[P[A],P[B],P[C],P[D],P[E],P[F],P[G],P[H],P[I],P[J],P[K]] _ ).curried - np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.apply( np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11)) - } - } + sealed trait T11K[A, B, C, D, E, F, G, H, I, J, K] { type l[L[x]] = (L[A], L[B], L[C], L[D], L[E], L[F], L[G], L[H], L[I], L[J], L[K]) } + type T11List[A, B, C, D, E, F, G, H, I, J, K] = AList[T11K[A, B, C, D, E, F, G, H, I, J, K]#l] + def tuple11[A, B, C, D, E, F, G, H, I, J, K]: T11List[A, B, C, D, E, F, G, H, I, J, K] = new T11List[A, B, C, D, E, F, G, H, I, J, K] { + type T11[M[_]] = (M[A], M[B], M[C], M[D], M[E], M[F], M[G], M[H], M[I], M[J], M[K]) + def transform[M[_], N[_]](t: 
T11[M], f: M ~> N) = (f(t._1), f(t._2), f(t._3), f(t._4), f(t._5), f(t._6), f(t._7), f(t._8), f(t._9), f(t._10), f(t._11)) + def foldr[M[_], T](t: T11[M], f: (M[_], T) => T, init: T): T = f(t._1, f(t._2, f(t._3, f(t._4, f(t._5, f(t._6, f(t._7, f(t._8, f(t._9, f(t._10, f(t._11, init))))))))))) + def traverse[M[_], N[_], P[_]](t: T11[M], f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[T11[P]] = + { + val g = (Tuple11.apply[P[A], P[B], P[C], P[D], P[E], P[F], P[G], P[H], P[I], P[J], P[K]] _).curried + np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.apply(np.map(g, f(t._1)), f(t._2)), f(t._3)), f(t._4)), f(t._5)), f(t._6)), f(t._7)), f(t._8)), f(t._9)), f(t._10)), f(t._11)) + } + } } diff --git a/util/collection/src/main/scala/sbt/Attributes.scala b/util/collection/src/main/scala/sbt/Attributes.scala index 456a74482..64f379012 100644 --- a/util/collection/src/main/scala/sbt/Attributes.scala +++ b/util/collection/src/main/scala/sbt/Attributes.scala @@ -10,190 +10,201 @@ import scala.reflect.Manifest // Because it is sealed and the only instances go through AttributeKey.apply, // a single AttributeKey instance cannot conform to AttributeKey[T] for different Ts -/** A key in an [[AttributeMap]] that constrains its associated value to be of type `T`. -* The key is uniquely defined by its [[label]] and type `T`, represented at runtime by [[manifest]]. */ +/** + * A key in an [[AttributeMap]] that constrains its associated value to be of type `T`. + * The key is uniquely defined by its [[label]] and type `T`, represented at runtime by [[manifest]]. + */ sealed trait AttributeKey[T] { - /** The runtime evidence for `T` */ - def manifest: Manifest[T] + /** The runtime evidence for `T` */ + def manifest: Manifest[T] - @deprecated("Should only be used for compatibility during the transition from hyphenated labels to camelCase labels.", "0.13.0") - def rawLabel: String + @deprecated("Should only be used for compatibility during the transition from hyphenated labels to camelCase labels.", "0.13.0") + def rawLabel: String - /** The label is the identifier for the key and is camelCase by convention. */ - def label: String + /** The label is the identifier for the key and is camelCase by convention. */ + def label: String - /** An optional, brief description of the key. */ - def description: Option[String] + /** An optional, brief description of the key. */ + def description: Option[String] - /** In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys. - * The delegation proceeds in order the keys are returned here.*/ - def extend: Seq[AttributeKey[_]] + /** + * In environments that support delegation, looking up this key when it has no associated value will delegate to the values associated with these keys. + * The delegation proceeds in order the keys are returned here. + */ + def extend: Seq[AttributeKey[_]] - /** Specifies whether this key is a local, anonymous key (`true`) or not (`false`). - * This is typically only used for programmatic, intermediate keys that should not be referenced outside of a specific scope. */ - def isLocal: Boolean + /** + * Specifies whether this key is a local, anonymous key (`true`) or not (`false`). + * This is typically only used for programmatic, intermediate keys that should not be referenced outside of a specific scope. 
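// Illustrative usage sketch (editor-supplied, not part of this patch), assuming
// sbt's util/collection module is on the classpath: an AttributeKey is identified
// by its label *and* its type, so a key with the same label but a different type
// does not see the stored value. The key names below are invented for the example.
import sbt.{ AttributeKey, AttributeMap }

object AttributeMapSketch {
  val count: AttributeKey[Int] = AttributeKey[Int]("count", "An example counter key.")
  val countAsString: AttributeKey[String] = AttributeKey[String]("count")

  def main(args: Array[String]): Unit = {
    val m: AttributeMap = AttributeMap.empty.put(count, 3)
    println(m.get(count))         // Some(3)
    println(m.get(countAsString)) // None: same label, different type
    println(m.contains(count))    // true
  }
}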
+ */ + def isLocal: Boolean - /** Identifies the relative importance of a key among other keys.*/ - def rank: Int + /** Identifies the relative importance of a key among other keys.*/ + def rank: Int } private[sbt] abstract class SharedAttributeKey[T] extends AttributeKey[T] { - override final def toString = label - override final def hashCode = label.hashCode - override final def equals(o: Any) = (this eq o.asInstanceOf[AnyRef]) || (o match { - case a: SharedAttributeKey[t] => a.label == this.label && a.manifest == this.manifest - case _ => false - }) - final def isLocal: Boolean = false + override final def toString = label + override final def hashCode = label.hashCode + override final def equals(o: Any) = (this eq o.asInstanceOf[AnyRef]) || (o match { + case a: SharedAttributeKey[t] => a.label == this.label && a.manifest == this.manifest + case _ => false + }) + final def isLocal: Boolean = false } -object AttributeKey -{ - def apply[T](name: String)(implicit mf: Manifest[T]): AttributeKey[T] = - make(name, None, Nil, Int.MaxValue) +object AttributeKey { + def apply[T](name: String)(implicit mf: Manifest[T]): AttributeKey[T] = + make(name, None, Nil, Int.MaxValue) - def apply[T](name: String, rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = - make(name, None, Nil, rank) + def apply[T](name: String, rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = + make(name, None, Nil, rank) - def apply[T](name: String, description: String)(implicit mf: Manifest[T]): AttributeKey[T] = - apply(name, description, Nil) + def apply[T](name: String, description: String)(implicit mf: Manifest[T]): AttributeKey[T] = + apply(name, description, Nil) - def apply[T](name: String, description: String, rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = - apply(name, description, Nil, rank) + def apply[T](name: String, description: String, rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = + apply(name, description, Nil, rank) - def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]])(implicit mf: Manifest[T]): AttributeKey[T] = - apply(name, description, extend, Int.MaxValue) + def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]])(implicit mf: Manifest[T]): AttributeKey[T] = + apply(name, description, extend, Int.MaxValue) - def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]], rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = - make(name, Some(description), extend, rank) + def apply[T](name: String, description: String, extend: Seq[AttributeKey[_]], rank: Int)(implicit mf: Manifest[T]): AttributeKey[T] = + make(name, Some(description), extend, rank) - private[this] def make[T](name: String, description0: Option[String], extend0: Seq[AttributeKey[_]], rank0: Int)(implicit mf: Manifest[T]): AttributeKey[T] = new SharedAttributeKey[T] { - def manifest = mf - def rawLabel = name - val label = Util.hyphenToCamel(name) - def description = description0 - def extend = extend0 - def rank = rank0 - } - private[sbt] def local[T](implicit mf: Manifest[T]): AttributeKey[T] = new AttributeKey[T] { - def manifest = mf - def rawLabel = LocalLabel - def label = LocalLabel - def description = None - def extend = Nil - override def toString = label - def isLocal: Boolean = true - def rank = Int.MaxValue - } - private[sbt] final val LocalLabel = "$local" + private[this] def make[T](name: String, description0: Option[String], extend0: Seq[AttributeKey[_]], rank0: Int)(implicit mf: Manifest[T]): AttributeKey[T] = new 
SharedAttributeKey[T] { + def manifest = mf + def rawLabel = name + val label = Util.hyphenToCamel(name) + def description = description0 + def extend = extend0 + def rank = rank0 + } + private[sbt] def local[T](implicit mf: Manifest[T]): AttributeKey[T] = new AttributeKey[T] { + def manifest = mf + def rawLabel = LocalLabel + def label = LocalLabel + def description = None + def extend = Nil + override def toString = label + def isLocal: Boolean = true + def rank = Int.MaxValue + } + private[sbt] final val LocalLabel = "$local" } -/** An immutable map where a key is the tuple `(String,T)` for a fixed type `T` and can only be associated with values of type `T`. -* It is therefore possible for this map to contain mappings for keys with the same label but different types. -* Excluding this possibility is the responsibility of the client if desired. */ -trait AttributeMap -{ - /** Gets the value of type `T` associated with the key `k`. - * If a key with the same label but different type is defined, this method will fail. */ - def apply[T](k: AttributeKey[T]): T +/** + * An immutable map where a key is the tuple `(String,T)` for a fixed type `T` and can only be associated with values of type `T`. + * It is therefore possible for this map to contain mappings for keys with the same label but different types. + * Excluding this possibility is the responsibility of the client if desired. + */ +trait AttributeMap { + /** + * Gets the value of type `T` associated with the key `k`. + * If a key with the same label but different type is defined, this method will fail. + */ + def apply[T](k: AttributeKey[T]): T - /** Gets the value of type `T` associated with the key `k` or `None` if no value is associated. - * If a key with the same label but a different type is defined, this method will return `None`. */ - def get[T](k: AttributeKey[T]): Option[T] + /** + * Gets the value of type `T` associated with the key `k` or `None` if no value is associated. + * If a key with the same label but a different type is defined, this method will return `None`. + */ + def get[T](k: AttributeKey[T]): Option[T] - /** Returns this map without the mapping for `k`. - * This method will not remove a mapping for a key with the same label but a different type. */ - def remove[T](k: AttributeKey[T]): AttributeMap + /** + * Returns this map without the mapping for `k`. + * This method will not remove a mapping for a key with the same label but a different type. + */ + def remove[T](k: AttributeKey[T]): AttributeMap - /** Returns true if this map contains a mapping for `k`. - * If a key with the same label but a different type is defined in this map, this method will return `false`. */ - def contains[T](k: AttributeKey[T]): Boolean + /** + * Returns true if this map contains a mapping for `k`. + * If a key with the same label but a different type is defined in this map, this method will return `false`. + */ + def contains[T](k: AttributeKey[T]): Boolean - /** Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. - * Any mappings for keys with the same label but different types are unaffected. */ - def put[T](k: AttributeKey[T], value: T): AttributeMap + /** + * Adds the mapping `k -> value` to this map, replacing any existing mapping for `k`. + * Any mappings for keys with the same label but different types are unaffected. + */ + def put[T](k: AttributeKey[T], value: T): AttributeMap - /** All keys with defined mappings. There may be multiple keys with the same `label`, but different types. 
*/ - def keys: Iterable[AttributeKey[_]] + /** All keys with defined mappings. There may be multiple keys with the same `label`, but different types. */ + def keys: Iterable[AttributeKey[_]] - /** Adds the mappings in `o` to this map, with mappings in `o` taking precedence over existing mappings.*/ - def ++(o: Iterable[AttributeEntry[_]]): AttributeMap + /** Adds the mappings in `o` to this map, with mappings in `o` taking precedence over existing mappings.*/ + def ++(o: Iterable[AttributeEntry[_]]): AttributeMap - /** Combines the mappings in `o` with the mappings in this map, with mappings in `o` taking precedence over existing mappings.*/ - def ++(o: AttributeMap): AttributeMap + /** Combines the mappings in `o` with the mappings in this map, with mappings in `o` taking precedence over existing mappings.*/ + def ++(o: AttributeMap): AttributeMap - /** All mappings in this map. The [[AttributeEntry]] type preserves the typesafety of mappings, although the specific types are unknown.*/ - def entries: Iterable[AttributeEntry[_]] + /** All mappings in this map. The [[AttributeEntry]] type preserves the typesafety of mappings, although the specific types are unknown.*/ + def entries: Iterable[AttributeEntry[_]] - /** `true` if there are no mappings in this map, `false` if there are. */ - def isEmpty: Boolean + /** `true` if there are no mappings in this map, `false` if there are. */ + def isEmpty: Boolean } -object AttributeMap -{ - /** An [[AttributeMap]] without any mappings. */ - val empty: AttributeMap = new BasicAttributeMap(Map.empty) +object AttributeMap { + /** An [[AttributeMap]] without any mappings. */ + val empty: AttributeMap = new BasicAttributeMap(Map.empty) - /** Constructs an [[AttributeMap]] containing the given `entries`. */ - def apply(entries: Iterable[AttributeEntry[_]]): AttributeMap = empty ++ entries + /** Constructs an [[AttributeMap]] containing the given `entries`. */ + def apply(entries: Iterable[AttributeEntry[_]]): AttributeMap = empty ++ entries - /** Constructs an [[AttributeMap]] containing the given `entries`.*/ - def apply(entries: AttributeEntry[_]*): AttributeMap = empty ++ entries + /** Constructs an [[AttributeMap]] containing the given `entries`.*/ + def apply(entries: AttributeEntry[_]*): AttributeMap = empty ++ entries - /** Presents an `AttributeMap` as a natural transformation. */ - implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = new (AttributeKey ~> Id) { - def apply[T](key: AttributeKey[T]): T = map(key) - } + /** Presents an `AttributeMap` as a natural transformation. 
*/ + implicit def toNatTrans(map: AttributeMap): AttributeKey ~> Id = new (AttributeKey ~> Id) { + def apply[T](key: AttributeKey[T]): T = map(key) + } } -private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any]) extends AttributeMap -{ - def isEmpty: Boolean = backing.isEmpty - def apply[T](k: AttributeKey[T]) = backing(k).asInstanceOf[T] - def get[T](k: AttributeKey[T]) = backing.get(k).asInstanceOf[Option[T]] - def remove[T](k: AttributeKey[T]): AttributeMap = new BasicAttributeMap( backing - k ) - def contains[T](k: AttributeKey[T]) = backing.contains(k) - def put[T](k: AttributeKey[T], value: T): AttributeMap = new BasicAttributeMap( backing.updated(k, value) ) - def keys: Iterable[AttributeKey[_]] = backing.keys - def ++(o: Iterable[AttributeEntry[_]]): AttributeMap = - { - val newBacking = (backing /: o) { case (b, AttributeEntry(key, value)) => b.updated(key, value) } - new BasicAttributeMap(newBacking) - } - def ++(o: AttributeMap): AttributeMap = - o match { - case bam: BasicAttributeMap => new BasicAttributeMap(backing ++ bam.backing) - case _ => o ++ this - } - def entries: Iterable[AttributeEntry[_]] = - for( (k: AttributeKey[kt], v) <- backing) yield AttributeEntry(k, v.asInstanceOf[kt]) - override def toString = entries.mkString("(", ", ", ")") +private class BasicAttributeMap(private val backing: Map[AttributeKey[_], Any]) extends AttributeMap { + def isEmpty: Boolean = backing.isEmpty + def apply[T](k: AttributeKey[T]) = backing(k).asInstanceOf[T] + def get[T](k: AttributeKey[T]) = backing.get(k).asInstanceOf[Option[T]] + def remove[T](k: AttributeKey[T]): AttributeMap = new BasicAttributeMap(backing - k) + def contains[T](k: AttributeKey[T]) = backing.contains(k) + def put[T](k: AttributeKey[T], value: T): AttributeMap = new BasicAttributeMap(backing.updated(k, value)) + def keys: Iterable[AttributeKey[_]] = backing.keys + def ++(o: Iterable[AttributeEntry[_]]): AttributeMap = + { + val newBacking = (backing /: o) { case (b, AttributeEntry(key, value)) => b.updated(key, value) } + new BasicAttributeMap(newBacking) + } + def ++(o: AttributeMap): AttributeMap = + o match { + case bam: BasicAttributeMap => new BasicAttributeMap(backing ++ bam.backing) + case _ => o ++ this + } + def entries: Iterable[AttributeEntry[_]] = + for ((k: AttributeKey[kt], v) <- backing) yield AttributeEntry(k, v.asInstanceOf[kt]) + override def toString = entries.mkString("(", ", ", ")") } // type inference required less generality /** A map entry where `key` is constrained to only be associated with a fixed value of type `T`. */ -final case class AttributeEntry[T](key: AttributeKey[T], value: T) -{ - override def toString = key.label + ": " + value +final case class AttributeEntry[T](key: AttributeKey[T], value: T) { + override def toString = key.label + ": " + value } /** Associates a `metadata` map with `data`. */ -final case class Attributed[D](data: D)(val metadata: AttributeMap) -{ - /** Retrieves the associated value of `key` from the metadata. */ - def get[T](key: AttributeKey[T]): Option[T] = metadata.get(key) +final case class Attributed[D](data: D)(val metadata: AttributeMap) { + /** Retrieves the associated value of `key` from the metadata. */ + def get[T](key: AttributeKey[T]): Option[T] = metadata.get(key) - /** Defines a mapping `key -> value` in the metadata. */ - def put[T](key: AttributeKey[T], value: T): Attributed[D] = Attributed(data)(metadata.put(key, value)) + /** Defines a mapping `key -> value` in the metadata. 
*/ + def put[T](key: AttributeKey[T], value: T): Attributed[D] = Attributed(data)(metadata.put(key, value)) - /** Transforms the data by applying `f`. */ - def map[T](f: D => T): Attributed[T] = Attributed(f(data))(metadata) + /** Transforms the data by applying `f`. */ + def map[T](f: D => T): Attributed[T] = Attributed(f(data))(metadata) } -object Attributed -{ - /** Extracts the underlying data from the sequence `in`. */ - def data[T](in: Seq[Attributed[T]]): Seq[T] = in.map(_.data) +object Attributed { + /** Extracts the underlying data from the sequence `in`. */ + def data[T](in: Seq[Attributed[T]]): Seq[T] = in.map(_.data) - /** Associates empty metadata maps with each entry of `in`.*/ - def blankSeq[T](in: Seq[T]): Seq[Attributed[T]] = in map blank + /** Associates empty metadata maps with each entry of `in`.*/ + def blankSeq[T](in: Seq[T]): Seq[Attributed[T]] = in map blank - /** Associates an empty metadata map with `data`. */ - def blank[T](data: T): Attributed[T] = Attributed(data)(AttributeMap.empty) + /** Associates an empty metadata map with `data`. */ + def blank[T](data: T): Attributed[T] = Attributed(data)(AttributeMap.empty) } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Classes.scala b/util/collection/src/main/scala/sbt/Classes.scala index 74796c829..1db644f96 100644 --- a/util/collection/src/main/scala/sbt/Classes.scala +++ b/util/collection/src/main/scala/sbt/Classes.scala @@ -1,27 +1,24 @@ package sbt -object Classes -{ - trait Applicative[M[_]] - { - def apply[S,T](f: M[S => T], v: M[S]): M[T] - def pure[S](s: => S): M[S] - def map[S, T](f: S => T, v: M[S]): M[T] - } - trait Monad[M[_]] extends Applicative[M] - { - def flatten[T](m: M[M[T]]): M[T] - } - implicit val optionMonad: Monad[Option] = new Monad[Option] { - def apply[S,T](f: Option[S => T], v: Option[S]) = (f, v) match { case (Some(fv), Some(vv)) => Some(fv(vv)); case _ => None } - def pure[S](s: => S) = Some(s) - def map[S, T](f: S => T, v: Option[S]) = v map f - def flatten[T](m: Option[Option[T]]): Option[T] = m.flatten - } - implicit val listMonad: Monad[List] = new Monad[List] { - def apply[S,T](f: List[S => T], v: List[S]) = for(fv <- f; vv <- v) yield fv(vv) - def pure[S](s: => S) = s :: Nil - def map[S, T](f: S => T, v: List[S]) = v map f - def flatten[T](m: List[List[T]]): List[T] = m.flatten - } +object Classes { + trait Applicative[M[_]] { + def apply[S, T](f: M[S => T], v: M[S]): M[T] + def pure[S](s: => S): M[S] + def map[S, T](f: S => T, v: M[S]): M[T] + } + trait Monad[M[_]] extends Applicative[M] { + def flatten[T](m: M[M[T]]): M[T] + } + implicit val optionMonad: Monad[Option] = new Monad[Option] { + def apply[S, T](f: Option[S => T], v: Option[S]) = (f, v) match { case (Some(fv), Some(vv)) => Some(fv(vv)); case _ => None } + def pure[S](s: => S) = Some(s) + def map[S, T](f: S => T, v: Option[S]) = v map f + def flatten[T](m: Option[Option[T]]): Option[T] = m.flatten + } + implicit val listMonad: Monad[List] = new Monad[List] { + def apply[S, T](f: List[S => T], v: List[S]) = for (fv <- f; vv <- v) yield fv(vv) + def pure[S](s: => S) = s :: Nil + def map[S, T](f: S => T, v: List[S]) = v map f + def flatten[T](m: List[List[T]]): List[T] = m.flatten + } } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Dag.scala b/util/collection/src/main/scala/sbt/Dag.scala index f0594ed50..7c0fd6f2c 100644 --- a/util/collection/src/main/scala/sbt/Dag.scala +++ b/util/collection/src/main/scala/sbt/Dag.scala @@ -3,130 +3,126 @@ */ package sbt; -trait 
Dag[Node <: Dag[Node]]{ - self : Node => +trait Dag[Node <: Dag[Node]] { + self: Node => - def dependencies : Iterable[Node] - def topologicalSort = Dag.topologicalSort(self)(_.dependencies) + def dependencies: Iterable[Node] + def topologicalSort = Dag.topologicalSort(self)(_.dependencies) } -object Dag -{ - import scala.collection.{mutable, JavaConverters} - import JavaConverters.asScalaSetConverter +object Dag { + import scala.collection.{ mutable, JavaConverters } + import JavaConverters.asScalaSetConverter - def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] = topologicalSort(root :: Nil)(dependencies) + def topologicalSort[T](root: T)(dependencies: T => Iterable[T]): List[T] = topologicalSort(root :: Nil)(dependencies) - def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = - { - val discovered = new mutable.HashSet[T] - val finished = (new java.util.LinkedHashSet[T]).asScala + def topologicalSort[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = + { + val discovered = new mutable.HashSet[T] + val finished = (new java.util.LinkedHashSet[T]).asScala - def visitAll(nodes: Iterable[T]) = nodes foreach visit - def visit(node : T){ - if (!discovered(node)) { - discovered(node) = true; - try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c } - finished += node; - } - else if(!finished(node)) - throw new Cyclic(node) - } + def visitAll(nodes: Iterable[T]) = nodes foreach visit + def visit(node: T) { + if (!discovered(node)) { + discovered(node) = true; + try { visitAll(dependencies(node)); } catch { case c: Cyclic => throw node :: c } + finished += node; + } else if (!finished(node)) + throw new Cyclic(node) + } - visitAll(nodes); + visitAll(nodes); - finished.toList; - } - // doesn't check for cycles - def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = topologicalSortUnchecked(node :: Nil)(dependencies) + finished.toList; + } + // doesn't check for cycles + def topologicalSortUnchecked[T](node: T)(dependencies: T => Iterable[T]): List[T] = topologicalSortUnchecked(node :: Nil)(dependencies) - def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = - { - val discovered = new mutable.HashSet[T] - var finished: List[T] = Nil + def topologicalSortUnchecked[T](nodes: Iterable[T])(dependencies: T => Iterable[T]): List[T] = + { + val discovered = new mutable.HashSet[T] + var finished: List[T] = Nil - def visitAll(nodes: Iterable[T]) = nodes foreach visit - def visit(node : T){ - if (!discovered(node)) { - discovered(node) = true; - visitAll(dependencies(node)) - finished ::= node; - } - } + def visitAll(nodes: Iterable[T]) = nodes foreach visit + def visit(node: T) { + if (!discovered(node)) { + discovered(node) = true; + visitAll(dependencies(node)) + finished ::= node; + } + } - visitAll(nodes); - finished; - } - final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean) - extends Exception( "Cyclic reference involving " + - (if(complete) all.mkString("\n ", "\n ", "") else value) - ) - { - def this(value: Any) = this(value, value :: Nil, false) - override def toString = getMessage - def ::(a: Any): Cyclic = - if(complete) - this - else if(a == value) - new Cyclic(value, all, true) - else - new Cyclic(value, a :: all, false) - } + visitAll(nodes); + finished; + } + final class Cyclic(val value: Any, val all: List[Any], val complete: Boolean) + extends Exception("Cyclic reference involving " + + (if (complete) 
all.mkString("\n ", "\n ", "") else value) + ) { + def this(value: Any) = this(value, value :: Nil, false) + override def toString = getMessage + def ::(a: Any): Cyclic = + if (complete) + this + else if (a == value) + new Cyclic(value, all, true) + else + new Cyclic(value, a :: all, false) + } - /** A directed graph with edges labeled positive or negative. */ - private[sbt] trait DirectedSignedGraph[Node] - { - /** Directed edge type that tracks the sign and target (head) vertex. - * The sign can be obtained via [[isNegative]] and the target vertex via [[head]]. */ - type Arrow - /** List of initial nodes. */ - def nodes: List[Arrow] - /** Outgoing edges for `n`. */ - def dependencies(n: Node): List[Arrow] - /** `true` if the edge `a` is "negative", false if it is "positive". */ - def isNegative(a: Arrow): Boolean - /** The target of the directed edge `a`. */ - def head(a: Arrow): Node - } + /** A directed graph with edges labeled positive or negative. */ + private[sbt] trait DirectedSignedGraph[Node] { + /** + * Directed edge type that tracks the sign and target (head) vertex. + * The sign can be obtained via [[isNegative]] and the target vertex via [[head]]. + */ + type Arrow + /** List of initial nodes. */ + def nodes: List[Arrow] + /** Outgoing edges for `n`. */ + def dependencies(n: Node): List[Arrow] + /** `true` if the edge `a` is "negative", false if it is "positive". */ + def isNegative(a: Arrow): Boolean + /** The target of the directed edge `a`. */ + def head(a: Arrow): Node + } - /** Traverses a directed graph defined by `graph` looking for a cycle that includes a "negative" edge. - * The directed edges are weighted by the caller as "positive" or "negative". - * If a cycle containing a "negative" edge is detected, its member edges are returned in order. - * Otherwise, the empty list is returned. */ - private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = - { - import scala.annotation.tailrec - import graph._ - val finished = new mutable.HashSet[Node] - val visited = new mutable.HashSet[Node] + /** + * Traverses a directed graph defined by `graph` looking for a cycle that includes a "negative" edge. + * The directed edges are weighted by the caller as "positive" or "negative". + * If a cycle containing a "negative" edge is detected, its member edges are returned in order. + * Otherwise, the empty list is returned. + */ + private[sbt] def findNegativeCycle[Node](graph: DirectedSignedGraph[Node]): List[graph.Arrow] = + { + import scala.annotation.tailrec + import graph._ + val finished = new mutable.HashSet[Node] + val visited = new mutable.HashSet[Node] - def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match { - case Nil => Nil - case edge :: tail => - val node = head(edge) - if(!visited(node)) - { - visited += node - visit(dependencies(node), edge :: stack) match { - case Nil => - finished += node - visit(tail, stack) - case cycle => cycle - } - } - else if(!finished(node)) - { - // cycle. If a negative edge is involved, it is an error. 
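// Illustrative usage sketch (editor-supplied, not part of this patch), assuming
// sbt's util/collection module is on the classpath: Dag.topologicalSort lists each
// node after its dependencies, and a cycle surfaces as a Dag.Cyclic exception.
// The task names below are invented for the example.
import sbt.Dag

object DagSketch {
  // "compile" needs "sources" and "classpath"; "classpath" needs "update".
  val deps: Map[String, List[String]] =
    Map("compile" -> List("sources", "classpath"), "classpath" -> List("update"))
      .withDefaultValue(Nil)

  def main(args: Array[String]): Unit = {
    println(Dag.topologicalSort("compile")(deps)) // List(sources, update, classpath, compile)
    val cyclic = Map("a" -> List("b"), "b" -> List("a"))
    try Dag.topologicalSort("a")(cyclic)
    catch { case c: Dag.Cyclic => println(c) } // prints the detected cycle
  }
}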
- val between = edge :: stack.takeWhile(f => head(f) != node) - if(between exists isNegative) - between - else - visit(tail, stack) - } - else - visit(tail, stack) - } + def visit(edges: List[Arrow], stack: List[Arrow]): List[Arrow] = edges match { + case Nil => Nil + case edge :: tail => + val node = head(edge) + if (!visited(node)) { + visited += node + visit(dependencies(node), edge :: stack) match { + case Nil => + finished += node + visit(tail, stack) + case cycle => cycle + } + } else if (!finished(node)) { + // cycle. If a negative edge is involved, it is an error. + val between = edge :: stack.takeWhile(f => head(f) != node) + if (between exists isNegative) + between + else + visit(tail, stack) + } else + visit(tail, stack) + } - visit(graph.nodes, Nil) - } + visit(graph.nodes, Nil) + } } diff --git a/util/collection/src/main/scala/sbt/HList.scala b/util/collection/src/main/scala/sbt/HList.scala index cb76594d0..23f5488c6 100644 --- a/util/collection/src/main/scala/sbt/HList.scala +++ b/util/collection/src/main/scala/sbt/HList.scala @@ -5,30 +5,28 @@ package sbt import Types._ -/** A minimal heterogeneous list type. For background, see -* http://apocalisp.wordpress.com/2010/07/06/type-level-programming-in-scala-part-6a-heterogeneous-list basics/ */ -sealed trait HList -{ - type Wrap[M[_]] <: HList +/** + * A minimal heterogeneous list type. For background, see + * http://apocalisp.wordpress.com/2010/07/06/type-level-programming-in-scala-part-6a-heterogeneous-list basics/ + */ +sealed trait HList { + type Wrap[M[_]] <: HList } -sealed trait HNil extends HList -{ - type Wrap[M[_]] = HNil - def :+: [G](g: G): G :+: HNil = HCons(g, this) +sealed trait HNil extends HList { + type Wrap[M[_]] = HNil + def :+:[G](g: G): G :+: HNil = HCons(g, this) - override def toString = "HNil" + override def toString = "HNil" } object HNil extends HNil -final case class HCons[H, T <: HList](head : H, tail : T) extends HList -{ - type Wrap[M[_]] = M[H] :+: T#Wrap[M] - def :+: [G](g: G): G :+: H :+: T = HCons(g, this) +final case class HCons[H, T <: HList](head: H, tail: T) extends HList { + type Wrap[M[_]] = M[H] :+: T#Wrap[M] + def :+:[G](g: G): G :+: H :+: T = HCons(g, this) - override def toString = head + " :+: " + tail.toString + override def toString = head + " :+: " + tail.toString } -object HList -{ - // contains no type information: not even A - implicit def fromList[A](list: Traversable[A]): HList = ((HNil: HList) /: list) ( (hl,v) => HCons(v, hl) ) +object HList { + // contains no type information: not even A + implicit def fromList[A](list: Traversable[A]): HList = ((HNil: HList) /: list)((hl, v) => HCons(v, hl)) } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/IDSet.scala b/util/collection/src/main/scala/sbt/IDSet.scala index 43a0d6f16..4f5245a26 100644 --- a/util/collection/src/main/scala/sbt/IDSet.scala +++ b/util/collection/src/main/scala/sbt/IDSet.scala @@ -4,44 +4,42 @@ package sbt /** A mutable set interface that uses object identity to test for set membership.*/ -trait IDSet[T] -{ - def apply(t: T): Boolean - def contains(t: T): Boolean - def += (t: T): Unit - def ++=(t: Iterable[T]): Unit - def -= (t: T): Boolean - def all: collection.Iterable[T] - def toList: List[T] - def isEmpty: Boolean - def foreach(f: T => Unit): Unit - def process[S](t: T)(ifSeen: S)(ifNew: => S): S +trait IDSet[T] { + def apply(t: T): Boolean + def contains(t: T): Boolean + def +=(t: T): Unit + def ++=(t: Iterable[T]): Unit + def -=(t: T): Boolean + def all: collection.Iterable[T] 
+ def toList: List[T] + def isEmpty: Boolean + def foreach(f: T => Unit): Unit + def process[S](t: T)(ifSeen: S)(ifNew: => S): S } -object IDSet -{ - implicit def toTraversable[T]: IDSet[T] => Traversable[T] = _.all - def apply[T](values: T*): IDSet[T] = apply(values) - def apply[T](values: Iterable[T]): IDSet[T] = - { - val s = create[T] - s ++= values - s - } - def create[T]: IDSet[T] = new IDSet[T] { - private[this] val backing = new java.util.IdentityHashMap[T, AnyRef] - private[this] val Dummy: AnyRef = "" +object IDSet { + implicit def toTraversable[T]: IDSet[T] => Traversable[T] = _.all + def apply[T](values: T*): IDSet[T] = apply(values) + def apply[T](values: Iterable[T]): IDSet[T] = + { + val s = create[T] + s ++= values + s + } + def create[T]: IDSet[T] = new IDSet[T] { + private[this] val backing = new java.util.IdentityHashMap[T, AnyRef] + private[this] val Dummy: AnyRef = "" - def apply(t: T) = contains(t) - def contains(t: T) = backing.containsKey(t) - def foreach(f: T => Unit) = all foreach f - def += (t: T) = backing.put(t, Dummy) - def ++=(t: Iterable[T]) = t foreach += - def -= (t:T) = if(backing.remove(t) eq null) false else true - def all = collection.JavaConversions.collectionAsScalaIterable(backing.keySet) - def toList = all.toList - def isEmpty = backing.isEmpty - def process[S](t: T)(ifSeen: S)(ifNew: => S) = if(contains(t)) ifSeen else { this += t ; ifNew } - override def toString = backing.toString - } + def apply(t: T) = contains(t) + def contains(t: T) = backing.containsKey(t) + def foreach(f: T => Unit) = all foreach f + def +=(t: T) = backing.put(t, Dummy) + def ++=(t: Iterable[T]) = t foreach += + def -=(t: T) = if (backing.remove(t) eq null) false else true + def all = collection.JavaConversions.collectionAsScalaIterable(backing.keySet) + def toList = all.toList + def isEmpty = backing.isEmpty + def process[S](t: T)(ifSeen: S)(ifNew: => S) = if (contains(t)) ifSeen else { this += t; ifNew } + override def toString = backing.toString + } } diff --git a/util/collection/src/main/scala/sbt/INode.scala b/util/collection/src/main/scala/sbt/INode.scala index 67b1c5b36..d56a22485 100644 --- a/util/collection/src/main/scala/sbt/INode.scala +++ b/util/collection/src/main/scala/sbt/INode.scala @@ -1,179 +1,177 @@ package sbt - import java.lang.Runnable - import java.util.concurrent.{atomic, Executor, LinkedBlockingQueue} - import atomic.{AtomicBoolean, AtomicInteger} - import Types.{:+:, ConstK, Id} +import java.lang.Runnable +import java.util.concurrent.{ atomic, Executor, LinkedBlockingQueue } +import atomic.{ AtomicBoolean, AtomicInteger } +import Types.{ :+:, ConstK, Id } object EvaluationState extends Enumeration { - val New, Blocked, Ready, Calling, Evaluated = Value + val New, Blocked, Ready, Calling, Evaluated = Value } -abstract class EvaluateSettings[Scope] -{ - protected val init: Init[Scope] - import init._ - protected def executor: Executor - protected def compiledSettings: Seq[Compiled[_]] +abstract class EvaluateSettings[Scope] { + protected val init: Init[Scope] + import init._ + protected def executor: Executor + protected def compiledSettings: Seq[Compiled[_]] - import EvaluationState.{Value => EvaluationState, _} + import EvaluationState.{ Value => EvaluationState, _ } - private[this] val complete = new LinkedBlockingQueue[Option[Throwable]] - private[this] val static = PMap.empty[ScopedKey, INode] - private[this] val allScopes: Set[Scope] = compiledSettings.map(_.key.scope).toSet - private[this] def getStatic[T](key: ScopedKey[T]): INode[T] = 
static get key getOrElse sys.error("Illegal reference to key " + key) + private[this] val complete = new LinkedBlockingQueue[Option[Throwable]] + private[this] val static = PMap.empty[ScopedKey, INode] + private[this] val allScopes: Set[Scope] = compiledSettings.map(_.key.scope).toSet + private[this] def getStatic[T](key: ScopedKey[T]): INode[T] = static get key getOrElse sys.error("Illegal reference to key " + key) - private[this] val transform: Initialize ~> INode = new (Initialize ~> INode) { def apply[T](i: Initialize[T]): INode[T] = i match { - case k: Keyed[s, T] => single(getStatic(k.scopedKey), k.transform) - case a: Apply[k,T] => new MixedNode[k,T]( a.alist.transform[Initialize, INode](a.inputs, transform), a.f, a.alist) - case b: Bind[s,T] => new BindNode[s,T]( transform(b.in), x => transform(b.f(x))) - case init.StaticScopes => strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope] - case v: Value[T] => constant(v.value) - case v: ValidationCapture[T] => strictConstant(v.key) - case t: TransformCapture => strictConstant(t.f) - case o: Optional[s,T] => o.a match { - case None => constant( () => o.f(None) ) - case Some(i) => single[s,T](transform(i), x => o.f(Some(x))) - } - }} - private[this] lazy val roots: Seq[INode[_]] = compiledSettings flatMap { cs => - (cs.settings map { s => - val t = transform(s.init) - static(s.key) = t - t - }): Seq[INode[_]] - } - private[this] var running = new AtomicInteger - private[this] var cancel = new AtomicBoolean(false) + private[this] val transform: Initialize ~> INode = new (Initialize ~> INode) { + def apply[T](i: Initialize[T]): INode[T] = i match { + case k: Keyed[s, T] => single(getStatic(k.scopedKey), k.transform) + case a: Apply[k, T] => new MixedNode[k, T](a.alist.transform[Initialize, INode](a.inputs, transform), a.f, a.alist) + case b: Bind[s, T] => new BindNode[s, T](transform(b.in), x => transform(b.f(x))) + case init.StaticScopes => strictConstant(allScopes.asInstanceOf[T]) // can't convince scalac that StaticScopes => T == Set[Scope] + case v: Value[T] => constant(v.value) + case v: ValidationCapture[T] => strictConstant(v.key) + case t: TransformCapture => strictConstant(t.f) + case o: Optional[s, T] => o.a match { + case None => constant(() => o.f(None)) + case Some(i) => single[s, T](transform(i), x => o.f(Some(x))) + } + } + } + private[this] lazy val roots: Seq[INode[_]] = compiledSettings flatMap { cs => + (cs.settings map { s => + val t = transform(s.init) + static(s.key) = t + t + }): Seq[INode[_]] + } + private[this] var running = new AtomicInteger + private[this] var cancel = new AtomicBoolean(false) - def run(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = - { - assert(running.get() == 0, "Already running") - startWork() - roots.foreach( _.registerIfNew() ) - workComplete() - complete.take() foreach { ex => - cancel.set(true) - throw ex - } - getResults(delegates) - } - private[this] def getResults(implicit delegates: Scope => Seq[Scope]) = - (empty /: static.toTypedSeq) { case (ss, static.TPair(key, node)) => - if(key.key.isLocal) ss else ss.set(key.scope, key.key, node.get) - } - private[this] val getValue = new (INode ~> Id) { def apply[T](node: INode[T]) = node.get } + def run(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = + { + assert(running.get() == 0, "Already running") + startWork() + roots.foreach(_.registerIfNew()) + workComplete() + complete.take() foreach { ex => + cancel.set(true) + throw ex + } + getResults(delegates) + } + 
private[this] def getResults(implicit delegates: Scope => Seq[Scope]) = + (empty /: static.toTypedSeq) { + case (ss, static.TPair(key, node)) => + if (key.key.isLocal) ss else ss.set(key.scope, key.key, node.get) + } + private[this] val getValue = new (INode ~> Id) { def apply[T](node: INode[T]) = node.get } - private[this] def submitEvaluate(node: INode[_]) = submit(node.evaluate()) - private[this] def submitCallComplete[T](node: BindNode[_, T], value: T) = submit(node.callComplete(value)) - private[this] def submit(work: => Unit): Unit = - { - startWork() - executor.execute(new Runnable { def run = if(!cancel.get()) run0(work) }) - } - private[this] def run0(work: => Unit): Unit = - { - try { work } catch { case e: Throwable => complete.put( Some(e) ) } - workComplete() - } + private[this] def submitEvaluate(node: INode[_]) = submit(node.evaluate()) + private[this] def submitCallComplete[T](node: BindNode[_, T], value: T) = submit(node.callComplete(value)) + private[this] def submit(work: => Unit): Unit = + { + startWork() + executor.execute(new Runnable { def run = if (!cancel.get()) run0(work) }) + } + private[this] def run0(work: => Unit): Unit = + { + try { work } catch { case e: Throwable => complete.put(Some(e)) } + workComplete() + } - private[this] def startWork(): Unit = running.incrementAndGet() - private[this] def workComplete(): Unit = - if(running.decrementAndGet() == 0) - complete.put( None ) + private[this] def startWork(): Unit = running.incrementAndGet() + private[this] def workComplete(): Unit = + if (running.decrementAndGet() == 0) + complete.put(None) - private[this] sealed abstract class INode[T] - { - private[this] var state: EvaluationState = New - private[this] var value: T = _ - private[this] val blocking = new collection.mutable.ListBuffer[INode[_]] - private[this] var blockedOn: Int = 0 - private[this] val calledBy = new collection.mutable.ListBuffer[BindNode[_, T]] + private[this] sealed abstract class INode[T] { + private[this] var state: EvaluationState = New + private[this] var value: T = _ + private[this] val blocking = new collection.mutable.ListBuffer[INode[_]] + private[this] var blockedOn: Int = 0 + private[this] val calledBy = new collection.mutable.ListBuffer[BindNode[_, T]] - override def toString = getClass.getName + " (state=" + state + ",blockedOn=" + blockedOn + ",calledBy=" + calledBy.size + ",blocking=" + blocking.size + "): " + - keyString + override def toString = getClass.getName + " (state=" + state + ",blockedOn=" + blockedOn + ",calledBy=" + calledBy.size + ",blocking=" + blocking.size + "): " + + keyString - private[this] def keyString = - (static.toSeq.flatMap { case (key, value) => if(value eq this) init.showFullKey(key) :: Nil else Nil }).headOption getOrElse "non-static" + private[this] def keyString = + (static.toSeq.flatMap { case (key, value) => if (value eq this) init.showFullKey(key) :: Nil else Nil }).headOption getOrElse "non-static" - final def get: T = synchronized { - assert(value != null, toString + " not evaluated") - value - } - final def doneOrBlock(from: INode[_]): Boolean = synchronized { - val ready = state == Evaluated - if(!ready) blocking += from - registerIfNew() - ready - } - final def isDone: Boolean = synchronized { state == Evaluated } - final def isNew: Boolean = synchronized { state == New } - final def isCalling: Boolean = synchronized { state == Calling } - final def registerIfNew(): Unit = synchronized { if(state == New) register() } - private[this] def register() - { - assert(state == New, "Already 
registered and: " + toString) - val deps = dependsOn - blockedOn = deps.size - deps.count(_.doneOrBlock(this)) - if(blockedOn == 0) - schedule() - else - state = Blocked - } + final def get: T = synchronized { + assert(value != null, toString + " not evaluated") + value + } + final def doneOrBlock(from: INode[_]): Boolean = synchronized { + val ready = state == Evaluated + if (!ready) blocking += from + registerIfNew() + ready + } + final def isDone: Boolean = synchronized { state == Evaluated } + final def isNew: Boolean = synchronized { state == New } + final def isCalling: Boolean = synchronized { state == Calling } + final def registerIfNew(): Unit = synchronized { if (state == New) register() } + private[this] def register() { + assert(state == New, "Already registered and: " + toString) + val deps = dependsOn + blockedOn = deps.size - deps.count(_.doneOrBlock(this)) + if (blockedOn == 0) + schedule() + else + state = Blocked + } - final def schedule(): Unit = synchronized { - assert(state == New || state == Blocked, "Invalid state for schedule() call: " + toString) - state = Ready - submitEvaluate(this) - } - final def unblocked(): Unit = synchronized { - assert(state == Blocked, "Invalid state for unblocked() call: " + toString) - blockedOn -= 1 - assert(blockedOn >= 0, "Negative blockedOn: " + blockedOn + " for " + toString) - if(blockedOn == 0) schedule() - } - final def evaluate(): Unit = synchronized { evaluate0() } - protected final def makeCall(source: BindNode[_, T], target: INode[T]) { - assert(state == Ready, "Invalid state for call to makeCall: " + toString) - state = Calling - target.call(source) - } - protected final def setValue(v: T) { - assert(state != Evaluated, "Already evaluated (trying to set value to " + v + "): " + toString) - if(v == null) sys.error("Setting value cannot be null: " + keyString) - value = v - state = Evaluated - blocking foreach { _.unblocked() } - blocking.clear() - calledBy foreach { node => submitCallComplete(node, value) } - calledBy.clear() - } - final def call(by: BindNode[_, T]): Unit = synchronized { - registerIfNew() - state match { - case Evaluated => submitCallComplete(by, value) - case _ => calledBy += by - } - } - protected def dependsOn: Seq[INode[_]] - protected def evaluate0(): Unit - } + final def schedule(): Unit = synchronized { + assert(state == New || state == Blocked, "Invalid state for schedule() call: " + toString) + state = Ready + submitEvaluate(this) + } + final def unblocked(): Unit = synchronized { + assert(state == Blocked, "Invalid state for unblocked() call: " + toString) + blockedOn -= 1 + assert(blockedOn >= 0, "Negative blockedOn: " + blockedOn + " for " + toString) + if (blockedOn == 0) schedule() + } + final def evaluate(): Unit = synchronized { evaluate0() } + protected final def makeCall(source: BindNode[_, T], target: INode[T]) { + assert(state == Ready, "Invalid state for call to makeCall: " + toString) + state = Calling + target.call(source) + } + protected final def setValue(v: T) { + assert(state != Evaluated, "Already evaluated (trying to set value to " + v + "): " + toString) + if (v == null) sys.error("Setting value cannot be null: " + keyString) + value = v + state = Evaluated + blocking foreach { _.unblocked() } + blocking.clear() + calledBy foreach { node => submitCallComplete(node, value) } + calledBy.clear() + } + final def call(by: BindNode[_, T]): Unit = synchronized { + registerIfNew() + state match { + case Evaluated => submitCallComplete(by, value) + case _ => calledBy += by + } + } + 
protected def dependsOn: Seq[INode[_]] + protected def evaluate0(): Unit + } - private[this] def strictConstant[T](v: T): INode[T] = constant(() => v) - private[this] def constant[T](f: () => T): INode[T] = new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty) - private[this] def single[S,T](in: INode[S], f: S => T): INode[T] = new MixedNode[ ({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S]) - private[this] final class BindNode[S,T](in: INode[S], f: S => INode[T]) extends INode[T] - { - protected def dependsOn = in :: Nil - protected def evaluate0(): Unit = makeCall(this, f(in.get) ) - def callComplete(value: T): Unit = synchronized { - assert(isCalling, "Invalid state for callComplete(" + value + "): " + toString) - setValue(value) - } - } - private[this] final class MixedNode[K[L[x]], T](in: K[INode], f: K[Id] => T, alist: AList[K]) extends INode[T] - { - protected def dependsOn = alist.toList(in) - protected def evaluate0(): Unit = setValue( f( alist.transform(in, getValue) ) ) - } + private[this] def strictConstant[T](v: T): INode[T] = constant(() => v) + private[this] def constant[T](f: () => T): INode[T] = new MixedNode[ConstK[Unit]#l, T]((), _ => f(), AList.empty) + private[this] def single[S, T](in: INode[S], f: S => T): INode[T] = new MixedNode[({ type l[L[x]] = L[S] })#l, T](in, f, AList.single[S]) + private[this] final class BindNode[S, T](in: INode[S], f: S => INode[T]) extends INode[T] { + protected def dependsOn = in :: Nil + protected def evaluate0(): Unit = makeCall(this, f(in.get)) + def callComplete(value: T): Unit = synchronized { + assert(isCalling, "Invalid state for callComplete(" + value + "): " + toString) + setValue(value) + } + } + private[this] final class MixedNode[K[L[x]], T](in: K[INode], f: K[Id] => T, alist: AList[K]) extends INode[T] { + protected def dependsOn = alist.toList(in) + protected def evaluate0(): Unit = setValue(f(alist.transform(in, getValue))) + } } diff --git a/util/collection/src/main/scala/sbt/KList.scala b/util/collection/src/main/scala/sbt/KList.scala index 7ecc6ba6a..0b09ac9b1 100644 --- a/util/collection/src/main/scala/sbt/KList.scala +++ b/util/collection/src/main/scala/sbt/KList.scala @@ -1,56 +1,53 @@ package sbt - import Types._ - import Classes.Applicative +import Types._ +import Classes.Applicative /** Heterogeneous list with each element having type M[T] for some type T.*/ -sealed trait KList[+M[_]] -{ - type Transform[N[_]] <: KList[N] +sealed trait KList[+M[_]] { + type Transform[N[_]] <: KList[N] - /** Apply the natural transformation `f` to each element. */ - def transform[N[_]](f: M ~> N): Transform[N] + /** Apply the natural transformation `f` to each element. */ + def transform[N[_]](f: M ~> N): Transform[N] - /** Folds this list using a function that operates on the homogeneous type of the elements of this list. */ - def foldr[T](f: (M[_], T) => T, init: T): T = init // had trouble defining it in KNil + /** Folds this list using a function that operates on the homogeneous type of the elements of this list. */ + def foldr[T](f: (M[_], T) => T, init: T): T = init // had trouble defining it in KNil - /** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */ - def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] + /** Applies `f` to the elements of this list in the applicative functor defined by `ap`. */ + def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] - /** Equivalent to `transform(f) . 
apply(x => x)`, this is the essence of the iterator at the level of natural transformations.*/ - def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] + /** Equivalent to `transform(f) . apply(x => x)`, this is the essence of the iterator at the level of natural transformations.*/ + def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] - /** Discards the heterogeneous type information and constructs a plain List from this KList's elements. */ - def toList: List[M[_]] + /** Discards the heterogeneous type information and constructs a plain List from this KList's elements. */ + def toList: List[M[_]] } -final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] -{ - final type Transform[N[_]] = KCons[H, tail.Transform[N], N] +final case class KCons[H, +T <: KList[M], +M[_]](head: M[H], tail: T) extends KList[M] { + final type Transform[N[_]] = KCons[H, tail.Transform[N], N] - def transform[N[_]](f: M ~> N) = KCons(f(head), tail.transform(f)) - def toList: List[M[_]] = head :: tail.toList - def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] = - { - val g = (t: tail.Transform[Id]) => (h: H) =>f( KCons[H, tail.Transform[Id], Id](h, t) ) - ap.apply( tail.apply[N, H => Z](g), head ) - } - def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] = - { - val tt: N[tail.Transform[P]] = tail.traverse[N,P](f) - val g = (t: tail.Transform[P]) => (h: P[H]) => KCons(h, t) - np.apply(np.map(g, tt), f(head)) - } - def :^:[A,N[x] >: M[x]](h: N[A]) = KCons(h, this) - override def foldr[T](f: (M[_], T) => T, init: T): T = f(head, tail.foldr(f, init)) + def transform[N[_]](f: M ~> N) = KCons(f(head), tail.transform(f)) + def toList: List[M[_]] = head :: tail.toList + def apply[N[x] >: M[x], Z](f: Transform[Id] => Z)(implicit ap: Applicative[N]): N[Z] = + { + val g = (t: tail.Transform[Id]) => (h: H) => f(KCons[H, tail.Transform[Id], Id](h, t)) + ap.apply(tail.apply[N, H => Z](g), head) + } + def traverse[N[_], P[_]](f: M ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[Transform[P]] = + { + val tt: N[tail.Transform[P]] = tail.traverse[N, P](f) + val g = (t: tail.Transform[P]) => (h: P[H]) => KCons(h, t) + np.apply(np.map(g, tt), f(head)) + } + def :^:[A, N[x] >: M[x]](h: N[A]) = KCons(h, this) + override def foldr[T](f: (M[_], T) => T, init: T): T = f(head, tail.foldr(f, init)) } -sealed abstract class KNil extends KList[Nothing] -{ - final type Transform[N[_]] = KNil - final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil - final def toList = Nil - final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil)) - final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] = np.pure(KNil) +sealed abstract class KNil extends KList[Nothing] { + final type Transform[N[_]] = KNil + final def transform[N[_]](f: Nothing ~> N): Transform[N] = KNil + final def toList = Nil + final def apply[N[x], Z](f: KNil => Z)(implicit ap: Applicative[N]): N[Z] = ap.pure(f(KNil)) + final def traverse[N[_], P[_]](f: Nothing ~> (N ∙ P)#l)(implicit np: Applicative[N]): N[KNil] = np.pure(KNil) } case object KNil extends KNil { - def :^:[M[_], H](h: M[H]): KCons[H, KNil, M] = KCons(h, this) + def :^:[M[_], H](h: M[H]): KCons[H, KNil, M] = KCons(h, this) } diff --git a/util/collection/src/main/scala/sbt/PMap.scala b/util/collection/src/main/scala/sbt/PMap.scala index 67a8899cd..51c942112 100644 --- 
a/util/collection/src/main/scala/sbt/PMap.scala +++ b/util/collection/src/main/scala/sbt/PMap.scala @@ -3,112 +3,106 @@ */ package sbt - import collection.mutable +import collection.mutable -trait RMap[K[_], V[_]] -{ - def apply[T](k: K[T]): V[T] - def get[T](k: K[T]): Option[V[T]] - def contains[T](k: K[T]): Boolean - def toSeq: Seq[(K[_], V[_])] - def toTypedSeq: Seq[TPair[_]] = toSeq.map{ case (k: K[t],v) => TPair[t](k,v.asInstanceOf[V[t]]) } - def keys: Iterable[K[_]] - def values: Iterable[V[_]] - def isEmpty: Boolean +trait RMap[K[_], V[_]] { + def apply[T](k: K[T]): V[T] + def get[T](k: K[T]): Option[V[T]] + def contains[T](k: K[T]): Boolean + def toSeq: Seq[(K[_], V[_])] + def toTypedSeq: Seq[TPair[_]] = toSeq.map { case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]]) } + def keys: Iterable[K[_]] + def values: Iterable[V[_]] + def isEmpty: Boolean - final case class TPair[T](key: K[T], value: V[T]) + final case class TPair[T](key: K[T], value: V[T]) } -trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K,V] -{ - def put[T](k: K[T], v: V[T]): IMap[K,V] - def remove[T](k: K[T]): IMap[K,V] - def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K,V] - def mapValues[V2[_]](f: V ~> V2): IMap[K,V2] - def mapSeparate[VL[_], VR[_]](f: V ~> ({type l[T] = Either[VL[T], VR[T]]})#l ): (IMap[K,VL], IMap[K,VR]) +trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] { + def put[T](k: K[T], v: V[T]): IMap[K, V] + def remove[T](k: K[T]): IMap[K, V] + def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V] + def mapValues[V2[_]](f: V ~> V2): IMap[K, V2] + def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l): (IMap[K, VL], IMap[K, VR]) } -trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K,V] -{ - def update[T](k: K[T], v: V[T]): Unit - def remove[T](k: K[T]): Option[V[T]] - def getOrUpdate[T](k: K[T], make: => V[T]): V[T] - def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] +trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] { + def update[T](k: K[T], v: V[T]): Unit + def remove[T](k: K[T]): Option[V[T]] + def getOrUpdate[T](k: K[T], make: => V[T]): V[T] + def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] } -object PMap -{ - implicit def toFunction[K[_], V[_]](map: PMap[K,V]): K[_] => V[_] = k => map(k) - def empty[K[_], V[_]]: PMap[K,V] = new DelegatingPMap[K,V](new mutable.HashMap) +object PMap { + implicit def toFunction[K[_], V[_]](map: PMap[K, V]): K[_] => V[_] = k => map(k) + def empty[K[_], V[_]]: PMap[K, V] = new DelegatingPMap[K, V](new mutable.HashMap) } -object IMap -{ - /** - * Only suitable for K that is invariant in its type parameter. - * Option and List keys are not suitable, for example, - * because None <:< Option[String] and None <: Option[Int]. - */ - def empty[K[_], V[_]]: IMap[K,V] = new IMap0[K,V](Map.empty) +object IMap { + /** + * Only suitable for K that is invariant in its type parameter. + * Option and List keys are not suitable, for example, + * because None <:< Option[String] and None <: Option[Int]. 
+ */ + def empty[K[_], V[_]]: IMap[K, V] = new IMap0[K, V](Map.empty) - private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) extends AbstractRMap[K,V] with IMap[K,V] - { - def get[T](k: K[T]): Option[V[T]] = ( backing get k ).asInstanceOf[Option[V[T]]] - def put[T](k: K[T], v: V[T]) = new IMap0[K,V]( backing.updated(k, v) ) - def remove[T](k: K[T]) = new IMap0[K,V]( backing - k ) + private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) extends AbstractRMap[K, V] with IMap[K, V] { + def get[T](k: K[T]): Option[V[T]] = (backing get k).asInstanceOf[Option[V[T]]] + def put[T](k: K[T], v: V[T]) = new IMap0[K, V](backing.updated(k, v)) + def remove[T](k: K[T]) = new IMap0[K, V](backing - k) - def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]) = - put(k, f(this get k getOrElse init)) + def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]) = + put(k, f(this get k getOrElse init)) - def mapValues[V2[_]](f: V ~> V2) = - new IMap0[K,V2](backing.mapValues(x => f(x)).toMap) + def mapValues[V2[_]](f: V ~> V2) = + new IMap0[K, V2](backing.mapValues(x => f(x)).toMap) - def mapSeparate[VL[_], VR[_]](f: V ~> ({type l[T] = Either[VL[T], VR[T]]})#l ) = - { - val mapped = backing.iterator.map { case (k,v) => f(v) match { - case Left(l) => Left((k, l)) - case Right(r) => Right((k, r)) - }} - val (l, r) = Util.separateE[(K[_],VL[_]), (K[_],VR[_])]( mapped.toList ) - (new IMap0[K,VL](l.toMap), new IMap0[K,VR](r.toMap)) - } + def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) = + { + val mapped = backing.iterator.map { + case (k, v) => f(v) match { + case Left(l) => Left((k, l)) + case Right(r) => Right((k, r)) + } + } + val (l, r) = Util.separateE[(K[_], VL[_]), (K[_], VR[_])](mapped.toList) + (new IMap0[K, VL](l.toMap), new IMap0[K, VR](r.toMap)) + } - def toSeq = backing.toSeq - def keys = backing.keys - def values = backing.values - def isEmpty = backing.isEmpty + def toSeq = backing.toSeq + def keys = backing.keys + def values = backing.values + def isEmpty = backing.isEmpty - override def toString = backing.toString - } + override def toString = backing.toString + } } -abstract class AbstractRMap[K[_], V[_]] extends RMap[K,V] -{ - def apply[T](k: K[T]): V[T] = get(k).get - def contains[T](k: K[T]): Boolean = get(k).isDefined +abstract class AbstractRMap[K[_], V[_]] extends RMap[K, V] { + def apply[T](k: K[T]): V[T] = get(k).get + def contains[T](k: K[T]): Boolean = get(k).isDefined } /** -* Only suitable for K that is invariant in its type parameter. -* Option and List keys are not suitable, for example, -* because None <:< Option[String] and None <: Option[Int]. -*/ -class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]]) extends AbstractRMap[K,V] with PMap[K,V] -{ - def get[T](k: K[T]): Option[V[T]] = cast[T]( backing.get(k) ) - def update[T](k: K[T], v: V[T]) { backing(k) = v } - def remove[T](k: K[T]) = cast( backing.remove(k) ) - def getOrUpdate[T](k: K[T], make: => V[T]) = cast[T]( backing.getOrElseUpdate(k, make) ) - def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = - { - val v = f(this get k getOrElse init) - update(k, v) - v - } - def toSeq = backing.toSeq - def keys = backing.keys - def values = backing.values - def isEmpty = backing.isEmpty + * Only suitable for K that is invariant in its type parameter. + * Option and List keys are not suitable, for example, + * because None <:< Option[String] and None <: Option[Int]. 
+ */ +class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]]) extends AbstractRMap[K, V] with PMap[K, V] { + def get[T](k: K[T]): Option[V[T]] = cast[T](backing.get(k)) + def update[T](k: K[T], v: V[T]) { backing(k) = v } + def remove[T](k: K[T]) = cast(backing.remove(k)) + def getOrUpdate[T](k: K[T], make: => V[T]) = cast[T](backing.getOrElseUpdate(k, make)) + def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = + { + val v = f(this get k getOrElse init) + update(k, v) + v + } + def toSeq = backing.toSeq + def keys = backing.keys + def values = backing.values + def isEmpty = backing.isEmpty - private[this] def cast[T](v: V[_]): V[T] = v.asInstanceOf[V[T]] - private[this] def cast[T](o: Option[V[_]]): Option[V[T]] = o map cast[T] + private[this] def cast[T](v: V[_]): V[T] = v.asInstanceOf[V[T]] + private[this] def cast[T](o: Option[V[_]]): Option[V[T]] = o map cast[T] - override def toString = backing.toString + override def toString = backing.toString } diff --git a/util/collection/src/main/scala/sbt/Param.scala b/util/collection/src/main/scala/sbt/Param.scala index 3271465d9..6f674efdc 100644 --- a/util/collection/src/main/scala/sbt/Param.scala +++ b/util/collection/src/main/scala/sbt/Param.scala @@ -6,26 +6,25 @@ package sbt import Types._ // Used to emulate ~> literals -trait Param[A[_], B[_]] -{ - type T - def in: A[T] - def ret(out: B[T]) - def ret: B[T] +trait Param[A[_], B[_]] { + type T + def in: A[T] + def ret(out: B[T]) + def ret: B[T] } -object Param -{ - implicit def pToT[A[_], B[_]](p: Param[A,B] => Unit): A~>B = new (A ~> B) { - def apply[s](a: A[s]): B[s] = { - val v: Param[A,B] { type T = s} = new Param[A,B] { type T = s - def in = a - private var r: B[T] = _ - def ret(b: B[T]) {r = b} - def ret: B[T] = r - } - p(v) - v.ret - } - } +object Param { + implicit def pToT[A[_], B[_]](p: Param[A, B] => Unit): A ~> B = new (A ~> B) { + def apply[s](a: A[s]): B[s] = { + val v: Param[A, B] { type T = s } = new Param[A, B] { + type T = s + def in = a + private var r: B[T] = _ + def ret(b: B[T]) { r = b } + def ret: B[T] = r + } + p(v) + v.ret + } + } } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Positions.scala b/util/collection/src/main/scala/sbt/Positions.scala index f52c583b0..5d7e1915d 100755 --- a/util/collection/src/main/scala/sbt/Positions.scala +++ b/util/collection/src/main/scala/sbt/Positions.scala @@ -3,8 +3,8 @@ package sbt sealed trait SourcePosition sealed trait FilePosition extends SourcePosition { - def path: String - def startLine: Int + def path: String + def startLine: Int } case object NoPosition extends SourcePosition @@ -12,9 +12,9 @@ case object NoPosition extends SourcePosition final case class LinePosition(path: String, startLine: Int) extends FilePosition final case class LineRange(start: Int, end: Int) { - def shift(n: Int) = new LineRange(start + n, end + n) + def shift(n: Int) = new LineRange(start + n, end + n) } final case class RangePosition(path: String, range: LineRange) extends FilePosition { - def startLine = range.start + def startLine = range.start } diff --git a/util/collection/src/main/scala/sbt/Settings.scala b/util/collection/src/main/scala/sbt/Settings.scala index 7a6a7b7ee..96393f917 100644 --- a/util/collection/src/main/scala/sbt/Settings.scala +++ b/util/collection/src/main/scala/sbt/Settings.scala @@ -5,638 +5,630 @@ package sbt import Types._ -sealed trait Settings[Scope] -{ - def data: Map[Scope, AttributeMap] - def keys(scope: Scope): Set[AttributeKey[_]] - def scopes: Set[Scope] - 
def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] - def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] - def get[T](scope: Scope, key: AttributeKey[T]): Option[T] - def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] - def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope] +sealed trait Settings[Scope] { + def data: Map[Scope, AttributeMap] + def keys(scope: Scope): Set[AttributeKey[_]] + def scopes: Set[Scope] + def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] + def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] + def get[T](scope: Scope, key: AttributeKey[T]): Option[T] + def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] + def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope] } -private final class Settings0[Scope](val data: Map[Scope, AttributeMap], val delegates: Scope => Seq[Scope]) extends Settings[Scope] -{ - def scopes: Set[Scope] = data.keySet.toSet - def keys(scope: Scope) = data(scope).keys.toSet - def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] = data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) } toSeq; +private final class Settings0[Scope](val data: Map[Scope, AttributeMap], val delegates: Scope => Seq[Scope]) extends Settings[Scope] { + def scopes: Set[Scope] = data.keySet.toSet + def keys(scope: Scope) = data(scope).keys.toSet + def allKeys[T](f: (Scope, AttributeKey[_]) => T): Seq[T] = data.flatMap { case (scope, map) => map.keys.map(k => f(scope, k)) } toSeq; - def get[T](scope: Scope, key: AttributeKey[T]): Option[T] = - delegates(scope).toStream.flatMap(sc => getDirect(sc, key) ).headOption - def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] = - delegates(scope).toStream.filter(sc => getDirect(sc, key).isDefined ).headOption + def get[T](scope: Scope, key: AttributeKey[T]): Option[T] = + delegates(scope).toStream.flatMap(sc => getDirect(sc, key)).headOption + def definingScope(scope: Scope, key: AttributeKey[_]): Option[Scope] = + delegates(scope).toStream.filter(sc => getDirect(sc, key).isDefined).headOption - def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] = - (data get scope).flatMap(_ get key) + def getDirect[T](scope: Scope, key: AttributeKey[T]): Option[T] = + (data get scope).flatMap(_ get key) - def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope] = - { - val map = (data get scope) getOrElse AttributeMap.empty - val newData = data.updated(scope, map.put(key, value)) - new Settings0(newData, delegates) - } + def set[T](scope: Scope, key: AttributeKey[T], value: T): Settings[Scope] = + { + val map = (data get scope) getOrElse AttributeMap.empty + val newData = data.updated(scope, map.put(key, value)) + new Settings0(newData, delegates) + } } // delegates should contain the input Scope as the first entry // this trait is intended to be mixed into an object -trait Init[Scope] -{ - /** The Show instance used when a detailed String needs to be generated. It is typically used when no context is available.*/ - def showFullKey: Show[ScopedKey[_]] +trait Init[Scope] { + /** The Show instance used when a detailed String needs to be generated. 
It is typically used when no context is available.*/ + def showFullKey: Show[ScopedKey[_]] - final case class ScopedKey[T](scope: Scope, key: AttributeKey[T]) extends KeyedInitialize[T] { - def scopedKey = this - } + final case class ScopedKey[T](scope: Scope, key: AttributeKey[T]) extends KeyedInitialize[T] { + def scopedKey = this + } - type SettingSeq[T] = Seq[Setting[T]] - type ScopedMap = IMap[ScopedKey, SettingSeq] - type CompiledMap = Map[ScopedKey[_], Compiled[_]] - type MapScoped = ScopedKey ~> ScopedKey - type ValidatedRef[T] = Either[Undefined, ScopedKey[T]] - type ValidatedInit[T] = Either[Seq[Undefined], Initialize[T]] - type ValidateRef = ScopedKey ~> ValidatedRef - type ScopeLocal = ScopedKey[_] => Seq[Setting[_]] - type MapConstant = ScopedKey ~> Option + type SettingSeq[T] = Seq[Setting[T]] + type ScopedMap = IMap[ScopedKey, SettingSeq] + type CompiledMap = Map[ScopedKey[_], Compiled[_]] + type MapScoped = ScopedKey ~> ScopedKey + type ValidatedRef[T] = Either[Undefined, ScopedKey[T]] + type ValidatedInit[T] = Either[Seq[Undefined], Initialize[T]] + type ValidateRef = ScopedKey ~> ValidatedRef + type ScopeLocal = ScopedKey[_] => Seq[Setting[_]] + type MapConstant = ScopedKey ~> Option - private[sbt] abstract class ValidateKeyRef { - def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T] - } + private[sbt] abstract class ValidateKeyRef { + def apply[T](key: ScopedKey[T], selfRefOk: Boolean): ValidatedRef[T] + } - /** The result of this initialization is the composition of applied transformations. - * This can be useful when dealing with dynamic Initialize values. */ - lazy val capturedTransformations: Initialize[Initialize ~> Initialize] = new TransformCapture(idK[Initialize]) + /** + * The result of this initialization is the composition of applied transformations. + * This can be useful when dealing with dynamic Initialize values. 
+ */ + lazy val capturedTransformations: Initialize[Initialize ~> Initialize] = new TransformCapture(idK[Initialize]) - def setting[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition = NoPosition): Setting[T] = new Setting[T](key, init, pos) - def valueStrict[T](value: T): Initialize[T] = pure(() => value) - def value[T](value: => T): Initialize[T] = pure(value _) - def pure[T](value: () => T): Initialize[T] = new Value(value) - def optional[T,U](i: Initialize[T])(f: Option[T] => U): Initialize[U] = new Optional(Some(i), f) - def update[T](key: ScopedKey[T])(f: T => T): Setting[T] = setting[T](key, map(key)(f), NoPosition) - def bind[S,T](in: Initialize[S])(f: S => Initialize[T]): Initialize[T] = new Bind(f, in) - def map[S,T](in: Initialize[S])(f: S => T): Initialize[T] = new Apply[ ({ type l[L[x]] = L[S] })#l, T](f, in, AList.single[S]) - def app[K[L[x]], T](inputs: K[Initialize])(f: K[Id] => T)(implicit alist: AList[K]): Initialize[T] = new Apply[K, T](f, inputs, alist) - def uniform[S,T](inputs: Seq[Initialize[S]])(f: Seq[S] => T): Initialize[T] = - new Apply[({ type l[L[x]] = List[L[S]] })#l, T](f, inputs.toList, AList.seq[S]) + def setting[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition = NoPosition): Setting[T] = new Setting[T](key, init, pos) + def valueStrict[T](value: T): Initialize[T] = pure(() => value) + def value[T](value: => T): Initialize[T] = pure(value _) + def pure[T](value: () => T): Initialize[T] = new Value(value) + def optional[T, U](i: Initialize[T])(f: Option[T] => U): Initialize[U] = new Optional(Some(i), f) + def update[T](key: ScopedKey[T])(f: T => T): Setting[T] = setting[T](key, map(key)(f), NoPosition) + def bind[S, T](in: Initialize[S])(f: S => Initialize[T]): Initialize[T] = new Bind(f, in) + def map[S, T](in: Initialize[S])(f: S => T): Initialize[T] = new Apply[({ type l[L[x]] = L[S] })#l, T](f, in, AList.single[S]) + def app[K[L[x]], T](inputs: K[Initialize])(f: K[Id] => T)(implicit alist: AList[K]): Initialize[T] = new Apply[K, T](f, inputs, alist) + def uniform[S, T](inputs: Seq[Initialize[S]])(f: Seq[S] => T): Initialize[T] = + new Apply[({ type l[L[x]] = List[L[S]] })#l, T](f, inputs.toList, AList.seq[S]) - /** The result of this initialization is the validated `key`. - * No dependency is introduced on `key`. If `selfRefOk` is true, validation will not fail if the key is referenced by a definition of `key`. - * That is, key := f(validated(key).value) is allowed only if `selfRefOk == true`. */ - private[sbt] final def validated[T](key: ScopedKey[T], selfRefOk: Boolean): ValidationCapture[T] = new ValidationCapture(key, selfRefOk) + /** + * The result of this initialization is the validated `key`. + * No dependency is introduced on `key`. If `selfRefOk` is true, validation will not fail if the key is referenced by a definition of `key`. + * That is, key := f(validated(key).value) is allowed only if `selfRefOk == true`. + */ + private[sbt] final def validated[T](key: ScopedKey[T], selfRefOk: Boolean): ValidationCapture[T] = new ValidationCapture(key, selfRefOk) - /** Constructs a derived setting that will be automatically defined in every scope where one of its dependencies - * is explicitly defined and the where the scope matches `filter`. - * A setting initialized with dynamic dependencies is only allowed if `allowDynamic` is true. - * Only the static dependencies are tracked, however. Dependencies on previous values do not introduce a derived setting either. 
*/ - final def derive[T](s: Setting[T], allowDynamic: Boolean = false, filter: Scope => Boolean = const(true), trigger: AttributeKey[_] => Boolean = const(true), default: Boolean = false): Setting[T] = { - deriveAllowed(s, allowDynamic) foreach error - val d = new DerivedSetting[T](s.key, s.init, s.pos, filter, trigger) - if (default) d.default() else d - } - def deriveAllowed[T](s: Setting[T], allowDynamic: Boolean): Option[String] = s.init match { - case _: Bind[_,_] if !allowDynamic => Some("Cannot derive from dynamic dependencies.") - case _ => None - } - // id is used for equality - private[sbt] final def defaultSetting[T](s: Setting[T]): Setting[T] = s.default() - private[sbt] def defaultSettings(ss: Seq[Setting[_]]): Seq[Setting[_]] = ss.map(s => defaultSetting(s)) - private[this] final val nextID = new java.util.concurrent.atomic.AtomicLong - private[this] final def nextDefaultID(): Long = nextID.incrementAndGet() + /** + * Constructs a derived setting that will be automatically defined in every scope where one of its dependencies + * is explicitly defined and the where the scope matches `filter`. + * A setting initialized with dynamic dependencies is only allowed if `allowDynamic` is true. + * Only the static dependencies are tracked, however. Dependencies on previous values do not introduce a derived setting either. + */ + final def derive[T](s: Setting[T], allowDynamic: Boolean = false, filter: Scope => Boolean = const(true), trigger: AttributeKey[_] => Boolean = const(true), default: Boolean = false): Setting[T] = { + deriveAllowed(s, allowDynamic) foreach error + val d = new DerivedSetting[T](s.key, s.init, s.pos, filter, trigger) + if (default) d.default() else d + } + def deriveAllowed[T](s: Setting[T], allowDynamic: Boolean): Option[String] = s.init match { + case _: Bind[_, _] if !allowDynamic => Some("Cannot derive from dynamic dependencies.") + case _ => None + } + // id is used for equality + private[sbt] final def defaultSetting[T](s: Setting[T]): Setting[T] = s.default() + private[sbt] def defaultSettings(ss: Seq[Setting[_]]): Seq[Setting[_]] = ss.map(s => defaultSetting(s)) + private[this] final val nextID = new java.util.concurrent.atomic.AtomicLong + private[this] final def nextDefaultID(): Long = nextID.incrementAndGet() + def empty(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = new Settings0(Map.empty, delegates) + def asTransform(s: Settings[Scope]): ScopedKey ~> Id = new (ScopedKey ~> Id) { + def apply[T](k: ScopedKey[T]): T = getValue(s, k) + } + def getValue[T](s: Settings[Scope], k: ScopedKey[T]) = s.get(k.scope, k.key) getOrElse (throw new InvalidReference(k)) + def asFunction[T](s: Settings[Scope]): ScopedKey[T] => T = k => getValue(s, k) + def mapScope(f: Scope => Scope): MapScoped = new MapScoped { + def apply[T](k: ScopedKey[T]): ScopedKey[T] = k.copy(scope = f(k.scope)) + } + private final class InvalidReference(val key: ScopedKey[_]) extends RuntimeException("Internal settings error: invalid reference to " + showFullKey(key)) - def empty(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = new Settings0(Map.empty, delegates) - def asTransform(s: Settings[Scope]): ScopedKey ~> Id = new (ScopedKey ~> Id) { - def apply[T](k: ScopedKey[T]): T = getValue(s, k) - } - def getValue[T](s: Settings[Scope], k: ScopedKey[T]) = s.get(k.scope, k.key) getOrElse( throw new InvalidReference(k) ) - def asFunction[T](s: Settings[Scope]): ScopedKey[T] => T = k => getValue(s, k) - def mapScope(f: Scope => Scope): MapScoped = new MapScoped { - def 
apply[T](k: ScopedKey[T]): ScopedKey[T] = k.copy(scope = f(k.scope)) - } - private final class InvalidReference(val key: ScopedKey[_]) extends RuntimeException("Internal settings error: invalid reference to " + showFullKey(key)) + private[this] def applyDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = + { + val (defaults, others) = Util.separate[Setting[_], DefaultSetting[_], Setting[_]](ss) { case u: DefaultSetting[_] => Left(u); case s => Right(s) } + defaults.distinct ++ others + } - private[this] def applyDefaults(ss: Seq[Setting[_]]): Seq[Setting[_]] = - { - val (defaults, others) = Util.separate[Setting[_], DefaultSetting[_], Setting[_]](ss) { case u: DefaultSetting[_] => Left(u); case s => Right(s) } - defaults.distinct ++ others - } + def compiled(init: Seq[Setting[_]], actual: Boolean = true)(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal, display: Show[ScopedKey[_]]): CompiledMap = + { + val initDefaults = applyDefaults(init) + // inject derived settings into scopes where their dependencies are directly defined + // and prepend per-scope settings + val derived = deriveAndLocal(initDefaults) + // group by Scope/Key, dropping dead initializations + val sMap: ScopedMap = grouped(derived) + // delegate references to undefined values according to 'delegates' + val dMap: ScopedMap = if (actual) delegate(sMap)(delegates, display) else sMap + // merge Seq[Setting[_]] into Compiled + compile(dMap) + } + def make(init: Seq[Setting[_]])(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal, display: Show[ScopedKey[_]]): Settings[Scope] = + { + val cMap = compiled(init)(delegates, scopeLocal, display) + // order the initializations. cyclic references are detected here. + val ordered: Seq[Compiled[_]] = sort(cMap) + // evaluation: apply the initializations. + try { applyInits(ordered) } + catch { case rru: RuntimeUndefined => throw Uninitialized(cMap.keys.toSeq, delegates, rru.undefined, true) } + } + def sort(cMap: CompiledMap): Seq[Compiled[_]] = + Dag.topologicalSort(cMap.values)(_.dependencies.map(cMap)) - def compiled(init: Seq[Setting[_]], actual: Boolean = true)(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal, display: Show[ScopedKey[_]]): CompiledMap = - { - val initDefaults = applyDefaults(init) - // inject derived settings into scopes where their dependencies are directly defined - // and prepend per-scope settings - val derived = deriveAndLocal(initDefaults) - // group by Scope/Key, dropping dead initializations - val sMap: ScopedMap = grouped(derived) - // delegate references to undefined values according to 'delegates' - val dMap: ScopedMap = if(actual) delegate(sMap)(delegates, display) else sMap - // merge Seq[Setting[_]] into Compiled - compile(dMap) - } - def make(init: Seq[Setting[_]])(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal, display: Show[ScopedKey[_]]): Settings[Scope] = - { - val cMap = compiled(init)(delegates, scopeLocal, display) - // order the initializations. cyclic references are detected here. - val ordered: Seq[Compiled[_]] = sort(cMap) - // evaluation: apply the initializations. 
- try { applyInits(ordered) } - catch { case rru: RuntimeUndefined => throw Uninitialized(cMap.keys.toSeq, delegates, rru.undefined, true) } - } - def sort(cMap: CompiledMap): Seq[Compiled[_]] = - Dag.topologicalSort(cMap.values)(_.dependencies.map(cMap)) + def compile(sMap: ScopedMap): CompiledMap = + sMap.toTypedSeq.map { + case sMap.TPair(k, ss) => + val deps = ss flatMap { _.dependencies } toSet; + (k, new Compiled(k, deps, ss)) + } toMap; - def compile(sMap: ScopedMap): CompiledMap = - sMap.toTypedSeq.map { case sMap.TPair(k, ss) => - val deps = ss flatMap { _.dependencies } toSet; - (k, new Compiled(k, deps, ss)) - } toMap; + def grouped(init: Seq[Setting[_]]): ScopedMap = + ((IMap.empty: ScopedMap) /: init)((m, s) => add(m, s)) - def grouped(init: Seq[Setting[_]]): ScopedMap = - ((IMap.empty : ScopedMap) /: init) ( (m,s) => add(m,s) ) + def add[T](m: ScopedMap, s: Setting[T]): ScopedMap = + m.mapValue[T](s.key, Nil, ss => append(ss, s)) - def add[T](m: ScopedMap, s: Setting[T]): ScopedMap = - m.mapValue[T]( s.key, Nil, ss => append(ss, s)) + def append[T](ss: Seq[Setting[T]], s: Setting[T]): Seq[Setting[T]] = + if (s.definitive) s :: Nil else ss :+ s - def append[T](ss: Seq[Setting[T]], s: Setting[T]): Seq[Setting[T]] = - if(s.definitive) s :: Nil else ss :+ s + def addLocal(init: Seq[Setting[_]])(implicit scopeLocal: ScopeLocal): Seq[Setting[_]] = + init.flatMap(_.dependencies flatMap scopeLocal) ++ init - def addLocal(init: Seq[Setting[_]])(implicit scopeLocal: ScopeLocal): Seq[Setting[_]] = - init.flatMap( _.dependencies flatMap scopeLocal ) ++ init + def delegate(sMap: ScopedMap)(implicit delegates: Scope => Seq[Scope], display: Show[ScopedKey[_]]): ScopedMap = + { + def refMap(ref: Setting[_], isFirst: Boolean) = new ValidateKeyRef { + def apply[T](k: ScopedKey[T], selfRefOk: Boolean) = + delegateForKey(sMap, k, delegates(k.scope), ref, selfRefOk || !isFirst) + } + type ValidatedSettings[T] = Either[Seq[Undefined], SettingSeq[T]] + val f = new (SettingSeq ~> ValidatedSettings) { + def apply[T](ks: Seq[Setting[T]]) = { + val (undefs, valid) = Util.separate(ks.zipWithIndex) { case (s, i) => s validateKeyReferenced refMap(s, i == 0) } + if (undefs.isEmpty) Right(valid) else Left(undefs.flatten) + } + } + type Undefs[_] = Seq[Undefined] + val (undefineds, result) = sMap.mapSeparate[Undefs, SettingSeq](f) + if (undefineds.isEmpty) + result + else + throw Uninitialized(sMap.keys.toSeq, delegates, undefineds.values.flatten.toList, false) + } + private[this] def delegateForKey[T](sMap: ScopedMap, k: ScopedKey[T], scopes: Seq[Scope], ref: Setting[_], selfRefOk: Boolean): Either[Undefined, ScopedKey[T]] = + { + val skeys = scopes.iterator.map(x => ScopedKey(x, k.key)) + val definedAt = skeys.find(sk => (selfRefOk || ref.key != sk) && (sMap contains sk)) + definedAt.toRight(Undefined(ref, k)) + } - def delegate(sMap: ScopedMap)(implicit delegates: Scope => Seq[Scope], display: Show[ScopedKey[_]]): ScopedMap = - { - def refMap(ref: Setting[_], isFirst: Boolean) = new ValidateKeyRef { def apply[T](k: ScopedKey[T], selfRefOk: Boolean) = - delegateForKey(sMap, k, delegates(k.scope), ref, selfRefOk || !isFirst) - } - type ValidatedSettings[T] = Either[Seq[Undefined], SettingSeq[T]] - val f = new (SettingSeq ~> ValidatedSettings) { def apply[T](ks: Seq[Setting[T]]) = { - val (undefs, valid) = Util.separate(ks.zipWithIndex){ case (s,i) => s validateKeyReferenced refMap(s, i == 0) } - if(undefs.isEmpty) Right(valid) else Left(undefs.flatten) - }} - type Undefs[_] = Seq[Undefined] - val 
(undefineds, result) = sMap.mapSeparate[Undefs, SettingSeq]( f ) - if(undefineds.isEmpty) - result - else - throw Uninitialized(sMap.keys.toSeq, delegates, undefineds.values.flatten.toList, false) - } - private[this] def delegateForKey[T](sMap: ScopedMap, k: ScopedKey[T], scopes: Seq[Scope], ref: Setting[_], selfRefOk: Boolean): Either[Undefined, ScopedKey[T]] = - { - val skeys = scopes.iterator.map(x => ScopedKey(x, k.key)) - val definedAt = skeys.find( sk => (selfRefOk || ref.key != sk) && (sMap contains sk)) - definedAt.toRight(Undefined(ref, k)) - } + private[this] def applyInits(ordered: Seq[Compiled[_]])(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = + { + val x = java.util.concurrent.Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors) + try { + val eval: EvaluateSettings[Scope] = new EvaluateSettings[Scope] { + override val init: Init.this.type = Init.this + def compiledSettings = ordered + def executor = x + } + eval.run + } finally { x.shutdown() } + } - private[this] def applyInits(ordered: Seq[Compiled[_]])(implicit delegates: Scope => Seq[Scope]): Settings[Scope] = - { - val x = java.util.concurrent.Executors.newFixedThreadPool(Runtime.getRuntime.availableProcessors) - try { - val eval: EvaluateSettings[Scope] = new EvaluateSettings[Scope] { - override val init: Init.this.type = Init.this - def compiledSettings = ordered - def executor = x - } - eval.run - } finally { x.shutdown() } - } + def showUndefined(u: Undefined, validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope])(implicit display: Show[ScopedKey[_]]): String = + { + val guessed = guessIntendedScope(validKeys, delegates, u.referencedKey) + val derived = u.defining.isDerived + val refString = display(u.defining.key) + val sourceString = if (derived) "" else parenPosString(u.defining) + val guessedString = if (derived) "" else guessed.map(g => "\n Did you mean " + display(g) + " ?").toList.mkString + val derivedString = if (derived) ", which is a derived setting that needs this key to be defined in this scope." else "" + display(u.referencedKey) + " from " + refString + sourceString + derivedString + guessedString + } + private[this] def parenPosString(s: Setting[_]): String = + s.positionString match { case None => ""; case Some(s) => " (" + s + ")" } - def showUndefined(u: Undefined, validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope])(implicit display: Show[ScopedKey[_]]): String = - { - val guessed = guessIntendedScope(validKeys, delegates, u.referencedKey) - val derived = u.defining.isDerived - val refString = display(u.defining.key) - val sourceString = if(derived) "" else parenPosString(u.defining) - val guessedString = if(derived) "" else guessed.map(g => "\n Did you mean " + display(g) + " ?").toList.mkString - val derivedString = if(derived) ", which is a derived setting that needs this key to be defined in this scope." 
else "" - display(u.referencedKey) + " from " + refString + sourceString + derivedString + guessedString - } - private[this] def parenPosString(s: Setting[_]): String = - s.positionString match { case None => ""; case Some(s) => " (" + s + ")" } + def guessIntendedScope(validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope], key: ScopedKey[_]): Option[ScopedKey[_]] = + { + val distances = validKeys.flatMap { validKey => refinedDistance(delegates, validKey, key).map(dist => (dist, validKey)) } + distances.sortBy(_._1).map(_._2).headOption + } + def refinedDistance(delegates: Scope => Seq[Scope], a: ScopedKey[_], b: ScopedKey[_]): Option[Int] = + if (a.key != b.key || a == b) None + else { + val dist = delegates(a.scope).indexOf(b.scope) + if (dist < 0) None else Some(dist) + } - def guessIntendedScope(validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope], key: ScopedKey[_]): Option[ScopedKey[_]] = - { - val distances = validKeys.flatMap { validKey => refinedDistance(delegates, validKey, key).map( dist => (dist, validKey) ) } - distances.sortBy(_._1).map(_._2).headOption - } - def refinedDistance(delegates: Scope => Seq[Scope], a: ScopedKey[_], b: ScopedKey[_]): Option[Int] = - if(a.key != b.key || a == b) None - else - { - val dist = delegates(a.scope).indexOf(b.scope) - if(dist < 0) None else Some(dist) - } + final class Uninitialized(val undefined: Seq[Undefined], override val toString: String) extends Exception(toString) + final class Undefined private[sbt] (val defining: Setting[_], val referencedKey: ScopedKey[_]) { + @deprecated("For compatibility only, use `defining` directly.", "0.13.1") + val definingKey = defining.key + @deprecated("For compatibility only, use `defining` directly.", "0.13.1") + val derived: Boolean = defining.isDerived + @deprecated("Use the non-deprecated Undefined factory method.", "0.13.1") + def this(definingKey: ScopedKey[_], referencedKey: ScopedKey[_], derived: Boolean) = this(fakeUndefinedSetting(definingKey, derived), referencedKey) + } + final class RuntimeUndefined(val undefined: Seq[Undefined]) extends RuntimeException("References to undefined settings at runtime.") - final class Uninitialized(val undefined: Seq[Undefined], override val toString: String) extends Exception(toString) - final class Undefined private[sbt](val defining: Setting[_], val referencedKey: ScopedKey[_]) - { - @deprecated("For compatibility only, use `defining` directly.", "0.13.1") - val definingKey = defining.key - @deprecated("For compatibility only, use `defining` directly.", "0.13.1") - val derived: Boolean = defining.isDerived - @deprecated("Use the non-deprecated Undefined factory method.", "0.13.1") - def this(definingKey: ScopedKey[_], referencedKey: ScopedKey[_], derived: Boolean) = this( fakeUndefinedSetting(definingKey, derived), referencedKey) - } - final class RuntimeUndefined(val undefined: Seq[Undefined]) extends RuntimeException("References to undefined settings at runtime.") + @deprecated("Use the other overload.", "0.13.1") + def Undefined(definingKey: ScopedKey[_], referencedKey: ScopedKey[_], derived: Boolean): Undefined = + new Undefined(fakeUndefinedSetting(definingKey, derived), referencedKey) + private[this] def fakeUndefinedSetting[T](definingKey: ScopedKey[T], d: Boolean): Setting[T] = + { + val init: Initialize[T] = pure(() => error("Dummy setting for compatibility only.")) + new Setting(definingKey, init, NoPosition) { override def isDerived = d } + } - @deprecated("Use the other overload.", "0.13.1") - def Undefined(definingKey: 
ScopedKey[_], referencedKey: ScopedKey[_], derived: Boolean): Undefined = - new Undefined(fakeUndefinedSetting(definingKey, derived), referencedKey) - private[this] def fakeUndefinedSetting[T](definingKey: ScopedKey[T], d: Boolean): Setting[T] = - { - val init: Initialize[T] = pure(() => error("Dummy setting for compatibility only.")) - new Setting(definingKey, init, NoPosition) { override def isDerived = d } - } + def Undefined(defining: Setting[_], referencedKey: ScopedKey[_]): Undefined = new Undefined(defining, referencedKey) + def Uninitialized(validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope], keys: Seq[Undefined], runtime: Boolean)(implicit display: Show[ScopedKey[_]]): Uninitialized = + { + assert(!keys.isEmpty) + val suffix = if (keys.length > 1) "s" else "" + val prefix = if (runtime) "Runtime reference" else "Reference" + val keysString = keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "") + new Uninitialized(keys, prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ") + } + final class Compiled[T](val key: ScopedKey[T], val dependencies: Iterable[ScopedKey[_]], val settings: Seq[Setting[T]]) { + override def toString = showFullKey(key) + } + final class Flattened(val key: ScopedKey[_], val dependencies: Iterable[ScopedKey[_]]) - def Undefined(defining: Setting[_], referencedKey: ScopedKey[_]): Undefined = new Undefined(defining, referencedKey) - def Uninitialized(validKeys: Seq[ScopedKey[_]], delegates: Scope => Seq[Scope], keys: Seq[Undefined], runtime: Boolean)(implicit display: Show[ScopedKey[_]]): Uninitialized = - { - assert(!keys.isEmpty) - val suffix = if(keys.length > 1) "s" else "" - val prefix = if(runtime) "Runtime reference" else "Reference" - val keysString = keys.map(u => showUndefined(u, validKeys, delegates)).mkString("\n\n ", "\n\n ", "") - new Uninitialized(keys, prefix + suffix + " to undefined setting" + suffix + ": " + keysString + "\n ") - } - final class Compiled[T](val key: ScopedKey[T], val dependencies: Iterable[ScopedKey[_]], val settings: Seq[Setting[T]]) - { - override def toString = showFullKey(key) - } - final class Flattened(val key: ScopedKey[_], val dependencies: Iterable[ScopedKey[_]]) + def flattenLocals(compiled: CompiledMap): Map[ScopedKey[_], Flattened] = + { + import collection.breakOut + val locals = compiled flatMap { case (key, comp) => if (key.key.isLocal) Seq[Compiled[_]](comp) else Nil } + val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep => if (dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)) + def flatten(cmap: Map[ScopedKey[_], Flattened], key: ScopedKey[_], deps: Iterable[ScopedKey[_]]): Flattened = + new Flattened(key, deps.flatMap(dep => if (dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)) - def flattenLocals(compiled: CompiledMap): Map[ScopedKey[_],Flattened] = - { - import collection.breakOut - val locals = compiled flatMap { case (key, comp) => if(key.key.isLocal) Seq[Compiled[_]](comp) else Nil } - val ordered = Dag.topologicalSort(locals)(_.dependencies.flatMap(dep => if(dep.key.isLocal) Seq[Compiled[_]](compiled(dep)) else Nil)) - def flatten(cmap: Map[ScopedKey[_],Flattened], key: ScopedKey[_], deps: Iterable[ScopedKey[_]]): Flattened = - new Flattened(key, deps.flatMap(dep => if(dep.key.isLocal) cmap(dep).dependencies else dep :: Nil)) + val empty = Map.empty[ScopedKey[_], Flattened] + val flattenedLocals = (empty /: ordered) { (cmap, c) => cmap.updated(c.key, flatten(cmap, c.key, c.dependencies)) } + compiled 
flatMap { + case (key, comp) => + if (key.key.isLocal) + Nil + else + Seq[(ScopedKey[_], Flattened)]((key, flatten(flattenedLocals, key, comp.dependencies))) + } + } - val empty = Map.empty[ScopedKey[_],Flattened] - val flattenedLocals = (empty /: ordered) { (cmap, c) => cmap.updated(c.key, flatten(cmap, c.key, c.dependencies)) } - compiled flatMap{ case (key, comp) => - if(key.key.isLocal) - Nil - else - Seq[ (ScopedKey[_], Flattened)]( (key, flatten(flattenedLocals, key, comp.dependencies)) ) - } - } + def definedAtString(settings: Seq[Setting[_]]): String = + { + val posDefined = settings.flatMap(_.positionString.toList) + if (posDefined.size > 0) { + val header = if (posDefined.size == settings.size) "defined at:" else + "some of the defining occurrences:" + header + (posDefined.distinct mkString ("\n\t", "\n\t", "\n")) + } else "" + } - def definedAtString(settings: Seq[Setting[_]]): String = - { - val posDefined = settings.flatMap(_.positionString.toList) - if (posDefined.size > 0) { - val header = if (posDefined.size == settings.size) "defined at:" else - "some of the defining occurrences:" - header + (posDefined.distinct mkString ("\n\t", "\n\t", "\n")) - } else "" - } + /** + * Intersects two scopes, returning the more specific one if they intersect, or None otherwise. + */ + private[sbt] def intersect(s1: Scope, s2: Scope)(implicit delegates: Scope => Seq[Scope]): Option[Scope] = + if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific + else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific + else None - /** - * Intersects two scopes, returning the more specific one if they intersect, or None otherwise. - */ - private[sbt] def intersect(s1: Scope, s2: Scope)(implicit delegates: Scope => Seq[Scope]): Option[Scope] = - if (delegates(s1).contains(s2)) Some(s1) // s1 is more specific - else if (delegates(s2).contains(s1)) Some(s2) // s2 is more specific - else None + private[this] def deriveAndLocal(init: Seq[Setting[_]])(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal): Seq[Setting[_]] = + { + import collection.mutable - private[this] def deriveAndLocal(init: Seq[Setting[_]])(implicit delegates: Scope => Seq[Scope], scopeLocal: ScopeLocal): Seq[Setting[_]] = - { - import collection.mutable + final class Derived(val setting: DerivedSetting[_]) { + val dependencies = setting.dependencies.map(_.key) + def triggeredBy = dependencies.filter(setting.trigger) + val inScopes = new mutable.HashSet[Scope] + val outputs = new mutable.ListBuffer[Setting[_]] + } + final class Deriveds(val key: AttributeKey[_], val settings: mutable.ListBuffer[Derived]) { + def dependencies = settings.flatMap(_.dependencies) + // This is mainly for use in the cyclic reference error message + override def toString = s"Derived settings for ${key.label}, ${definedAtString(settings.map(_.setting))}" + } - final class Derived(val setting: DerivedSetting[_]) { - val dependencies = setting.dependencies.map(_.key) - def triggeredBy = dependencies.filter(setting.trigger) - val inScopes = new mutable.HashSet[Scope] - val outputs = new mutable.ListBuffer[Setting[_]] - } - final class Deriveds(val key: AttributeKey[_], val settings: mutable.ListBuffer[Derived]) { - def dependencies = settings.flatMap(_.dependencies) - // This is mainly for use in the cyclic reference error message - override def toString = s"Derived settings for ${key.label}, ${definedAtString(settings.map(_.setting))}" - } + // separate `derived` settings from normal settings (`defs`) + val (derived, rawDefs) = 
Util.separate[Setting[_], Derived, Setting[_]](init) { case d: DerivedSetting[_] => Left(new Derived(d)); case s => Right(s) } + val defs = addLocal(rawDefs)(scopeLocal) - // separate `derived` settings from normal settings (`defs`) - val (derived, rawDefs) = Util.separate[Setting[_],Derived,Setting[_]](init) { case d: DerivedSetting[_] => Left(new Derived(d)); case s => Right(s) } - val defs = addLocal(rawDefs)(scopeLocal) + // group derived settings by the key they define + val derivsByDef = new mutable.HashMap[AttributeKey[_], Deriveds] + for (s <- derived) { + val key = s.setting.key.key + derivsByDef.getOrElseUpdate(key, new Deriveds(key, new mutable.ListBuffer)).settings += s + } + // sort derived settings so that dependencies come first + // this is necessary when verifying that a derived setting's dependencies exist + val ddeps = (d: Deriveds) => d.dependencies.flatMap(derivsByDef.get) + val sortedDerivs = Dag.topologicalSort(derivsByDef.values)(ddeps) - // group derived settings by the key they define - val derivsByDef = new mutable.HashMap[AttributeKey[_], Deriveds] - for(s <- derived) { - val key = s.setting.key.key - derivsByDef.getOrElseUpdate(key, new Deriveds(key, new mutable.ListBuffer)).settings += s - } + // index derived settings by triggering key. This maps a key to the list of settings potentially derived from it. + val derivedBy = new mutable.HashMap[AttributeKey[_], mutable.ListBuffer[Derived]] + for (s <- derived; d <- s.triggeredBy) + derivedBy.getOrElseUpdate(d, new mutable.ListBuffer) += s - // sort derived settings so that dependencies come first - // this is necessary when verifying that a derived setting's dependencies exist - val ddeps = (d: Deriveds) => d.dependencies.flatMap(derivsByDef.get) - val sortedDerivs = Dag.topologicalSort(derivsByDef.values)(ddeps) + // Map a DerivedSetting[_] to the `Derived` struct wrapping it. Used to ultimately replace a DerivedSetting with + // the `Setting`s that were actually derived from it: `Derived.outputs` + val derivedToStruct: Map[DerivedSetting[_], Derived] = (derived map { s => s.setting -> s }).toMap - // index derived settings by triggering key. This maps a key to the list of settings potentially derived from it. - val derivedBy = new mutable.HashMap[AttributeKey[_], mutable.ListBuffer[Derived]] - for(s <- derived; d <- s.triggeredBy) - derivedBy.getOrElseUpdate(d, new mutable.ListBuffer) += s + // set of defined scoped keys, used to ensure a derived setting is only added if all dependencies are present + val defined = new mutable.HashSet[ScopedKey[_]] + def addDefs(ss: Seq[Setting[_]]) { for (s <- ss) defined += s.key } + addDefs(defs) - // Map a DerivedSetting[_] to the `Derived` struct wrapping it. 
Used to ultimately replace a DerivedSetting with - // the `Setting`s that were actually derived from it: `Derived.outputs` - val derivedToStruct: Map[DerivedSetting[_], Derived] = (derived map { s => s.setting -> s }).toMap + // true iff the scoped key is in `defined`, taking delegation into account + def isDefined(key: AttributeKey[_], scope: Scope) = + delegates(scope).exists(s => defined.contains(ScopedKey(s, key))) - // set of defined scoped keys, used to ensure a derived setting is only added if all dependencies are present - val defined = new mutable.HashSet[ScopedKey[_]] - def addDefs(ss: Seq[Setting[_]]) { for(s <- ss) defined += s.key } - addDefs(defs) + // true iff all dependencies of derived setting `d` have a value (potentially via delegation) in `scope` + def allDepsDefined(d: Derived, scope: Scope, local: Set[AttributeKey[_]]): Boolean = + d.dependencies.forall(dep => local(dep) || isDefined(dep, scope)) - // true iff the scoped key is in `defined`, taking delegation into account - def isDefined(key: AttributeKey[_], scope: Scope) = - delegates(scope).exists(s => defined.contains(ScopedKey(s, key))) + // Returns the list of injectable derived settings and their local settings for `sk`. + // The settings are to be injected under `outputScope` = whichever scope is more specific of: + // * the dependency's (`sk`) scope + // * the DerivedSetting's scope in which it has been declared, `definingScope` + // provided that these two scopes intersect. + // A derived setting is injectable if: + // 1. it has not been previously injected into outputScope + // 2. it applies to outputScope (as determined by its `filter`) + // 3. all of its dependencies are defined for outputScope (allowing for delegation) + // This needs to handle local settings because a derived setting wouldn't be injected if it's local setting didn't exist yet. + val deriveFor = (sk: ScopedKey[_]) => { + val derivedForKey: List[Derived] = derivedBy.get(sk.key).toList.flatten + val scope = sk.scope + def localAndDerived(d: Derived): Seq[Setting[_]] = { + def definingScope = d.setting.key.scope + val outputScope = intersect(scope, definingScope) + outputScope collect { + case s if !d.inScopes.contains(s) && d.setting.filter(s) => + val local = d.dependencies.flatMap(dep => scopeLocal(ScopedKey(s, dep))) + if (allDepsDefined(d, s, local.map(_.key.key).toSet)) { + d.inScopes.add(s) + val out = local :+ d.setting.setScope(s) + d.outputs ++= out + out + } else + Nil + } getOrElse Nil + } + derivedForKey.flatMap(localAndDerived) + } - // true iff all dependencies of derived setting `d` have a value (potentially via delegation) in `scope` - def allDepsDefined(d: Derived, scope: Scope, local: Set[AttributeKey[_]]): Boolean = - d.dependencies.forall(dep => local(dep) || isDefined(dep, scope)) + val processed = new mutable.HashSet[ScopedKey[_]] - // Returns the list of injectable derived settings and their local settings for `sk`. - // The settings are to be injected under `outputScope` = whichever scope is more specific of: - // * the dependency's (`sk`) scope - // * the DerivedSetting's scope in which it has been declared, `definingScope` - // provided that these two scopes intersect. - // A derived setting is injectable if: - // 1. it has not been previously injected into outputScope - // 2. it applies to outputScope (as determined by its `filter`) - // 3. 
all of its dependencies are defined for outputScope (allowing for delegation) - // This needs to handle local settings because a derived setting wouldn't be injected if it's local setting didn't exist yet. - val deriveFor = (sk: ScopedKey[_]) => { - val derivedForKey: List[Derived] = derivedBy.get(sk.key).toList.flatten - val scope = sk.scope - def localAndDerived(d: Derived): Seq[Setting[_]] = { - def definingScope = d.setting.key.scope - val outputScope = intersect(scope, definingScope) - outputScope collect { case s if !d.inScopes.contains(s) && d.setting.filter(s) => - val local = d.dependencies.flatMap(dep => scopeLocal(ScopedKey(s, dep))) - if(allDepsDefined(d, s, local.map(_.key.key).toSet)) { - d.inScopes.add(s) - val out = local :+ d.setting.setScope(s) - d.outputs ++= out - out - } else - Nil - } getOrElse Nil - } - derivedForKey.flatMap(localAndDerived) - } + // derives settings, transitively so that a derived setting can trigger another + def process(rem: List[Setting[_]]): Unit = rem match { + case s :: ss => + val sk = s.key + val ds = if (processed.add(sk)) deriveFor(sk) else Nil + addDefs(ds) + process(ds ::: ss) + case Nil => + } + process(defs.toList) - val processed = new mutable.HashSet[ScopedKey[_]] + // Take all the original defs and DerivedSettings along with locals, replace each DerivedSetting with the actual + // settings that were derived. + val allDefs = addLocal(init)(scopeLocal) + allDefs flatMap { case d: DerivedSetting[_] => (derivedToStruct get d map (_.outputs)).toStream.flatten; case s => Stream(s) } + } - // derives settings, transitively so that a derived setting can trigger another - def process(rem: List[Setting[_]]): Unit = rem match { - case s :: ss => - val sk = s.key - val ds = if(processed.add(sk)) deriveFor(sk) else Nil - addDefs(ds) - process(ds ::: ss) - case Nil => - } - process(defs.toList) + sealed trait Initialize[T] { + def dependencies: Seq[ScopedKey[_]] + def apply[S](g: T => S): Initialize[S] - // Take all the original defs and DerivedSettings along with locals, replace each DerivedSetting with the actual - // settings that were derived. 
- val allDefs = addLocal(init)(scopeLocal) - allDefs flatMap { case d: DerivedSetting[_] => (derivedToStruct get d map (_.outputs)).toStream.flatten; case s => Stream(s) } - } + @deprecated("Will be made private.", "0.13.2") + def mapReferenced(g: MapScoped): Initialize[T] + @deprecated("Will be made private.", "0.13.2") + def mapConstant(g: MapConstant): Initialize[T] - sealed trait Initialize[T] - { - def dependencies: Seq[ScopedKey[_]] - def apply[S](g: T => S): Initialize[S] + @deprecated("Will be made private.", "0.13.2") + def validateReferenced(g: ValidateRef): ValidatedInit[T] = + validateKeyReferenced(new ValidateKeyRef { def apply[T](key: ScopedKey[T], selfRefOk: Boolean) = g(key) }) - @deprecated("Will be made private.", "0.13.2") - def mapReferenced(g: MapScoped): Initialize[T] - @deprecated("Will be made private.", "0.13.2") - def mapConstant(g: MapConstant): Initialize[T] + private[sbt] def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T] - @deprecated("Will be made private.", "0.13.2") - def validateReferenced(g: ValidateRef): ValidatedInit[T] = - validateKeyReferenced( new ValidateKeyRef { def apply[T](key: ScopedKey[T], selfRefOk: Boolean) = g(key) }) + def evaluate(map: Settings[Scope]): T + def zip[S](o: Initialize[S]): Initialize[(T, S)] = zipTupled(o)(idFun) + def zipWith[S, U](o: Initialize[S])(f: (T, S) => U): Initialize[U] = zipTupled(o)(f.tupled) + private[this] def zipTupled[S, U](o: Initialize[S])(f: ((T, S)) => U): Initialize[U] = + new Apply[({ type l[L[x]] = (L[T], L[S]) })#l, U](f, (this, o), AList.tuple2[T, S]) + /** A fold on the static attributes of this and nested Initializes. */ + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S + } + object Initialize { + implicit def joinInitialize[T](s: Seq[Initialize[T]]): JoinInitSeq[T] = new JoinInitSeq(s) + final class JoinInitSeq[T](s: Seq[Initialize[T]]) { + def joinWith[S](f: Seq[T] => S): Initialize[S] = uniform(s)(f) + def join: Initialize[Seq[T]] = uniform(s)(idFun) + } + def join[T](inits: Seq[Initialize[T]]): Initialize[Seq[T]] = uniform(inits)(idFun) + def joinAny[M[_]](inits: Seq[Initialize[M[T]] forSome { type T }]): Initialize[Seq[M[_]]] = + join(inits.asInstanceOf[Seq[Initialize[M[Any]]]]).asInstanceOf[Initialize[Seq[M[T] forSome { type T }]]] + } + object SettingsDefinition { + implicit def unwrapSettingsDefinition(d: SettingsDefinition): Seq[Setting[_]] = d.settings + implicit def wrapSettingsDefinition(ss: Seq[Setting[_]]): SettingsDefinition = new SettingList(ss) + } + sealed trait SettingsDefinition { + def settings: Seq[Setting[_]] + } + final class SettingList(val settings: Seq[Setting[_]]) extends SettingsDefinition + sealed class Setting[T] private[Init] (val key: ScopedKey[T], val init: Initialize[T], val pos: SourcePosition) extends SettingsDefinition { + def settings = this :: Nil + def definitive: Boolean = !init.dependencies.contains(key) + def dependencies: Seq[ScopedKey[_]] = remove(init.dependencies, key) + @deprecated("Will be made private.", "0.13.2") + def mapReferenced(g: MapScoped): Setting[T] = make(key, init mapReferenced g, pos) + @deprecated("Will be made private.", "0.13.2") + def validateReferenced(g: ValidateRef): Either[Seq[Undefined], Setting[T]] = (init validateReferenced g).right.map(newI => make(key, newI, pos)) - private[sbt] def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T] + private[sbt] def validateKeyReferenced(g: ValidateKeyRef): Either[Seq[Undefined], Setting[T]] = + (init validateKeyReferenced 
g).right.map(newI => make(key, newI, pos)) - def evaluate(map: Settings[Scope]): T - def zip[S](o: Initialize[S]): Initialize[(T,S)] = zipTupled(o)(idFun) - def zipWith[S,U](o: Initialize[S])(f: (T,S) => U): Initialize[U] = zipTupled(o)(f.tupled) - private[this] def zipTupled[S,U](o: Initialize[S])(f: ((T,S)) => U): Initialize[U] = - new Apply[({ type l[L[x]] = (L[T], L[S]) })#l, U](f, (this, o), AList.tuple2[T,S]) - /** A fold on the static attributes of this and nested Initializes. */ - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S - } - object Initialize - { - implicit def joinInitialize[T](s: Seq[Initialize[T]]): JoinInitSeq[T] = new JoinInitSeq(s) - final class JoinInitSeq[T](s: Seq[Initialize[T]]) - { - def joinWith[S](f: Seq[T] => S): Initialize[S] = uniform(s)(f) - def join: Initialize[Seq[T]] = uniform(s)(idFun) - } - def join[T](inits: Seq[Initialize[T]]): Initialize[Seq[T]] = uniform(inits)(idFun) - def joinAny[M[_]](inits: Seq[Initialize[M[T]] forSome { type T }]): Initialize[Seq[M[_]]] = - join(inits.asInstanceOf[Seq[Initialize[M[Any]]]]).asInstanceOf[Initialize[Seq[M[T] forSome { type T }]]] - } - object SettingsDefinition { - implicit def unwrapSettingsDefinition(d: SettingsDefinition): Seq[Setting[_]] = d.settings - implicit def wrapSettingsDefinition(ss: Seq[Setting[_]]): SettingsDefinition = new SettingList(ss) - } - sealed trait SettingsDefinition { - def settings: Seq[Setting[_]] - } - final class SettingList(val settings: Seq[Setting[_]]) extends SettingsDefinition - sealed class Setting[T] private[Init](val key: ScopedKey[T], val init: Initialize[T], val pos: SourcePosition) extends SettingsDefinition - { - def settings = this :: Nil - def definitive: Boolean = !init.dependencies.contains(key) - def dependencies: Seq[ScopedKey[_]] = remove(init.dependencies, key) - @deprecated("Will be made private.", "0.13.2") - def mapReferenced(g: MapScoped): Setting[T] = make(key, init mapReferenced g, pos) - @deprecated("Will be made private.", "0.13.2") - def validateReferenced(g: ValidateRef): Either[Seq[Undefined], Setting[T]] = (init validateReferenced g).right.map(newI => make(key, newI, pos)) + def mapKey(g: MapScoped): Setting[T] = make(g(key), init, pos) + def mapInit(f: (ScopedKey[T], T) => T): Setting[T] = make(key, init(t => f(key, t)), pos) + @deprecated("Will be made private.", "0.13.2") + def mapConstant(g: MapConstant): Setting[T] = make(key, init mapConstant g, pos) + def withPos(pos: SourcePosition) = make(key, init, pos) + def positionString: Option[String] = pos match { + case pos: FilePosition => Some(pos.path + ":" + pos.startLine) + case NoPosition => None + } + private[sbt] def mapInitialize(f: Initialize[T] => Initialize[T]): Setting[T] = make(key, f(init), pos) + override def toString = "setting(" + key + ") at " + pos - private[sbt] def validateKeyReferenced(g: ValidateKeyRef): Either[Seq[Undefined], Setting[T]] = - (init validateKeyReferenced g).right.map(newI => make(key, newI, pos)) + protected[this] def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = new Setting[T](key, init, pos) + protected[sbt] def isDerived: Boolean = false + private[sbt] def setScope(s: Scope): Setting[T] = make(key.copy(scope = s), init.mapReferenced(mapScope(const(s))), pos) + /** Turn this setting into a `DefaultSetting` if it's not already, otherwise returns `this` */ + private[sbt] def default(id: => Long = nextDefaultID()): DefaultSetting[T] = DefaultSetting(key, init, pos, id) + } + private[Init] sealed 
class DerivedSetting[T](sk: ScopedKey[T], i: Initialize[T], p: SourcePosition, val filter: Scope => Boolean, val trigger: AttributeKey[_] => Boolean) extends Setting[T](sk, i, p) { + override def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = new DerivedSetting[T](key, init, pos, filter, trigger) + protected[sbt] override def isDerived: Boolean = true + override def default(_id: => Long): DefaultSetting[T] = new DerivedSetting[T](sk, i, p, filter, trigger) with DefaultSetting[T] { val id = _id } + override def toString = "derived " + super.toString + } + // Only keep the first occurence of this setting and move it to the front so that it has lower precedence than non-defaults. + // This is intended for internal sbt use only, where alternatives like Plugin.globalSettings are not available. + private[Init] sealed trait DefaultSetting[T] extends Setting[T] { + val id: Long + override def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = super.make(key, init, pos) default id + override final def hashCode = id.hashCode + override final def equals(o: Any): Boolean = o match { case d: DefaultSetting[_] => d.id == id; case _ => false } + override def toString = s"default($id) " + super.toString + override def default(id: => Long) = this + } - def mapKey(g: MapScoped): Setting[T] = make(g(key), init, pos) - def mapInit(f: (ScopedKey[T], T) => T): Setting[T] = make(key, init(t => f(key,t)), pos) - @deprecated("Will be made private.", "0.13.2") - def mapConstant(g: MapConstant): Setting[T] = make(key, init mapConstant g, pos) - def withPos(pos: SourcePosition) = make(key, init, pos) - def positionString: Option[String] = pos match { - case pos: FilePosition => Some(pos.path + ":" + pos.startLine) - case NoPosition => None - } - private[sbt] def mapInitialize(f: Initialize[T] => Initialize[T]): Setting[T] = make(key, f(init), pos) - override def toString = "setting(" + key + ") at " + pos + object DefaultSetting { + def apply[T](sk: ScopedKey[T], i: Initialize[T], p: SourcePosition, _id: Long) = new Setting[T](sk, i, p) with DefaultSetting[T] { val id = _id } + } - protected[this] def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = new Setting[T](key, init, pos) - protected[sbt] def isDerived: Boolean = false - private[sbt] def setScope(s: Scope): Setting[T] = make(key.copy(scope = s), init.mapReferenced(mapScope(const(s))), pos) - /** Turn this setting into a `DefaultSetting` if it's not already, otherwise returns `this` */ - private[sbt] def default(id: => Long = nextDefaultID()): DefaultSetting[T] = DefaultSetting(key, init, pos, id) - } - private[Init] sealed class DerivedSetting[T](sk: ScopedKey[T], i: Initialize[T], p: SourcePosition, val filter: Scope => Boolean, val trigger: AttributeKey[_] => Boolean) extends Setting[T](sk, i, p) { - override def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = new DerivedSetting[T](key, init, pos, filter, trigger) - protected[sbt] override def isDerived: Boolean = true - override def default(_id: => Long): DefaultSetting[T] = new DerivedSetting[T](sk, i, p, filter, trigger) with DefaultSetting[T] { val id = _id } - override def toString = "derived " + super.toString - } - // Only keep the first occurence of this setting and move it to the front so that it has lower precedence than non-defaults. - // This is intended for internal sbt use only, where alternatives like Plugin.globalSettings are not available. 
- private[Init] sealed trait DefaultSetting[T] extends Setting[T] { - val id: Long - override def make[T](key: ScopedKey[T], init: Initialize[T], pos: SourcePosition): Setting[T] = super.make(key, init, pos) default id - override final def hashCode = id.hashCode - override final def equals(o: Any): Boolean = o match { case d: DefaultSetting[_] => d.id == id; case _ => false } - override def toString = s"default($id) " + super.toString - override def default(id: => Long) = this - } + private[this] def handleUndefined[T](vr: ValidatedInit[T]): Initialize[T] = vr match { + case Left(undefs) => throw new RuntimeUndefined(undefs) + case Right(x) => x + } - object DefaultSetting { - def apply[T](sk: ScopedKey[T], i: Initialize[T], p: SourcePosition, _id: Long) = new Setting[T](sk, i, p) with DefaultSetting[T] { val id = _id } - } + private[this] lazy val getValidated = + new (ValidatedInit ~> Initialize) { def apply[T](v: ValidatedInit[T]) = handleUndefined[T](v) } + // mainly for reducing generated class count + private[this] def validateKeyReferencedT(g: ValidateKeyRef) = + new (Initialize ~> ValidatedInit) { def apply[T](i: Initialize[T]) = i validateKeyReferenced g } - private[this] def handleUndefined[T](vr: ValidatedInit[T]): Initialize[T] = vr match { - case Left(undefs) => throw new RuntimeUndefined(undefs) - case Right(x) => x - } + private[this] def mapReferencedT(g: MapScoped) = + new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapReferenced g } - private[this] lazy val getValidated = - new (ValidatedInit ~> Initialize) { def apply[T](v: ValidatedInit[T]) = handleUndefined[T](v) } + private[this] def mapConstantT(g: MapConstant) = + new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapConstant g } - // mainly for reducing generated class count - private[this] def validateKeyReferencedT(g: ValidateKeyRef) = - new (Initialize ~> ValidatedInit) { def apply[T](i: Initialize[T]) = i validateKeyReferenced g } + private[this] def evaluateT(g: Settings[Scope]) = + new (Initialize ~> Id) { def apply[T](i: Initialize[T]) = i evaluate g } - private[this] def mapReferencedT(g: MapScoped) = - new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapReferenced g } + private[this] def deps(ls: Seq[Initialize[_]]): Seq[ScopedKey[_]] = ls.flatMap(_.dependencies) - private[this] def mapConstantT(g: MapConstant) = - new (Initialize ~> Initialize) { def apply[T](i: Initialize[T]) = i mapConstant g } + sealed trait Keyed[S, T] extends Initialize[T] { + def scopedKey: ScopedKey[S] + def transform: S => T + final def dependencies = scopedKey :: Nil + final def apply[Z](g: T => Z): Initialize[Z] = new GetValue(scopedKey, g compose transform) + final def evaluate(ss: Settings[Scope]): T = transform(getValue(ss, scopedKey)) + final def mapReferenced(g: MapScoped): Initialize[T] = new GetValue(g(scopedKey), transform) + private[sbt] final def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T] = g(scopedKey, false) match { + case Left(un) => Left(un :: Nil) + case Right(nk) => Right(new GetValue(nk, transform)) + } + final def mapConstant(g: MapConstant): Initialize[T] = g(scopedKey) match { + case None => this + case Some(const) => new Value(() => transform(const)) + } + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init + } + private[this] final class GetValue[S, T](val scopedKey: ScopedKey[S], val transform: S => T) extends Keyed[S, T] + trait KeyedInitialize[T] extends Keyed[T, T] { + final val transform = idFun[T] + 
} - private[this] def evaluateT(g: Settings[Scope]) = - new (Initialize ~> Id) { def apply[T](i: Initialize[T]) = i evaluate g } + private[sbt] final class TransformCapture(val f: Initialize ~> Initialize) extends Initialize[Initialize ~> Initialize] { + def dependencies = Nil + def apply[Z](g2: (Initialize ~> Initialize) => Z): Initialize[Z] = map(this)(g2) + def evaluate(ss: Settings[Scope]): Initialize ~> Initialize = f + def mapReferenced(g: MapScoped) = new TransformCapture(mapReferencedT(g) ∙ f) + def mapConstant(g: MapConstant) = new TransformCapture(mapConstantT(g) ∙ f) + def validateKeyReferenced(g: ValidateKeyRef) = Right(new TransformCapture(getValidated ∙ validateKeyReferencedT(g) ∙ f)) + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init + } + private[sbt] final class ValidationCapture[T](val key: ScopedKey[T], val selfRefOk: Boolean) extends Initialize[ScopedKey[T]] { + def dependencies = Nil + def apply[Z](g2: ScopedKey[T] => Z): Initialize[Z] = map(this)(g2) + def evaluate(ss: Settings[Scope]) = key + def mapReferenced(g: MapScoped) = new ValidationCapture(g(key), selfRefOk) + def mapConstant(g: MapConstant) = this + def validateKeyReferenced(g: ValidateKeyRef) = g(key, selfRefOk) match { + case Left(un) => Left(un :: Nil) + case Right(k) => Right(new ValidationCapture(k, selfRefOk)) + } - private[this] def deps(ls: Seq[Initialize[_]]): Seq[ScopedKey[_]] = ls.flatMap(_.dependencies) + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init + } + private[sbt] final class Bind[S, T](val f: S => Initialize[T], val in: Initialize[S]) extends Initialize[T] { + def dependencies = in.dependencies + def apply[Z](g: T => Z): Initialize[Z] = new Bind[S, Z](s => f(s)(g), in) + def evaluate(ss: Settings[Scope]): T = f(in evaluate ss) evaluate ss + def mapReferenced(g: MapScoped) = new Bind[S, T](s => f(s) mapReferenced g, in mapReferenced g) + def validateKeyReferenced(g: ValidateKeyRef) = (in validateKeyReferenced g).right.map { validIn => + new Bind[S, T](s => handleUndefined(f(s) validateKeyReferenced g), validIn) + } + def mapConstant(g: MapConstant) = new Bind[S, T](s => f(s) mapConstant g, in mapConstant g) + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = in.processAttributes(init)(f) + } + private[sbt] final class Optional[S, T](val a: Option[Initialize[S]], val f: Option[S] => T) extends Initialize[T] { + def dependencies = deps(a.toList) + def apply[Z](g: T => Z): Initialize[Z] = new Optional[S, Z](a, g compose f) + def mapReferenced(g: MapScoped) = new Optional(a map mapReferencedT(g).fn, f) + def validateKeyReferenced(g: ValidateKeyRef) = a match { + case None => Right(this) + case Some(i) => Right(new Optional(i.validateKeyReferenced(g).right.toOption, f)) + } + def mapConstant(g: MapConstant): Initialize[T] = new Optional(a map mapConstantT(g).fn, f) + def evaluate(ss: Settings[Scope]): T = f(a.flatMap(i => trapBadRef(evaluateT(ss)(i)))) + // proper solution is for evaluate to be deprecated or for external use only and a new internal method returning Either be used + private[this] def trapBadRef[A](run: => A): Option[A] = try Some(run) catch { case e: InvalidReference => None } + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = a match { + case None => init + case Some(i) => i.processAttributes(init)(f) + } + } + private[sbt] final class Value[T](val value: () => T) extends Initialize[T] { + def dependencies = Nil + def mapReferenced(g: MapScoped) = 
this + def validateKeyReferenced(g: ValidateKeyRef) = Right(this) + def apply[S](g: T => S) = new Value[S](() => g(value())) + def mapConstant(g: MapConstant) = this + def evaluate(map: Settings[Scope]): T = value() + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init + } + private[sbt] final object StaticScopes extends Initialize[Set[Scope]] { + def dependencies = Nil + def mapReferenced(g: MapScoped) = this + def validateKeyReferenced(g: ValidateKeyRef) = Right(this) + def apply[S](g: Set[Scope] => S) = map(this)(g) + def mapConstant(g: MapConstant) = this + def evaluate(map: Settings[Scope]) = map.scopes + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init + } + private[sbt] final class Apply[K[L[x]], T](val f: K[Id] => T, val inputs: K[Initialize], val alist: AList[K]) extends Initialize[T] { + def dependencies = deps(alist.toList(inputs)) + def mapReferenced(g: MapScoped) = mapInputs(mapReferencedT(g)) + def apply[S](g: T => S) = new Apply(g compose f, inputs, alist) + def mapConstant(g: MapConstant) = mapInputs(mapConstantT(g)) + def mapInputs(g: Initialize ~> Initialize): Initialize[T] = new Apply(f, alist.transform(inputs, g), alist) + def evaluate(ss: Settings[Scope]) = f(alist.transform(inputs, evaluateT(ss))) + def validateKeyReferenced(g: ValidateKeyRef) = + { + val tx = alist.transform(inputs, validateKeyReferencedT(g)) + val undefs = alist.toList(tx).flatMap(_.left.toSeq.flatten) + val get = new (ValidatedInit ~> Initialize) { def apply[T](vr: ValidatedInit[T]) = vr.right.get } + if (undefs.isEmpty) Right(new Apply(f, alist.transform(tx, get), alist)) else Left(undefs) + } - sealed trait Keyed[S, T] extends Initialize[T] - { - def scopedKey: ScopedKey[S] - def transform: S => T - final def dependencies = scopedKey :: Nil - final def apply[Z](g: T => Z): Initialize[Z] = new GetValue(scopedKey, g compose transform) - final def evaluate(ss: Settings[Scope]): T = transform(getValue(ss, scopedKey)) - final def mapReferenced(g: MapScoped): Initialize[T] = new GetValue( g(scopedKey), transform) - private[sbt] final def validateKeyReferenced(g: ValidateKeyRef): ValidatedInit[T] = g(scopedKey, false) match { - case Left(un) => Left(un :: Nil) - case Right(nk) => Right(new GetValue(nk, transform)) - } - final def mapConstant(g: MapConstant): Initialize[T] = g(scopedKey) match { - case None => this - case Some(const) => new Value(() => transform(const)) - } - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init - } - private[this] final class GetValue[S,T](val scopedKey: ScopedKey[S], val transform: S => T) extends Keyed[S, T] - trait KeyedInitialize[T] extends Keyed[T, T] { - final val transform = idFun[T] - } - - private[sbt] final class TransformCapture(val f: Initialize ~> Initialize) extends Initialize[Initialize ~> Initialize] - { - def dependencies = Nil - def apply[Z](g2: (Initialize ~> Initialize) => Z): Initialize[Z] = map(this)(g2) - def evaluate(ss: Settings[Scope]): Initialize ~> Initialize = f - def mapReferenced(g: MapScoped) = new TransformCapture(mapReferencedT(g) ∙ f) - def mapConstant(g: MapConstant) = new TransformCapture(mapConstantT(g) ∙ f) - def validateKeyReferenced(g: ValidateKeyRef) = Right(new TransformCapture(getValidated ∙ validateKeyReferencedT(g) ∙ f)) - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init - } - private[sbt] final class ValidationCapture[T](val key: ScopedKey[T], val selfRefOk: Boolean) extends 
Initialize[ScopedKey[T]] { - def dependencies = Nil - def apply[Z](g2: ScopedKey[T] => Z): Initialize[Z] = map(this)(g2) - def evaluate(ss: Settings[Scope]) = key - def mapReferenced(g: MapScoped) = new ValidationCapture(g(key), selfRefOk) - def mapConstant(g: MapConstant) = this - def validateKeyReferenced(g: ValidateKeyRef) = g(key, selfRefOk) match { - case Left(un) => Left(un :: Nil) - case Right(k) => Right(new ValidationCapture(k, selfRefOk)) - } - - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init - } - private[sbt] final class Bind[S,T](val f: S => Initialize[T], val in: Initialize[S]) extends Initialize[T] - { - def dependencies = in.dependencies - def apply[Z](g: T => Z): Initialize[Z] = new Bind[S,Z](s => f(s)(g), in) - def evaluate(ss: Settings[Scope]): T = f(in evaluate ss) evaluate ss - def mapReferenced(g: MapScoped) = new Bind[S,T](s => f(s) mapReferenced g, in mapReferenced g) - def validateKeyReferenced(g: ValidateKeyRef) = (in validateKeyReferenced g).right.map { validIn => - new Bind[S,T](s => handleUndefined( f(s) validateKeyReferenced g), validIn) - } - def mapConstant(g: MapConstant) = new Bind[S,T](s => f(s) mapConstant g, in mapConstant g) - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = in.processAttributes(init)(f) - } - private[sbt] final class Optional[S,T](val a: Option[Initialize[S]], val f: Option[S] => T) extends Initialize[T] - { - def dependencies = deps(a.toList) - def apply[Z](g: T => Z): Initialize[Z] = new Optional[S,Z](a, g compose f) - def mapReferenced(g: MapScoped) = new Optional(a map mapReferencedT(g).fn, f) - def validateKeyReferenced(g: ValidateKeyRef) = a match { - case None => Right(this) - case Some(i) => Right( new Optional(i.validateKeyReferenced(g).right.toOption, f) ) - } - def mapConstant(g: MapConstant): Initialize[T] = new Optional(a map mapConstantT(g).fn, f) - def evaluate(ss: Settings[Scope]): T = f( a.flatMap( i => trapBadRef(evaluateT(ss)(i)) ) ) - // proper solution is for evaluate to be deprecated or for external use only and a new internal method returning Either be used - private[this] def trapBadRef[A](run: => A): Option[A] = try Some(run) catch { case e: InvalidReference => None } - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = a match { - case None => init - case Some(i) => i.processAttributes(init)(f) - } - } - private[sbt] final class Value[T](val value: () => T) extends Initialize[T] - { - def dependencies = Nil - def mapReferenced(g: MapScoped) = this - def validateKeyReferenced(g: ValidateKeyRef) = Right(this) - def apply[S](g: T => S) = new Value[S](() => g(value())) - def mapConstant(g: MapConstant) = this - def evaluate(map: Settings[Scope]): T = value() - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init - } - private[sbt] final object StaticScopes extends Initialize[Set[Scope]] - { - def dependencies = Nil - def mapReferenced(g: MapScoped) = this - def validateKeyReferenced(g: ValidateKeyRef) = Right(this) - def apply[S](g: Set[Scope] => S) = map(this)(g) - def mapConstant(g: MapConstant) = this - def evaluate(map: Settings[Scope]) = map.scopes - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = init - } - private[sbt] final class Apply[K[L[x]], T](val f: K[Id] => T, val inputs: K[Initialize], val alist: AList[K]) extends Initialize[T] - { - def dependencies = deps(alist.toList(inputs)) - def mapReferenced(g: MapScoped) = mapInputs( mapReferencedT(g) ) 
- def apply[S](g: T => S) = new Apply(g compose f, inputs, alist) - def mapConstant(g: MapConstant) = mapInputs( mapConstantT(g) ) - def mapInputs(g: Initialize ~> Initialize): Initialize[T] = new Apply(f, alist.transform(inputs, g), alist) - def evaluate(ss: Settings[Scope]) = f(alist.transform(inputs, evaluateT(ss))) - def validateKeyReferenced(g: ValidateKeyRef) = - { - val tx = alist.transform(inputs, validateKeyReferencedT(g)) - val undefs = alist.toList(tx).flatMap(_.left.toSeq.flatten) - val get = new (ValidatedInit ~> Initialize) { def apply[T](vr: ValidatedInit[T]) = vr.right.get } - if(undefs.isEmpty) Right(new Apply(f, alist.transform(tx, get), alist)) else Left(undefs) - } - - private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = - (init /: alist.toList(inputs)) { (v, i) => i.processAttributes(v)(f) } - } - private def remove[T](s: Seq[T], v: T) = s filterNot (_ == v) + private[sbt] def processAttributes[S](init: S)(f: (S, AttributeMap) => S): S = + (init /: alist.toList(inputs)) { (v, i) => i.processAttributes(v)(f) } + } + private def remove[T](s: Seq[T], v: T) = s filterNot (_ == v) } diff --git a/util/collection/src/main/scala/sbt/Show.scala b/util/collection/src/main/scala/sbt/Show.scala index fe4e85950..1f8e9703b 100644 --- a/util/collection/src/main/scala/sbt/Show.scala +++ b/util/collection/src/main/scala/sbt/Show.scala @@ -1,9 +1,8 @@ package sbt trait Show[T] { - def apply(t: T): String + def apply(t: T): String } -object Show -{ - def apply[T](f: T => String): Show[T] = new Show[T] { def apply(t: T): String = f(t) } +object Show { + def apply[T](f: T => String): Show[T] = new Show[T] { def apply(t: T): String = f(t) } } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Signal.scala b/util/collection/src/main/scala/sbt/Signal.scala index 0069e4b53..e8c9e7e6c 100644 --- a/util/collection/src/main/scala/sbt/Signal.scala +++ b/util/collection/src/main/scala/sbt/Signal.scala @@ -1,91 +1,85 @@ package sbt -object Signals -{ - val CONT = "CONT" - val INT = "INT" - def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T = - { - val result = - try - { - val signals = new Signals0 - signals.withHandler(signal, handler, action) - } - catch { case e: LinkageError => Right(action()) } +object Signals { + val CONT = "CONT" + val INT = "INT" + def withHandler[T](handler: () => Unit, signal: String = INT)(action: () => T): T = + { + val result = + try { + val signals = new Signals0 + signals.withHandler(signal, handler, action) + } catch { case e: LinkageError => Right(action()) } - result match { - case Left(e) => throw e - case Right(v) => v - } - } + result match { + case Left(e) => throw e + case Right(v) => v + } + } - /** Helper interface so we can expose internals of signal-isms to others. */ - sealed trait Registration { - def remove(): Unit - } - /** Register a signal handler that can be removed later. - * NOTE: Does not stack with other signal handlers!!!! - */ - def register(handler: () => Unit, signal: String = INT): Registration = - // TODO - Maybe we can just ignore things if not is-supported. 
- if(supported(signal)) { - import sun.misc.{Signal,SignalHandler} - val intSignal = new Signal(signal) - val newHandler = new SignalHandler { - def handle(sig: Signal) { handler() } - } - val oldHandler = Signal.handle(intSignal, newHandler) - object unregisterNewHandler extends Registration { - override def remove(): Unit = { - Signal.handle(intSignal, oldHandler) - } - } - unregisterNewHandler - } else { - // TODO - Maybe we should just throw an exception if we don't support signals... - object NullUnregisterNewHandler extends Registration { - override def remove(): Unit = () - } - NullUnregisterNewHandler - } + /** Helper interface so we can expose internals of signal-isms to others. */ + sealed trait Registration { + def remove(): Unit + } + /** + * Register a signal handler that can be removed later. + * NOTE: Does not stack with other signal handlers!!!! + */ + def register(handler: () => Unit, signal: String = INT): Registration = + // TODO - Maybe we can just ignore things if not is-supported. + if (supported(signal)) { + import sun.misc.{ Signal, SignalHandler } + val intSignal = new Signal(signal) + val newHandler = new SignalHandler { + def handle(sig: Signal) { handler() } + } + val oldHandler = Signal.handle(intSignal, newHandler) + object unregisterNewHandler extends Registration { + override def remove(): Unit = { + Signal.handle(intSignal, oldHandler) + } + } + unregisterNewHandler + } else { + // TODO - Maybe we should just throw an exception if we don't support signals... + object NullUnregisterNewHandler extends Registration { + override def remove(): Unit = () + } + NullUnregisterNewHandler + } - - def supported(signal: String): Boolean = - try - { - val signals = new Signals0 - signals.supported(signal) - } - catch { case e: LinkageError => false } + def supported(signal: String): Boolean = + try { + val signals = new Signals0 + signals.supported(signal) + } catch { case e: LinkageError => false } } // Must only be referenced using a // try { } catch { case e: LinkageError => ... 
} // block to -private final class Signals0 -{ - def supported(signal: String): Boolean = - { - import sun.misc.Signal - try { new Signal(signal); true } - catch { case e: IllegalArgumentException => false } - } +private final class Signals0 { + def supported(signal: String): Boolean = + { + import sun.misc.Signal + try { new Signal(signal); true } + catch { case e: IllegalArgumentException => false } + } - // returns a LinkageError in `action` as Left(t) in order to avoid it being - // incorrectly swallowed as missing Signal/SignalHandler - def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] = - { - import sun.misc.{Signal,SignalHandler} - val intSignal = new Signal(signal) - val newHandler = new SignalHandler { - def handle(sig: Signal) { handler() } - } + // returns a LinkageError in `action` as Left(t) in order to avoid it being + // incorrectly swallowed as missing Signal/SignalHandler + def withHandler[T](signal: String, handler: () => Unit, action: () => T): Either[Throwable, T] = + { + import sun.misc.{ Signal, SignalHandler } + val intSignal = new Signal(signal) + val newHandler = new SignalHandler { + def handle(sig: Signal) { handler() } + } - val oldHandler = Signal.handle(intSignal, newHandler) + val oldHandler = Signal.handle(intSignal, newHandler) - try Right(action()) - catch { case e: LinkageError => Left(e) } - finally Signal.handle(intSignal, oldHandler) - } + try Right(action()) + catch { case e: LinkageError => Left(e) } + finally Signal.handle(intSignal, oldHandler) + } } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/TypeFunctions.scala b/util/collection/src/main/scala/sbt/TypeFunctions.scala index 6a4978750..74f0a7d99 100644 --- a/util/collection/src/main/scala/sbt/TypeFunctions.scala +++ b/util/collection/src/main/scala/sbt/TypeFunctions.scala @@ -3,51 +3,48 @@ */ package sbt -trait TypeFunctions -{ - type Id[X] = X - sealed trait Const[A] { type Apply[B] = A } - sealed trait ConstK[A] { type l[L[x]] = A } - sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] } - sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] } - sealed trait P1of2[M[_,_], A] { type Apply[B] = M[A,B]; type Flip[B] = M[B, A] } +trait TypeFunctions { + type Id[X] = X + sealed trait Const[A] { type Apply[B] = A } + sealed trait ConstK[A] { type l[L[x]] = A } + sealed trait Compose[A[_], B[_]] { type Apply[T] = A[B[T]] } + sealed trait ∙[A[_], B[_]] { type l[T] = A[B[T]] } + sealed trait P1of2[M[_, _], A] { type Apply[B] = M[A, B]; type Flip[B] = M[B, A] } - final val left = new (Id ~> P1of2[Left, Nothing]#Flip) { def apply[T](t: T) = Left(t) } - final val right = new (Id ~> P1of2[Right, Nothing]#Apply) { def apply[T](t: T) = Right(t) } - final val some = new (Id ~> Some) { def apply[T](t: T) = Some(t) } - final def idFun[T] = (t: T) => t - final def const[A,B](b: B): A=> B = _ => b - final def idK[M[_]]: M ~> M = new (M ~> M) { def apply[T](m: M[T]): M[T] = m } - - def nestCon[M[_], N[_], G[_]](f: M ~> N): (M ∙ G)#l ~> (N ∙ G)#l = - f.asInstanceOf[(M ∙ G)#l ~> (N ∙ G)#l] // implemented with a cast to avoid extra object+method call. 
castless version: - /* new ( (M ∙ G)#l ~> (N ∙ G)#l ) { + final val left = new (Id ~> P1of2[Left, Nothing]#Flip) { def apply[T](t: T) = Left(t) } + final val right = new (Id ~> P1of2[Right, Nothing]#Apply) { def apply[T](t: T) = Right(t) } + final val some = new (Id ~> Some) { def apply[T](t: T) = Some(t) } + final def idFun[T] = (t: T) => t + final def const[A, B](b: B): A => B = _ => b + final def idK[M[_]]: M ~> M = new (M ~> M) { def apply[T](m: M[T]): M[T] = m } + + def nestCon[M[_], N[_], G[_]](f: M ~> N): (M ∙ G)#l ~> (N ∙ G)#l = + f.asInstanceOf[(M ∙ G)#l ~> (N ∙ G)#l] // implemented with a cast to avoid extra object+method call. castless version: + /* new ( (M ∙ G)#l ~> (N ∙ G)#l ) { def apply[T](mg: M[G[T]]): N[G[T]] = f(mg) }*/ - implicit def toFn1[A,B](f: A => B): Fn1[A,B] = new Fn1[A,B] { - def ∙[C](g: C => A) = f compose g - } - - type Endo[T] = T=>T - type ~>|[A[_],B[_]] = A ~> Compose[Option, B]#Apply + implicit def toFn1[A, B](f: A => B): Fn1[A, B] = new Fn1[A, B] { + def ∙[C](g: C => A) = f compose g + } + + type Endo[T] = T => T + type ~>|[A[_], B[_]] = A ~> Compose[Option, B]#Apply } object TypeFunctions extends TypeFunctions -trait ~>[-A[_], +B[_]] -{ outer => - def apply[T](a: A[T]): B[T] - // directly on ~> because of type inference limitations - final def ∙[C[_]](g: C ~> A): C ~> B = new (C ~> B) { def apply[T](c: C[T]) = outer.apply(g(c)) } - final def ∙[C,D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i)) ) - final def fn[T] = (t: A[T]) => apply[T](t) +trait ~>[-A[_], +B[_]] { outer => + def apply[T](a: A[T]): B[T] + // directly on ~> because of type inference limitations + final def ∙[C[_]](g: C ~> A): C ~> B = new (C ~> B) { def apply[T](c: C[T]) = outer.apply(g(c)) } + final def ∙[C, D](g: C => D)(implicit ev: D <:< A[D]): C => B[D] = i => apply(ev(g(i))) + final def fn[T] = (t: A[T]) => apply[T](t) } -object ~> -{ - import TypeFunctions._ - val Id: Id ~> Id = new (Id ~> Id) { def apply[T](a: T): T = a } - implicit def tcIdEquals: (Id ~> Id) = Id +object ~> { + import TypeFunctions._ + val Id: Id ~> Id = new (Id ~> Id) { def apply[T](a: T): T = a } + implicit def tcIdEquals: (Id ~> Id) = Id } trait Fn1[A, B] { - def ∙[C](g: C => A): C => B + def ∙[C](g: C => A): C => B } \ No newline at end of file diff --git a/util/collection/src/main/scala/sbt/Types.scala b/util/collection/src/main/scala/sbt/Types.scala index d3a3420b0..29994f3d1 100644 --- a/util/collection/src/main/scala/sbt/Types.scala +++ b/util/collection/src/main/scala/sbt/Types.scala @@ -5,9 +5,8 @@ package sbt object Types extends Types -trait Types extends TypeFunctions -{ - val :^: = KCons - type :+:[H, T <: HList] = HCons[H,T] - val :+: = HCons +trait Types extends TypeFunctions { + val :^: = KCons + type :+:[H, T <: HList] = HCons[H, T] + val :+: = HCons } diff --git a/util/collection/src/main/scala/sbt/Util.scala b/util/collection/src/main/scala/sbt/Util.scala index 27b32dd87..befc7b5a9 100644 --- a/util/collection/src/main/scala/sbt/Util.scala +++ b/util/collection/src/main/scala/sbt/Util.scala @@ -5,41 +5,39 @@ package sbt import java.util.Locale -object Util -{ - def makeList[T](size: Int, value: T): List[T] = List.fill(size)(value) +object Util { + def makeList[T](size: Int, value: T): List[T] = List.fill(size)(value) - def separateE[A,B](ps: Seq[Either[A,B]]): (Seq[A], Seq[B]) = - separate(ps)(Types.idFun) + def separateE[A, B](ps: Seq[Either[A, B]]): (Seq[A], Seq[B]) = + separate(ps)(Types.idFun) - def separate[T,A,B](ps: Seq[T])(f: T => Either[A,B]): (Seq[A], Seq[B]) = 
- { - val (a,b) = ((Nil: Seq[A], Nil: Seq[B]) /: ps)( (xs, y) => prependEither(xs, f(y)) ) - (a.reverse, b.reverse) - } + def separate[T, A, B](ps: Seq[T])(f: T => Either[A, B]): (Seq[A], Seq[B]) = + { + val (a, b) = ((Nil: Seq[A], Nil: Seq[B]) /: ps)((xs, y) => prependEither(xs, f(y))) + (a.reverse, b.reverse) + } - def prependEither[A,B](acc: (Seq[A], Seq[B]), next: Either[A,B]): (Seq[A], Seq[B]) = - next match - { - case Left(l) => (l +: acc._1, acc._2) - case Right(r) => (acc._1, r +: acc._2) - } + def prependEither[A, B](acc: (Seq[A], Seq[B]), next: Either[A, B]): (Seq[A], Seq[B]) = + next match { + case Left(l) => (l +: acc._1, acc._2) + case Right(r) => (acc._1, r +: acc._2) + } - def pairID[A,B] = (a: A, b: B) => (a,b) + def pairID[A, B] = (a: A, b: B) => (a, b) - private[this] lazy val Hypen = """-(\p{javaLowerCase})""".r - def hasHyphen(s: String): Boolean = s.indexOf('-') >= 0 - @deprecated("Use the properly spelled version: hyphenToCamel", "0.13.0") - def hypenToCamel(s: String): String = hyphenToCamel(s) - def hyphenToCamel(s: String): String = - if(hasHyphen(s)) - Hypen.replaceAllIn(s, _.group(1).toUpperCase(Locale.ENGLISH)) - else - s + private[this] lazy val Hypen = """-(\p{javaLowerCase})""".r + def hasHyphen(s: String): Boolean = s.indexOf('-') >= 0 + @deprecated("Use the properly spelled version: hyphenToCamel", "0.13.0") + def hypenToCamel(s: String): String = hyphenToCamel(s) + def hyphenToCamel(s: String): String = + if (hasHyphen(s)) + Hypen.replaceAllIn(s, _.group(1).toUpperCase(Locale.ENGLISH)) + else + s - private[this] lazy val Camel = """(\p{javaLowerCase})(\p{javaUpperCase})""".r - def camelToHypen(s: String): String = - Camel.replaceAllIn(s, m => m.group(1) + "-" + m.group(2).toLowerCase(Locale.ENGLISH)) + private[this] lazy val Camel = """(\p{javaLowerCase})(\p{javaUpperCase})""".r + def camelToHypen(s: String): String = + Camel.replaceAllIn(s, m => m.group(1) + "-" + m.group(2).toLowerCase(Locale.ENGLISH)) - def quoteIfKeyword(s: String): String = if(ScalaKeywords.values(s)) '`' + s + '`' else s + def quoteIfKeyword(s: String): String = if (ScalaKeywords.values(s)) '`' + s + '`' else s } diff --git a/util/complete/src/main/scala/sbt/LineReader.scala b/util/complete/src/main/scala/sbt/LineReader.scala index 9fba225f4..8f9fc219f 100644 --- a/util/complete/src/main/scala/sbt/LineReader.scala +++ b/util/complete/src/main/scala/sbt/LineReader.scala @@ -3,144 +3,137 @@ */ package sbt - import jline.console.ConsoleReader - import jline.console.history.{FileHistory, MemoryHistory} - import java.io.{File, InputStream, PrintWriter} - import complete.Parser - import java.util.concurrent.atomic.AtomicBoolean +import jline.console.ConsoleReader +import jline.console.history.{ FileHistory, MemoryHistory } +import java.io.{ File, InputStream, PrintWriter } +import complete.Parser +import java.util.concurrent.atomic.AtomicBoolean -abstract class JLine extends LineReader -{ - protected[this] val handleCONT: Boolean - protected[this] val reader: ConsoleReader +abstract class JLine extends LineReader { + protected[this] val handleCONT: Boolean + protected[this] val reader: ConsoleReader - def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine { unsynchronizedReadLine(prompt, mask) } + def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine { unsynchronizedReadLine(prompt, mask) } - private[this] def unsynchronizedReadLine(prompt: String, mask: Option[Char]) = - readLineWithHistory(prompt, mask) match - { - case null => None - case x => 
Some(x.trim) - } + private[this] def unsynchronizedReadLine(prompt: String, mask: Option[Char]) = + readLineWithHistory(prompt, mask) match { + case null => None + case x => Some(x.trim) + } - private[this] def readLineWithHistory(prompt: String, mask: Option[Char]): String = - reader.getHistory match - { - case fh: FileHistory => - try { readLineDirect(prompt, mask) } - finally { fh.flush() } - case _ => readLineDirect(prompt, mask) - } + private[this] def readLineWithHistory(prompt: String, mask: Option[Char]): String = + reader.getHistory match { + case fh: FileHistory => + try { readLineDirect(prompt, mask) } + finally { fh.flush() } + case _ => readLineDirect(prompt, mask) + } - private[this] def readLineDirect(prompt: String, mask: Option[Char]): String = - if(handleCONT) - Signals.withHandler(() => resume(), signal = Signals.CONT)( () => readLineDirectRaw(prompt, mask) ) - else - readLineDirectRaw(prompt, mask) - private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): String = - { - val newprompt = handleMultilinePrompt(prompt) - mask match { - case Some(m) => reader.readLine(newprompt, m) - case None => reader.readLine(newprompt) - } - } + private[this] def readLineDirect(prompt: String, mask: Option[Char]): String = + if (handleCONT) + Signals.withHandler(() => resume(), signal = Signals.CONT)(() => readLineDirectRaw(prompt, mask)) + else + readLineDirectRaw(prompt, mask) + private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): String = + { + val newprompt = handleMultilinePrompt(prompt) + mask match { + case Some(m) => reader.readLine(newprompt, m) + case None => reader.readLine(newprompt) + } + } - private[this] def handleMultilinePrompt(prompt: String): String = { - val lines = """\r?\n""".r.split(prompt) - lines.size match { - case 0 | 1 => prompt - case _ => reader.print(lines.init.mkString("\n") + "\n"); lines.last; - } - } + private[this] def handleMultilinePrompt(prompt: String): String = { + val lines = """\r?\n""".r.split(prompt) + lines.size match { + case 0 | 1 => prompt + case _ => reader.print(lines.init.mkString("\n") + "\n"); lines.last; + } + } - private[this] def resume() - { - jline.TerminalFactory.reset - JLine.terminal.init - reader.drawLine() - reader.flush() - } + private[this] def resume() { + jline.TerminalFactory.reset + JLine.terminal.init + reader.drawLine() + reader.flush() + } } -private object JLine -{ - private[this] val TerminalProperty = "jline.terminal" +private object JLine { + private[this] val TerminalProperty = "jline.terminal" - fixTerminalProperty() + fixTerminalProperty() - // translate explicit class names to type in order to support - // older Scala, since it shaded classes but not the system property - private[sbt] def fixTerminalProperty() { - val newValue = System.getProperty(TerminalProperty) match { - case "jline.UnixTerminal" => "unix" - case null if System.getProperty("sbt.cygwin") != null => "unix" - case "jline.WindowsTerminal" => "windows" - case "jline.AnsiWindowsTerminal" => "windows" - case "jline.UnsupportedTerminal" => "none" - case x => x - } - if(newValue != null) System.setProperty(TerminalProperty, newValue) - } + // translate explicit class names to type in order to support + // older Scala, since it shaded classes but not the system property + private[sbt] def fixTerminalProperty() { + val newValue = System.getProperty(TerminalProperty) match { + case "jline.UnixTerminal" => "unix" + case null if System.getProperty("sbt.cygwin") != null => "unix" + case "jline.WindowsTerminal" => 
"windows" + case "jline.AnsiWindowsTerminal" => "windows" + case "jline.UnsupportedTerminal" => "none" + case x => x + } + if (newValue != null) System.setProperty(TerminalProperty, newValue) + } - // When calling this, ensure that enableEcho has been or will be called. - // TerminalFactory.get will initialize the terminal to disable echo. - private def terminal = jline.TerminalFactory.get - private def withTerminal[T](f: jline.Terminal => T): T = - synchronized - { - val t = terminal - t.synchronized { f(t) } - } - /** For accessing the JLine Terminal object. - * This ensures synchronized access as well as re-enabling echo after getting the Terminal. */ - def usingTerminal[T](f: jline.Terminal => T): T = - withTerminal { t => - t.restore - f(t) - } - def createReader(): ConsoleReader = createReader(None) - def createReader(historyPath: Option[File]): ConsoleReader = - usingTerminal { t => - val cr = new ConsoleReader - cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650 - cr.setBellEnabled(false) - val h = historyPath match { - case None => new MemoryHistory - case Some(file) => new FileHistory(file) - } - h.setMaxSize(MaxHistorySize) - cr.setHistory(h) - cr - } - def withJLine[T](action: => T): T = - withTerminal { t => - t.init - try { action } - finally { t.restore } - } + // When calling this, ensure that enableEcho has been or will be called. + // TerminalFactory.get will initialize the terminal to disable echo. + private def terminal = jline.TerminalFactory.get + private def withTerminal[T](f: jline.Terminal => T): T = + synchronized { + val t = terminal + t.synchronized { f(t) } + } + /** + * For accessing the JLine Terminal object. + * This ensures synchronized access as well as re-enabling echo after getting the Terminal. 
+ */ + def usingTerminal[T](f: jline.Terminal => T): T = + withTerminal { t => + t.restore + f(t) + } + def createReader(): ConsoleReader = createReader(None) + def createReader(historyPath: Option[File]): ConsoleReader = + usingTerminal { t => + val cr = new ConsoleReader + cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650 + cr.setBellEnabled(false) + val h = historyPath match { + case None => new MemoryHistory + case Some(file) => new FileHistory(file) + } + h.setMaxSize(MaxHistorySize) + cr.setHistory(h) + cr + } + def withJLine[T](action: => T): T = + withTerminal { t => + t.init + try { action } + finally { t.restore } + } - def simple(historyPath: Option[File], handleCONT: Boolean = HandleCONT): SimpleReader = new SimpleReader(historyPath, handleCONT) - val MaxHistorySize = 500 - val HandleCONT = !java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT) + def simple(historyPath: Option[File], handleCONT: Boolean = HandleCONT): SimpleReader = new SimpleReader(historyPath, handleCONT) + val MaxHistorySize = 500 + val HandleCONT = !java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT) } -trait LineReader -{ - def readLine(prompt: String, mask: Option[Char] = None): Option[String] +trait LineReader { + def readLine(prompt: String, mask: Option[Char] = None): Option[String] } -final class FullReader(historyPath: Option[File], complete: Parser[_], val handleCONT: Boolean = JLine.HandleCONT) extends JLine -{ - protected[this] val reader = - { - val cr = JLine.createReader(historyPath) - sbt.complete.JLineCompletion.installCustomCompletor(cr, complete) - cr - } +final class FullReader(historyPath: Option[File], complete: Parser[_], val handleCONT: Boolean = JLine.HandleCONT) extends JLine { + protected[this] val reader = + { + val cr = JLine.createReader(historyPath) + sbt.complete.JLineCompletion.installCustomCompletor(cr, complete) + cr + } } -class SimpleReader private[sbt] (historyPath: Option[File], val handleCONT: Boolean) extends JLine -{ - protected[this] val reader = JLine.createReader(historyPath) +class SimpleReader private[sbt] (historyPath: Option[File], val handleCONT: Boolean) extends JLine { + protected[this] val reader = JLine.createReader(historyPath) } object SimpleReader extends SimpleReader(None, JLine.HandleCONT) diff --git a/util/complete/src/main/scala/sbt/complete/Completions.scala b/util/complete/src/main/scala/sbt/complete/Completions.scala index 594a9b9da..5237ad26d 100644 --- a/util/complete/src/main/scala/sbt/complete/Completions.scala +++ b/util/complete/src/main/scala/sbt/complete/Completions.scala @@ -4,148 +4,141 @@ package sbt.complete /** -* Represents a set of completions. -* It exists instead of implicitly defined operations on top of Set[Completion] -* for laziness. 
-*/ -sealed trait Completions -{ - def get: Set[Completion] - final def x(o: Completions): Completions = flatMap(_ x o) - final def ++(o: Completions): Completions = Completions( get ++ o.get ) - final def +:(o: Completion): Completions = Completions(get + o) - final def filter(f: Completion => Boolean): Completions = Completions(get filter f) - final def filterS(f: String => Boolean): Completions = filter(c => f(c.append)) - override def toString = get.mkString("Completions(",",",")") - final def flatMap(f: Completion => Completions): Completions = Completions(get.flatMap(c => f(c).get)) - final def map(f: Completion => Completion): Completions = Completions(get map f) - override final def hashCode = get.hashCode - override final def equals(o: Any) = o match { case c: Completions => get == c.get; case _ => false } + * Represents a set of completions. + * It exists instead of implicitly defined operations on top of Set[Completion] + * for laziness. + */ +sealed trait Completions { + def get: Set[Completion] + final def x(o: Completions): Completions = flatMap(_ x o) + final def ++(o: Completions): Completions = Completions(get ++ o.get) + final def +:(o: Completion): Completions = Completions(get + o) + final def filter(f: Completion => Boolean): Completions = Completions(get filter f) + final def filterS(f: String => Boolean): Completions = filter(c => f(c.append)) + override def toString = get.mkString("Completions(", ",", ")") + final def flatMap(f: Completion => Completions): Completions = Completions(get.flatMap(c => f(c).get)) + final def map(f: Completion => Completion): Completions = Completions(get map f) + override final def hashCode = get.hashCode + override final def equals(o: Any) = o match { case c: Completions => get == c.get; case _ => false } } -object Completions -{ - /** Returns a lazy Completions instance using the provided Completion Set. */ - def apply(cs: => Set[Completion]): Completions = new Completions { - lazy val get = cs - } +object Completions { + /** Returns a lazy Completions instance using the provided Completion Set. */ + def apply(cs: => Set[Completion]): Completions = new Completions { + lazy val get = cs + } - /** Returns a strict Completions instance using the provided Completion Set. */ - def strict(cs: Set[Completion]): Completions = apply(cs) + /** Returns a strict Completions instance using the provided Completion Set. */ + def strict(cs: Set[Completion]): Completions = apply(cs) - /** No suggested completions, not even the empty Completion. - * This typically represents invalid input. */ - val nil: Completions = strict(Set.empty) + /** + * No suggested completions, not even the empty Completion. + * This typically represents invalid input. + */ + val nil: Completions = strict(Set.empty) - /** Only includes an empty Suggestion. - * This typically represents valid input that either has no completions or accepts no further input. */ - val empty: Completions = strict(Set.empty + Completion.empty) + /** + * Only includes an empty Suggestion. + * This typically represents valid input that either has no completions or accepts no further input. + */ + val empty: Completions = strict(Set.empty + Completion.empty) - /** Returns a strict Completions instance containing only the provided Completion.*/ - def single(c: Completion): Completions = strict(Set.empty + c) + /** Returns a strict Completions instance containing only the provided Completion.*/ + def single(c: Completion): Completions = strict(Set.empty + c) } /** -* Represents a completion. 
-* The abstract members `display` and `append` are best explained with an example. -* -* Assuming space-delimited tokens, processing this: -* am is are w -* could produce these Completions: -* Completion { display = "was"; append = "as" } -* Completion { display = "were"; append = "ere" } -* to suggest the tokens "was" and "were". -* -* In this way, two pieces of information are preserved: -* 1) what needs to be appended to the current input if a completion is selected -* 2) the full token being completed, which is useful for presenting a user with choices to select -*/ -sealed trait Completion -{ - /** The proposed suffix to append to the existing input to complete the last token in the input.*/ - def append: String - /** The string to present to the user to represent the full token being suggested.*/ - def display: String - /** True if this Completion is suggesting the empty string.*/ - def isEmpty: Boolean + * Represents a completion. + * The abstract members `display` and `append` are best explained with an example. + * + * Assuming space-delimited tokens, processing this: + * am is are w + * could produce these Completions: + * Completion { display = "was"; append = "as" } + * Completion { display = "were"; append = "ere" } + * to suggest the tokens "was" and "were". + * + * In this way, two pieces of information are preserved: + * 1) what needs to be appended to the current input if a completion is selected + * 2) the full token being completed, which is useful for presenting a user with choices to select + */ +sealed trait Completion { + /** The proposed suffix to append to the existing input to complete the last token in the input.*/ + def append: String + /** The string to present to the user to represent the full token being suggested.*/ + def display: String + /** True if this Completion is suggesting the empty string.*/ + def isEmpty: Boolean - /** Appends the completions in `o` with the completions in this Completion.*/ - def ++(o: Completion): Completion = Completion.concat(this, o) - final def x(o: Completions): Completions = if(Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this) - override final lazy val hashCode = Completion.hashCode(this) - override final def equals(o: Any) = o match { case c: Completion => Completion.equal(this, c); case _ => false } + /** Appends the completions in `o` with the completions in this Completion.*/ + def ++(o: Completion): Completion = Completion.concat(this, o) + final def x(o: Completions): Completions = if (Completion evaluatesRight this) o.map(this ++ _) else Completions.strict(Set.empty + this) + override final lazy val hashCode = Completion.hashCode(this) + override final def equals(o: Any) = o match { case c: Completion => Completion.equal(this, c); case _ => false } } -final class DisplayOnly(val display: String) extends Completion -{ - def isEmpty = display.isEmpty - def append = "" - override def toString = "{" + display + "}" +final class DisplayOnly(val display: String) extends Completion { + def isEmpty = display.isEmpty + def append = "" + override def toString = "{" + display + "}" } -final class Token(val display: String, val append: String) extends Completion -{ - @deprecated("Retained only for compatibility. 
All information is now in `display` and `append`.", "0.12.1") - lazy val prepend = display.stripSuffix(append) - def isEmpty = display.isEmpty && append.isEmpty - override final def toString = "[" + display + "]++" + append +final class Token(val display: String, val append: String) extends Completion { + @deprecated("Retained only for compatibility. All information is now in `display` and `append`.", "0.12.1") + lazy val prepend = display.stripSuffix(append) + def isEmpty = display.isEmpty && append.isEmpty + override final def toString = "[" + display + "]++" + append } -final class Suggestion(val append: String) extends Completion -{ - def isEmpty = append.isEmpty - def display = append - override def toString = append +final class Suggestion(val append: String) extends Completion { + def isEmpty = append.isEmpty + def display = append + override def toString = append } -object Completion -{ - def concat(a: Completion, b: Completion): Completion = - (a,b) match - { - case (as: Suggestion, bs: Suggestion) => suggestion(as.append + bs.append) - case (at: Token, _) if at.append.isEmpty => b - case _ if a.isEmpty => b - case _ => a - } - def evaluatesRight(a: Completion): Boolean = - a match - { - case _: Suggestion => true - case at: Token if at.append.isEmpty => true - case _ => a.isEmpty - } +object Completion { + def concat(a: Completion, b: Completion): Completion = + (a, b) match { + case (as: Suggestion, bs: Suggestion) => suggestion(as.append + bs.append) + case (at: Token, _) if at.append.isEmpty => b + case _ if a.isEmpty => b + case _ => a + } + def evaluatesRight(a: Completion): Boolean = + a match { + case _: Suggestion => true + case at: Token if at.append.isEmpty => true + case _ => a.isEmpty + } - def equal(a: Completion, b: Completion): Boolean = - (a,b) match - { - case (as: Suggestion, bs: Suggestion) => as.append == bs.append - case (ad: DisplayOnly, bd: DisplayOnly) => ad.display == bd.display - case (at: Token, bt: Token) => at.display == bt.display && at.append == bt.append - case _ => false - } + def equal(a: Completion, b: Completion): Boolean = + (a, b) match { + case (as: Suggestion, bs: Suggestion) => as.append == bs.append + case (ad: DisplayOnly, bd: DisplayOnly) => ad.display == bd.display + case (at: Token, bt: Token) => at.display == bt.display && at.append == bt.append + case _ => false + } - def hashCode(a: Completion): Int = - a match - { - case as: Suggestion => (0, as.append).hashCode - case ad: DisplayOnly => (1, ad.display).hashCode - case at: Token => (2, at.display, at.append).hashCode - } + def hashCode(a: Completion): Int = + a match { + case as: Suggestion => (0, as.append).hashCode + case ad: DisplayOnly => (1, ad.display).hashCode + case at: Token => (2, at.display, at.append).hashCode + } - val empty: Completion = suggestion("") - def single(c: Char): Completion = suggestion(c.toString) - - // TODO: make strict in 0.13.0 to match DisplayOnly - def displayOnly(value: => String): Completion = new DisplayOnly(value) - @deprecated("Use displayOnly.", "0.12.1") - def displayStrict(value: String): Completion = displayOnly(value) + val empty: Completion = suggestion("") + def single(c: Char): Completion = suggestion(c.toString) - // TODO: make strict in 0.13.0 to match Token - def token(prepend: => String, append: => String): Completion = new Token(prepend+append, append) - @deprecated("Use token.", "0.12.1") - def tokenStrict(prepend: String, append: String): Completion = token(prepend, append) + // TODO: make strict in 0.13.0 to match DisplayOnly + 
def displayOnly(value: => String): Completion = new DisplayOnly(value) + @deprecated("Use displayOnly.", "0.12.1") + def displayStrict(value: String): Completion = displayOnly(value) - /** @since 0.12.1 */ - def tokenDisplay(append: String, display: String): Completion = new Token(display, append) + // TODO: make strict in 0.13.0 to match Token + def token(prepend: => String, append: => String): Completion = new Token(prepend + append, append) + @deprecated("Use token.", "0.12.1") + def tokenStrict(prepend: String, append: String): Completion = token(prepend, append) - // TODO: make strict in 0.13.0 to match Suggestion - def suggestion(value: => String): Completion = new Suggestion(value) - @deprecated("Use suggestion.", "0.12.1") - def suggestStrict(value: String): Completion = suggestion(value) + /** @since 0.12.1 */ + def tokenDisplay(append: String, display: String): Completion = new Token(display, append) + + // TODO: make strict in 0.13.0 to match Suggestion + def suggestion(value: => String): Completion = new Suggestion(value) + @deprecated("Use suggestion.", "0.12.1") + def suggestStrict(value: String): Completion = suggestion(value) } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/EditDistance.scala b/util/complete/src/main/scala/sbt/complete/EditDistance.scala index 5e4cb277f..95ed0c91f 100644 --- a/util/complete/src/main/scala/sbt/complete/EditDistance.scala +++ b/util/complete/src/main/scala/sbt/complete/EditDistance.scala @@ -1,41 +1,41 @@ package sbt.complete - import java.lang.Character.{toLowerCase => lower} +import java.lang.Character.{ toLowerCase => lower } /** @author Paul Phillips*/ object EditDistance { - /** Translated from the java version at - * http://www.merriampark.com/ld.htm - * which is declared to be public domain. - */ - def levenshtein(s: String, t: String, insertCost: Int = 1, deleteCost: Int = 1, subCost: Int = 1, transposeCost: Int = 1, matchCost: Int = 0, caseCost: Int = 1, transpositions: Boolean = false): Int = { - val n = s.length - val m = t.length - if (n == 0) return m - if (m == 0) return n + /** + * Translated from the java version at + * http://www.merriampark.com/ld.htm + * which is declared to be public domain. 
+ */ + def levenshtein(s: String, t: String, insertCost: Int = 1, deleteCost: Int = 1, subCost: Int = 1, transposeCost: Int = 1, matchCost: Int = 0, caseCost: Int = 1, transpositions: Boolean = false): Int = { + val n = s.length + val m = t.length + if (n == 0) return m + if (m == 0) return n - val d = Array.ofDim[Int](n + 1, m + 1) - 0 to n foreach (x => d(x)(0) = x) - 0 to m foreach (x => d(0)(x) = x) + val d = Array.ofDim[Int](n + 1, m + 1) + 0 to n foreach (x => d(x)(0) = x) + 0 to m foreach (x => d(0)(x) = x) - for (i <- 1 to n ; s_i = s(i - 1) ; j <- 1 to m) { - val t_j = t(j - 1) - val cost = if (s_i == t_j) matchCost else if(lower(s_i) == lower(t_j)) caseCost else subCost - val tcost = if (s_i == t_j) matchCost else transposeCost - + for (i <- 1 to n; s_i = s(i - 1); j <- 1 to m) { + val t_j = t(j - 1) + val cost = if (s_i == t_j) matchCost else if (lower(s_i) == lower(t_j)) caseCost else subCost + val tcost = if (s_i == t_j) matchCost else transposeCost - val c1 = d(i - 1)(j) + deleteCost - val c2 = d(i)(j - 1) + insertCost - val c3 = d(i - 1)(j - 1) + cost + val c1 = d(i - 1)(j) + deleteCost + val c2 = d(i)(j - 1) + insertCost + val c3 = d(i - 1)(j - 1) + cost - d(i)(j) = c1 min c2 min c3 + d(i)(j) = c1 min c2 min c3 - if (transpositions) { - if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1)) - d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost) - } - } + if (transpositions) { + if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1)) + d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost) + } + } - d(n)(m) - } + d(n)(m) + } } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/ExampleSource.scala b/util/complete/src/main/scala/sbt/complete/ExampleSource.scala index 565a8c3f1..6d0469aa0 100644 --- a/util/complete/src/main/scala/sbt/complete/ExampleSource.scala +++ b/util/complete/src/main/scala/sbt/complete/ExampleSource.scala @@ -8,35 +8,33 @@ import sbt.IO._ * [[sbt.complete.FileExamples]] class, which provides a list of suggested files to the user as they press the * TAB key in the console. */ -trait ExampleSource -{ - /** - * @return a (possibly lazy) list of completion example strings. These strings are continuations of user's input. The - * user's input is incremented with calls to [[withAddedPrefix]]. - */ - def apply(): Iterable[String] +trait ExampleSource { + /** + * @return a (possibly lazy) list of completion example strings. These strings are continuations of user's input. The + * user's input is incremented with calls to [[withAddedPrefix]]. + */ + def apply(): Iterable[String] - /** - * @param addedPrefix a string that just typed in by the user. - * @return a new source of only those examples that start with the string typed by the user so far (with addition of - * the just added prefix). - */ - def withAddedPrefix(addedPrefix: String): ExampleSource + /** + * @param addedPrefix a string that just typed in by the user. + * @return a new source of only those examples that start with the string typed by the user so far (with addition of + * the just added prefix). + */ + def withAddedPrefix(addedPrefix: String): ExampleSource } /** * A convenience example source that wraps any collection of strings into a source of examples. * @param examples the examples that will be displayed to the user when they press the TAB key. 
*/ -sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSource -{ - override def withAddedPrefix(addedPrefix: String): ExampleSource = FixedSetExamples(examplesWithRemovedPrefix(addedPrefix)) +sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSource { + override def withAddedPrefix(addedPrefix: String): ExampleSource = FixedSetExamples(examplesWithRemovedPrefix(addedPrefix)) - override def apply(): Iterable[String] = examples + override def apply(): Iterable[String] = examples - private def examplesWithRemovedPrefix(prefix: String) = examples.collect { - case example if example startsWith prefix => example substring prefix.length - } + private def examplesWithRemovedPrefix(prefix: String) = examples.collect { + case example if example startsWith prefix => example substring prefix.length + } } /** @@ -44,19 +42,18 @@ sealed case class FixedSetExamples(examples: Iterable[String]) extends ExampleSo * @param base the directory within which this class will search for completion examples. * @param prefix the part of the path already written by the user. */ -class FileExamples(base: File, prefix: String = "") extends ExampleSource -{ - override def apply(): Stream[String] = files(base).map(_ substring prefix.length) +class FileExamples(base: File, prefix: String = "") extends ExampleSource { + override def apply(): Stream[String] = files(base).map(_ substring prefix.length) - override def withAddedPrefix(addedPrefix: String): FileExamples = new FileExamples(base, prefix + addedPrefix) + override def withAddedPrefix(addedPrefix: String): FileExamples = new FileExamples(base, prefix + addedPrefix) - protected def files(directory: File): Stream[String] = { - val childPaths = directory.listFiles().toStream - val prefixedDirectChildPaths = childPaths.map(relativize(base, _).get).filter(_ startsWith prefix) - val dirsToRecurseInto = childPaths.filter(_.isDirectory).map(relativize(base, _).get).filter(dirStartsWithPrefix) - prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir))) - } + protected def files(directory: File): Stream[String] = { + val childPaths = directory.listFiles().toStream + val prefixedDirectChildPaths = childPaths.map(relativize(base, _).get).filter(_ startsWith prefix) + val dirsToRecurseInto = childPaths.filter(_.isDirectory).map(relativize(base, _).get).filter(dirStartsWithPrefix) + prefixedDirectChildPaths append dirsToRecurseInto.flatMap(dir => files(new File(base, dir))) + } - private def dirStartsWithPrefix(relativizedPath: String): Boolean = - (relativizedPath startsWith prefix) || (prefix startsWith relativizedPath) + private def dirStartsWithPrefix(relativizedPath: String): Boolean = + (relativizedPath startsWith prefix) || (prefix startsWith relativizedPath) } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/History.scala b/util/complete/src/main/scala/sbt/complete/History.scala index 9c36f2605..ca394abf8 100644 --- a/util/complete/src/main/scala/sbt/complete/History.scala +++ b/util/complete/src/main/scala/sbt/complete/History.scala @@ -4,47 +4,42 @@ package sbt package complete - import History.number - import java.io.File +import History.number +import java.io.File -final class History private(val lines: IndexedSeq[String], val path: Option[File], error: String => Unit) extends NotNull -{ - private def reversed = lines.reverse +final class History private (val lines: IndexedSeq[String], val path: Option[File], error: String => Unit) extends NotNull 
{ + private def reversed = lines.reverse - def all: Seq[String] = lines - def size = lines.length - def !! : Option[String] = !- (1) - def apply(i: Int): Option[String] = if(0 <= i && i < size) Some( lines(i) ) else { error("Invalid history index: " + i); None } - def !(i: Int): Option[String] = apply(i) + def all: Seq[String] = lines + def size = lines.length + def !! : Option[String] = !-(1) + def apply(i: Int): Option[String] = if (0 <= i && i < size) Some(lines(i)) else { error("Invalid history index: " + i); None } + def !(i: Int): Option[String] = apply(i) - def !(s: String): Option[String] = - number(s) match - { - case Some(n) => if(n < 0) !- (-n) else apply(n) - case None => nonEmpty(s) { reversed.find(_.startsWith(s)) } - } - def !- (n: Int): Option[String] = apply(size - n - 1) + def !(s: String): Option[String] = + number(s) match { + case Some(n) => if (n < 0) !-(-n) else apply(n) + case None => nonEmpty(s) { reversed.find(_.startsWith(s)) } + } + def !-(n: Int): Option[String] = apply(size - n - 1) - def !?(s: String): Option[String] = nonEmpty(s) { reversed.drop(1).find(_.contains(s)) } + def !?(s: String): Option[String] = nonEmpty(s) { reversed.drop(1).find(_.contains(s)) } - private def nonEmpty[T](s: String)(act: => Option[T]): Option[T] = - if(s.isEmpty) - { - error("No action specified to history command") - None - } - else - act + private def nonEmpty[T](s: String)(act: => Option[T]): Option[T] = + if (s.isEmpty) { + error("No action specified to history command") + None + } else + act - def list(historySize: Int, show: Int): Seq[String] = - lines.toList.drop((lines.size - historySize) max 0).zipWithIndex.map { case (line, number) => " " + number + " " + line }.takeRight(show max 1) + def list(historySize: Int, show: Int): Seq[String] = + lines.toList.drop((lines.size - historySize) max 0).zipWithIndex.map { case (line, number) => " " + number + " " + line }.takeRight(show max 1) } -object History -{ - def apply(lines: Seq[String], path: Option[File], error: String => Unit): History = new History(lines.toIndexedSeq, path, error) +object History { + def apply(lines: Seq[String], path: Option[File], error: String => Unit): History = new History(lines.toIndexedSeq, path, error) - def number(s: String): Option[Int] = - try { Some(s.toInt) } - catch { case e: NumberFormatException => None } + def number(s: String): Option[Int] = + try { Some(s.toInt) } + catch { case e: NumberFormatException => None } } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/HistoryCommands.scala b/util/complete/src/main/scala/sbt/complete/HistoryCommands.scala index 906aa328a..762f48c6d 100644 --- a/util/complete/src/main/scala/sbt/complete/HistoryCommands.scala +++ b/util/complete/src/main/scala/sbt/complete/HistoryCommands.scala @@ -4,69 +4,70 @@ package sbt package complete - import java.io.File +import java.io.File -object HistoryCommands -{ - val Start = "!" - // second characters - val Contains = "?" - val Last = "!" - val ListCommands = ":" +object HistoryCommands { + val Start = "!" + // second characters + val Contains = "?" + val Last = "!" 
+ val ListCommands = ":" - def ContainsFull = h(Contains) - def LastFull = h(Last) - def ListFull = h(ListCommands) + def ContainsFull = h(Contains) + def LastFull = h(Last) + def ListFull = h(ListCommands) - def ListN = ListFull + "n" - def ContainsString = ContainsFull + "string" - def StartsWithString = Start + "string" - def Previous = Start + "-n" - def Nth = Start + "n" - - private def h(s: String) = Start + s - def plainCommands = Seq(ListFull, Start, LastFull, ContainsFull) + def ListN = ListFull + "n" + def ContainsString = ContainsFull + "string" + def StartsWithString = Start + "string" + def Previous = Start + "-n" + def Nth = Start + "n" - def descriptions = Seq( - LastFull -> "Execute the last command again", - ListFull -> "Show all previous commands", - ListN -> "Show the last n commands", - Nth -> ("Execute the command with index n, as shown by the " + ListFull + " command"), - Previous -> "Execute the nth command before this one", - StartsWithString -> "Execute the most recent command starting with 'string'", - ContainsString -> "Execute the most recent command containing 'string'" - ) - def helpString = "History commands:\n " + (descriptions.map{ case (c,d) => c + " " + d}).mkString("\n ") - def printHelp(): Unit = - println(helpString) - def printHistory(history: complete.History, historySize: Int, show: Int): Unit = - history.list(historySize, show).foreach(println) + private def h(s: String) = Start + s + def plainCommands = Seq(ListFull, Start, LastFull, ContainsFull) - import DefaultParsers._ + def descriptions = Seq( + LastFull -> "Execute the last command again", + ListFull -> "Show all previous commands", + ListN -> "Show the last n commands", + Nth -> ("Execute the command with index n, as shown by the " + ListFull + " command"), + Previous -> "Execute the nth command before this one", + StartsWithString -> "Execute the most recent command starting with 'string'", + ContainsString -> "Execute the most recent command containing 'string'" + ) + def helpString = "History commands:\n " + (descriptions.map { case (c, d) => c + " " + d }).mkString("\n ") + def printHelp(): Unit = + println(helpString) + def printHistory(history: complete.History, historySize: Int, show: Int): Unit = + history.list(historySize, show).foreach(println) - val MaxLines = 500 - lazy val num = token(NatBasic, "") - lazy val last = Last ^^^ { execute(_ !!) } - lazy val list = ListCommands ~> (num ?? Int.MaxValue) map { show => - (h: History) => { printHistory(h, MaxLines, show); Some(Nil) } - } - lazy val execStr = flag('?') ~ token(any.+.string, "") map { case (contains, str) => - execute(h => if(contains) h !? str else h ! str) - } - lazy val execInt = flag('-') ~ num map { case (neg, value) => - execute(h => if(neg) h !- value else h ! value) - } - lazy val help = success( (h: History) => { printHelp(); Some(Nil) } ) + import DefaultParsers._ - def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) => - { - val command = f(h) - val lines = h.lines.toArray - command.foreach(lines(lines.length - 1) = _) - h.path foreach { h => IO.writeLines(h, lines) } - Some(command.toList) - } + val MaxLines = 500 + lazy val num = token(NatBasic, "") + lazy val last = Last ^^^ { execute(_ !!) } + lazy val list = ListCommands ~> (num ?? Int.MaxValue) map { show => + (h: History) => { printHistory(h, MaxLines, show); Some(Nil) } + } + lazy val execStr = flag('?') ~ token(any.+.string, "") map { + case (contains, str) => + execute(h => if (contains) h !? str else h ! 
str) + } + lazy val execInt = flag('-') ~ num map { + case (neg, value) => + execute(h => if (neg) h !- value else h ! value) + } + lazy val help = success((h: History) => { printHelp(); Some(Nil) }) - val actionParser: Parser[complete.History => Option[List[String]]] = - Start ~> (help | last | execInt | list | execStr ) // execStr must come last + def execute(f: History => Option[String]): History => Option[List[String]] = (h: History) => + { + val command = f(h) + val lines = h.lines.toArray + command.foreach(lines(lines.length - 1) = _) + h.path foreach { h => IO.writeLines(h, lines) } + Some(command.toList) + } + + val actionParser: Parser[complete.History => Option[List[String]]] = + Start ~> (help | last | execInt | list | execStr) // execStr must come last } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/JLineCompletion.scala b/util/complete/src/main/scala/sbt/complete/JLineCompletion.scala index 1aae8e826..1d876f0ba 100644 --- a/util/complete/src/main/scala/sbt/complete/JLineCompletion.scala +++ b/util/complete/src/main/scala/sbt/complete/JLineCompletion.scala @@ -3,157 +3,154 @@ */ package sbt.complete - import jline.console.ConsoleReader - import jline.console.completer.{CandidateListCompletionHandler,Completer,CompletionHandler} - import scala.annotation.tailrec - import collection.JavaConversions +import jline.console.ConsoleReader +import jline.console.completer.{ CandidateListCompletionHandler, Completer, CompletionHandler } +import scala.annotation.tailrec +import collection.JavaConversions -object JLineCompletion -{ - def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit = - installCustomCompletor(reader)(parserAsCompletor(parser)) - def installCustomCompletor(reader: ConsoleReader)(complete: (String, Int) => (Seq[String], Seq[String])): Unit = - installCustomCompletor(customCompletor(complete), reader) - def installCustomCompletor(complete: (ConsoleReader, Int) => Boolean, reader: ConsoleReader): Unit = - { - reader.removeCompleter(DummyCompletor) - reader.addCompleter(DummyCompletor) - reader.setCompletionHandler(new CustomHandler(complete)) - } +object JLineCompletion { + def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit = + installCustomCompletor(reader)(parserAsCompletor(parser)) + def installCustomCompletor(reader: ConsoleReader)(complete: (String, Int) => (Seq[String], Seq[String])): Unit = + installCustomCompletor(customCompletor(complete), reader) + def installCustomCompletor(complete: (ConsoleReader, Int) => Boolean, reader: ConsoleReader): Unit = + { + reader.removeCompleter(DummyCompletor) + reader.addCompleter(DummyCompletor) + reader.setCompletionHandler(new CustomHandler(complete)) + } - private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean) extends CompletionHandler - { - private[this] var previous: Option[(String,Int)] = None - private[this] var level: Int = 1 - override def complete(reader: ConsoleReader, candidates: java.util.List[CharSequence], position: Int) = { - val current = Some(bufferSnapshot(reader)) - level = if(current == previous) level + 1 else 1 - previous = current - try completeImpl(reader, level) - catch { case e: Exception => - reader.print("\nException occurred while determining completions.") - e.printStackTrace() - false - } - } - } - - // always provides dummy completions so that the custom completion handler gets called - // (ConsoleReader doesn't call the handler if there aren't any completions) - // the custom handler 
will then throw away the candidates and call the custom function - private[this] final object DummyCompletor extends Completer - { - override def complete(buffer: String, cursor: Int, candidates: java.util.List[CharSequence]): Int = - { - candidates.asInstanceOf[java.util.List[String]] add "dummy" - 0 - } - } + private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean) extends CompletionHandler { + private[this] var previous: Option[(String, Int)] = None + private[this] var level: Int = 1 + override def complete(reader: ConsoleReader, candidates: java.util.List[CharSequence], position: Int) = { + val current = Some(bufferSnapshot(reader)) + level = if (current == previous) level + 1 else 1 + previous = current + try completeImpl(reader, level) + catch { + case e: Exception => + reader.print("\nException occurred while determining completions.") + e.printStackTrace() + false + } + } + } - def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) = - (str, level) => convertCompletions(Parser.completions(p, str, level)) + // always provides dummy completions so that the custom completion handler gets called + // (ConsoleReader doesn't call the handler if there aren't any completions) + // the custom handler will then throw away the candidates and call the custom function + private[this] final object DummyCompletor extends Completer { + override def complete(buffer: String, cursor: Int, candidates: java.util.List[CharSequence]): Int = + { + candidates.asInstanceOf[java.util.List[String]] add "dummy" + 0 + } + } - def convertCompletions(c: Completions): (Seq[String], Seq[String]) = - { - val cs = c.get - if(cs.isEmpty) - (Nil, "{invalid input}" :: Nil) - else - convertCompletions(cs) - } - def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = - { - val (insert, display) = - ( (Set.empty[String], Set.empty[String]) /: cs) { case ( t @ (insert,display), comp) => - if(comp.isEmpty) t else (insert + comp.append, appendNonEmpty(display, comp.display)) - } - (insert.toSeq, display.toSeq.sorted) - } - def appendNonEmpty(set: Set[String], add: String) = if(add.trim.isEmpty) set else set + add + def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) = + (str, level) => convertCompletions(Parser.completions(p, str, level)) - def customCompletor(f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => Boolean = - (reader, level) => { - val success = complete(beforeCursor(reader), reader => f(reader, level), reader) - reader.flush() - success - } + def convertCompletions(c: Completions): (Seq[String], Seq[String]) = + { + val cs = c.get + if (cs.isEmpty) + (Nil, "{invalid input}" :: Nil) + else + convertCompletions(cs) + } + def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = + { + val (insert, display) = + ((Set.empty[String], Set.empty[String]) /: cs) { + case (t @ (insert, display), comp) => + if (comp.isEmpty) t else (insert + comp.append, appendNonEmpty(display, comp.display)) + } + (insert.toSeq, display.toSeq.sorted) + } + def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add - def bufferSnapshot(reader: ConsoleReader): (String, Int) = - { - val b = reader.getCursorBuffer - (b.buffer.toString, b.cursor) - } - def beforeCursor(reader: ConsoleReader): String = - { - val b = reader.getCursorBuffer - b.buffer.substring(0, b.cursor) - } + def customCompletor(f: (String, Int) => (Seq[String], Seq[String])): (ConsoleReader, Int) => 
Boolean = + (reader, level) => { + val success = complete(beforeCursor(reader), reader => f(reader, level), reader) + reader.flush() + success + } - // returns false if there was nothing to insert and nothing to display - def complete(beforeCursor: String, completions: String => (Seq[String],Seq[String]), reader: ConsoleReader): Boolean = - { - val (insert,display) = completions(beforeCursor) - val common = commonPrefix(insert) - if(common.isEmpty) - if(display.isEmpty) - () - else - showCompletions(display, reader) - else - appendCompletion(common, reader) + def bufferSnapshot(reader: ConsoleReader): (String, Int) = + { + val b = reader.getCursorBuffer + (b.buffer.toString, b.cursor) + } + def beforeCursor(reader: ConsoleReader): String = + { + val b = reader.getCursorBuffer + b.buffer.substring(0, b.cursor) + } - !(common.isEmpty && display.isEmpty) - } + // returns false if there was nothing to insert and nothing to display + def complete(beforeCursor: String, completions: String => (Seq[String], Seq[String]), reader: ConsoleReader): Boolean = + { + val (insert, display) = completions(beforeCursor) + val common = commonPrefix(insert) + if (common.isEmpty) + if (display.isEmpty) + () + else + showCompletions(display, reader) + else + appendCompletion(common, reader) - def appendCompletion(common: String, reader: ConsoleReader) - { - reader.getCursorBuffer.write(common) - reader.redrawLine() - } + !(common.isEmpty && display.isEmpty) + } - /** `display` is assumed to be the exact strings requested to be displayed. - * In particular, duplicates should have been removed already. */ - def showCompletions(display: Seq[String], reader: ConsoleReader) - { - printCompletions(display, reader) - reader.drawLine() - } - def printCompletions(cs: Seq[String], reader: ConsoleReader) - { - val print = shouldPrint(cs, reader) - reader.println() - if(print) printLinesAndColumns(cs, reader) - } - def printLinesAndColumns(cs: Seq[String], reader: ConsoleReader) - { - val (lines, columns) = cs partition hasNewline - for(line <- lines) { - reader.print(line) - if(line.charAt(line.length - 1) != '\n') - reader.println() - } - reader.printColumns(JavaConversions.seqAsJavaList(columns.map(_.trim))) - } - def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0 - def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean = - { - val size = cs.size - (size <= reader.getAutoprintThreshold) || - confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader) - } - def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean = - { - reader.println() - reader.print(prompt) - reader.flush() - reader.readCharacter(trueC, falseC) == trueC - } + def appendCompletion(common: String, reader: ConsoleReader) { + reader.getCursorBuffer.write(common) + reader.redrawLine() + } - def commonPrefix(s: Seq[String]): String = if(s.isEmpty) "" else s reduceLeft commonPrefix - def commonPrefix(a: String, b: String): String = - { - val len = a.length min b.length - @tailrec def loop(i: Int): Int = if(i >= len) len else if(a(i) != b(i)) i else loop(i+1) - a.substring(0, loop(0)) - } + /** + * `display` is assumed to be the exact strings requested to be displayed. + * In particular, duplicates should have been removed already. 
+ */ + def showCompletions(display: Seq[String], reader: ConsoleReader) { + printCompletions(display, reader) + reader.drawLine() + } + def printCompletions(cs: Seq[String], reader: ConsoleReader) { + val print = shouldPrint(cs, reader) + reader.println() + if (print) printLinesAndColumns(cs, reader) + } + def printLinesAndColumns(cs: Seq[String], reader: ConsoleReader) { + val (lines, columns) = cs partition hasNewline + for (line <- lines) { + reader.print(line) + if (line.charAt(line.length - 1) != '\n') + reader.println() + } + reader.printColumns(JavaConversions.seqAsJavaList(columns.map(_.trim))) + } + def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0 + def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean = + { + val size = cs.size + (size <= reader.getAutoprintThreshold) || + confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader) + } + def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean = + { + reader.println() + reader.print(prompt) + reader.flush() + reader.readCharacter(trueC, falseC) == trueC + } + + def commonPrefix(s: Seq[String]): String = if (s.isEmpty) "" else s reduceLeft commonPrefix + def commonPrefix(a: String, b: String): String = + { + val len = a.length min b.length + @tailrec def loop(i: Int): Int = if (i >= len) len else if (a(i) != b(i)) i else loop(i + 1) + a.substring(0, loop(0)) + } } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/Parser.scala b/util/complete/src/main/scala/sbt/complete/Parser.scala index 575cc5ec6..393501792 100644 --- a/util/complete/src/main/scala/sbt/complete/Parser.scala +++ b/util/complete/src/main/scala/sbt/complete/Parser.scala @@ -3,725 +3,730 @@ */ package sbt.complete - import Parser._ - import sbt.Types.{left, right, some} - import sbt.Util.{makeList,separate} +import Parser._ +import sbt.Types.{ left, right, some } +import sbt.Util.{ makeList, separate } -/** A String parser that provides semi-automatic tab completion. -* A successful parse results in a value of type `T`. -* The methods in this trait are what must be implemented to define a new Parser implementation, but are not typically useful for common usage. -* Instead, most useful methods for combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type. -*/ -sealed trait Parser[+T] -{ - def derive(i: Char): Parser[T] - def resultEmpty: Result[T] - def result: Option[T] - def completions(level: Int): Completions - def failure: Option[Failure] - def isTokenStart = false - def ifValid[S](p: => Parser[S]): Parser[S] - def valid: Boolean +/** + * A String parser that provides semi-automatic tab completion. + * A successful parse results in a value of type `T`. + * The methods in this trait are what must be implemented to define a new Parser implementation, but are not typically useful for common usage. + * Instead, most useful methods for combining smaller parsers into larger parsers are implicitly added by the [[RichParser]] type. + */ +sealed trait Parser[+T] { + def derive(i: Char): Parser[T] + def resultEmpty: Result[T] + def result: Option[T] + def completions(level: Int): Completions + def failure: Option[Failure] + def isTokenStart = false + def ifValid[S](p: => Parser[S]): Parser[S] + def valid: Boolean } -sealed trait RichParser[A] -{ - /** Apply the original Parser and then apply `next` (in order). The result of both is provides as a pair. 
*/ - def ~[B](next: Parser[B]): Parser[(A,B)] +sealed trait RichParser[A] { + /** Apply the original Parser and then apply `next` (in order). The result of both is provides as a pair. */ + def ~[B](next: Parser[B]): Parser[(A, B)] - /** Apply the original Parser one or more times and provide the non-empty sequence of results.*/ - def + : Parser[Seq[A]] + /** Apply the original Parser one or more times and provide the non-empty sequence of results.*/ + def + : Parser[Seq[A]] - /** Apply the original Parser zero or more times and provide the (potentially empty) sequence of results.*/ - def * : Parser[Seq[A]] + /** Apply the original Parser zero or more times and provide the (potentially empty) sequence of results.*/ + def * : Parser[Seq[A]] - /** Apply the original Parser zero or one times, returning None if it was applied zero times or the result wrapped in Some if it was applied once.*/ - def ? : Parser[Option[A]] + /** Apply the original Parser zero or one times, returning None if it was applied zero times or the result wrapped in Some if it was applied once.*/ + def ? : Parser[Option[A]] - /** Apply either the original Parser or `b`.*/ - def |[B >: A](b: Parser[B]): Parser[B] + /** Apply either the original Parser or `b`.*/ + def |[B >: A](b: Parser[B]): Parser[B] - /** Apply either the original Parser or `b`.*/ - def ||[B](b: Parser[B]): Parser[Either[A,B]] + /** Apply either the original Parser or `b`.*/ + def ||[B](b: Parser[B]): Parser[Either[A, B]] - /** Apply the original Parser to the input and then apply `f` to the result.*/ - def map[B](f: A => B): Parser[B] + /** Apply the original Parser to the input and then apply `f` to the result.*/ + def map[B](f: A => B): Parser[B] - /** Returns the original parser. This is useful for converting literals to Parsers. - * For example, `'c'.id` or `"asdf".id`*/ - def id: Parser[A] + /** + * Returns the original parser. This is useful for converting literals to Parsers. + * For example, `'c'.id` or `"asdf".id` + */ + def id: Parser[A] - /** Apply the original Parser, but provide `value` as the result if it succeeds. */ - def ^^^[B](value: B): Parser[B] + /** Apply the original Parser, but provide `value` as the result if it succeeds. */ + def ^^^[B](value: B): Parser[B] - /** Apply the original Parser, but provide `alt` as the result if it fails.*/ - def ??[B >: A](alt: B): Parser[B] + /** Apply the original Parser, but provide `alt` as the result if it fails.*/ + def ??[B >: A](alt: B): Parser[B] - /** Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of `next`. - * (The arrow point in the direction of the retained result.)*/ - def <~[B](b: Parser[B]): Parser[A] + /** + * Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of `next`. + * (The arrow point in the direction of the retained result.) + */ + def <~[B](b: Parser[B]): Parser[A] - /** Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of the original parser. - * (The arrow point in the direction of the retained result.)*/ - def ~>[B](b: Parser[B]): Parser[B] + /** + * Produces a Parser that applies the original Parser and then applies `next` (in order), discarding the result of the original parser. + * (The arrow point in the direction of the retained result.) 
+ */ + def ~>[B](b: Parser[B]): Parser[B] - /** Uses the specified message if the original Parser fails.*/ - def !!!(msg: String): Parser[A] + /** Uses the specified message if the original Parser fails.*/ + def !!!(msg: String): Parser[A] - /** If an exception is thrown by the original Parser, - * capture it and fail locally instead of allowing the exception to propagate up and terminate parsing.*/ - def failOnException: Parser[A] + /** + * If an exception is thrown by the original Parser, + * capture it and fail locally instead of allowing the exception to propagate up and terminate parsing. + */ + def failOnException: Parser[A] - @deprecated("Use `not` and explicitly provide the failure message", "0.12.2") - def unary_- : Parser[Unit] + @deprecated("Use `not` and explicitly provide the failure message", "0.12.2") + def unary_- : Parser[Unit] - /** Apply the original parser, but only succeed if `o` also succeeds. - * Note that `o` does not need to consume the same amount of input to satisfy this condition.*/ - def & (o: Parser[_]): Parser[A] + /** + * Apply the original parser, but only succeed if `o` also succeeds. + * Note that `o` does not need to consume the same amount of input to satisfy this condition. + */ + def &(o: Parser[_]): Parser[A] - @deprecated("Use `and` and `not` and explicitly provide the failure message", "0.12.2") - def - (o: Parser[_]): Parser[A] + @deprecated("Use `and` and `not` and explicitly provide the failure message", "0.12.2") + def -(o: Parser[_]): Parser[A] - /** Explicitly defines the completions for the original Parser.*/ - def examples(s: String*): Parser[A] + /** Explicitly defines the completions for the original Parser.*/ + def examples(s: String*): Parser[A] - /** Explicitly defines the completions for the original Parser.*/ - def examples(s: Set[String], check: Boolean = false): Parser[A] + /** Explicitly defines the completions for the original Parser.*/ + def examples(s: Set[String], check: Boolean = false): Parser[A] - /** - * @param exampleSource the source of examples when displaying completions to the user. - * @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can - * prevent lengthy pauses and avoids bad interactive user experience. - * @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the - * given parser). Invalid examples will be filtered out and only valid suggestions will - * be displayed. - * @return a new parser with a new source of completions. - */ - def examples(exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] + /** + * @param exampleSource the source of examples when displaying completions to the user. + * @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can + * prevent lengthy pauses and avoids bad interactive user experience. + * @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the + * given parser). Invalid examples will be filtered out and only valid suggestions will + * be displayed. + * @return a new parser with a new source of completions. + */ + def examples(exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] - /** - * @param exampleSource the source of examples when displaying completions to the user. - * @return a new parser with a new source of completions. 
It displays at most 25 completion examples and does not - * remove invalid examples. - */ - def examples(exampleSource: ExampleSource): Parser[A] = examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false) + /** + * @param exampleSource the source of examples when displaying completions to the user. + * @return a new parser with a new source of completions. It displays at most 25 completion examples and does not + * remove invalid examples. + */ + def examples(exampleSource: ExampleSource): Parser[A] = examples(exampleSource, maxNumberOfExamples = 25, removeInvalidExamples = false) - /** Converts a Parser returning a Char sequence to a Parser returning a String.*/ - def string(implicit ev: A <:< Seq[Char]): Parser[String] + /** Converts a Parser returning a Char sequence to a Parser returning a String.*/ + def string(implicit ev: A <:< Seq[Char]): Parser[String] - /** Produces a Parser that filters the original parser. - * If 'f' is not true when applied to the output of the original parser, the Parser returned by this method fails. - * The failure message is constructed by applying `msg` to the String that was successfully parsed by the original parser. */ - def filter(f: A => Boolean, msg: String => String): Parser[A] + /** + * Produces a Parser that filters the original parser. + * If 'f' is not true when applied to the output of the original parser, the Parser returned by this method fails. + * The failure message is constructed by applying `msg` to the String that was successfully parsed by the original parser. + */ + def filter(f: A => Boolean, msg: String => String): Parser[A] - /** Applies the original parser, applies `f` to the result to get the next parser, and applies that parser and uses its result for the overall result. */ - def flatMap[B](f: A => Parser[B]): Parser[B] + /** Applies the original parser, applies `f` to the result to get the next parser, and applies that parser and uses its result for the overall result. */ + def flatMap[B](f: A => Parser[B]): Parser[B] } /** Contains Parser implementation helper methods not typically needed for using parsers. 
*/ -object Parser extends ParserMain -{ - sealed abstract class Result[+T] { - def isFailure: Boolean - def isValid: Boolean - def errors: Seq[String] - def or[B >: T](b: => Result[B]): Result[B] - def either[B](b: => Result[B]): Result[Either[T,B]] - def map[B](f: T => B): Result[B] - def flatMap[B](f: T => Result[B]): Result[B] - def &&(b: => Result[_]): Result[T] - def filter(f: T => Boolean, msg: => String): Result[T] - def seq[B](b: => Result[B]): Result[(T,B)] = app(b)( (m,n) => (m,n) ) - def app[B,C](b: => Result[B])(f: (T, B) => C): Result[C] - def toEither: Either[() => Seq[String], T] - } - final case class Value[+T](value: T) extends Result[T] { - def isFailure = false - def isValid: Boolean = true - def errors = Nil - def app[B,C](b: => Result[B])(f: (T, B) => C): Result[C] = b match { - case fail: Failure => fail - case Value(bv) => Value(f(value, bv)) - } - def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this } - def or[B >: T](b: => Result[B]): Result[B] = this - def either[B](b: => Result[B]): Result[Either[T,B]] = Value(Left(value)) - def map[B](f: T => B): Result[B] = Value(f(value)) - def flatMap[B](f: T => Result[B]): Result[B] = f(value) - def filter(f: T => Boolean, msg: => String): Result[T] = if(f(value)) this else mkFailure(msg) - def toEither = Right(value) - } - final class Failure private[sbt](mkErrors: => Seq[String], val definitive: Boolean) extends Result[Nothing] { - lazy val errors: Seq[String] = mkErrors - def isFailure = true - def isValid = false - def map[B](f: Nothing => B) = this - def flatMap[B](f: Nothing => Result[B]) = this - def or[B](b: => Result[B]): Result[B] = b match { - case v: Value[B] => v - case f: Failure => if(definitive) this else this ++ f - } - def either[B](b: => Result[B]): Result[Either[Nothing,B]] = b match { - case Value(v) => Value(Right(v)) - case f: Failure => if(definitive) this else this ++ f - } - def filter(f: Nothing => Boolean, msg: => String) = this - def app[B,C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this - def &&(b: => Result[_]) = this - def toEither = Left(() => errors) +object Parser extends ParserMain { + sealed abstract class Result[+T] { + def isFailure: Boolean + def isValid: Boolean + def errors: Seq[String] + def or[B >: T](b: => Result[B]): Result[B] + def either[B](b: => Result[B]): Result[Either[T, B]] + def map[B](f: T => B): Result[B] + def flatMap[B](f: T => Result[B]): Result[B] + def &&(b: => Result[_]): Result[T] + def filter(f: T => Boolean, msg: => String): Result[T] + def seq[B](b: => Result[B]): Result[(T, B)] = app(b)((m, n) => (m, n)) + def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C] + def toEither: Either[() => Seq[String], T] + } + final case class Value[+T](value: T) extends Result[T] { + def isFailure = false + def isValid: Boolean = true + def errors = Nil + def app[B, C](b: => Result[B])(f: (T, B) => C): Result[C] = b match { + case fail: Failure => fail + case Value(bv) => Value(f(value, bv)) + } + def &&(b: => Result[_]): Result[T] = b match { case f: Failure => f; case _ => this } + def or[B >: T](b: => Result[B]): Result[B] = this + def either[B](b: => Result[B]): Result[Either[T, B]] = Value(Left(value)) + def map[B](f: T => B): Result[B] = Value(f(value)) + def flatMap[B](f: T => Result[B]): Result[B] = f(value) + def filter(f: T => Boolean, msg: => String): Result[T] = if (f(value)) this else mkFailure(msg) + def toEither = Right(value) + } + final class Failure private[sbt] (mkErrors: => Seq[String], val definitive: Boolean) 
extends Result[Nothing] { + lazy val errors: Seq[String] = mkErrors + def isFailure = true + def isValid = false + def map[B](f: Nothing => B) = this + def flatMap[B](f: Nothing => Result[B]) = this + def or[B](b: => Result[B]): Result[B] = b match { + case v: Value[B] => v + case f: Failure => if (definitive) this else this ++ f + } + def either[B](b: => Result[B]): Result[Either[Nothing, B]] = b match { + case Value(v) => Value(Right(v)) + case f: Failure => if (definitive) this else this ++ f + } + def filter(f: Nothing => Boolean, msg: => String) = this + def app[B, C](b: => Result[B])(f: (Nothing, B) => C): Result[C] = this + def &&(b: => Result[_]) = this + def toEither = Left(() => errors) - private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors) - } - def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure = new Failure(errors.distinct, definitive) - def mkFailure(error: => String, definitive: Boolean = false): Failure = new Failure(error :: Nil, definitive) + private[sbt] def ++(f: Failure) = mkFailures(errors ++ f.errors) + } + def mkFailures(errors: => Seq[String], definitive: Boolean = false): Failure = new Failure(errors.distinct, definitive) + def mkFailure(error: => String, definitive: Boolean = false): Failure = new Failure(error :: Nil, definitive) - @deprecated("This method is deprecated and will be removed in the next major version. Use the parser directly to check for invalid completions.", since = "0.13.2") - def checkMatches(a: Parser[_], completions: Seq[String]) - { - val bad = completions.filter( apply(a)(_).resultEmpty.isFailure) - if(!bad.isEmpty) sys.error("Invalid example completions: " + bad.mkString("'", "', '", "'")) - } + @deprecated("This method is deprecated and will be removed in the next major version. 
Use the parser directly to check for invalid completions.", since = "0.13.2") + def checkMatches(a: Parser[_], completions: Seq[String]) { + val bad = completions.filter(apply(a)(_).resultEmpty.isFailure) + if (!bad.isEmpty) sys.error("Invalid example completions: " + bad.mkString("'", "', '", "'")) + } - def tuple[A,B](a: Option[A], b: Option[B]): Option[(A,B)] = - (a,b) match { case (Some(av), Some(bv)) => Some((av, bv)); case _ => None } + def tuple[A, B](a: Option[A], b: Option[B]): Option[(A, B)] = + (a, b) match { case (Some(av), Some(bv)) => Some((av, bv)); case _ => None } - def mapParser[A,B](a: Parser[A], f: A => B): Parser[B] = - a.ifValid { - a.result match - { - case Some(av) => success( f(av) ) - case None => new MapParser(a, f) - } - } + def mapParser[A, B](a: Parser[A], f: A => B): Parser[B] = + a.ifValid { + a.result match { + case Some(av) => success(f(av)) + case None => new MapParser(a, f) + } + } - def bindParser[A,B](a: Parser[A], f: A => Parser[B]): Parser[B] = - a.ifValid { - a.result match - { - case Some(av) => f(av) - case None => new BindParser(a, f) - } - } + def bindParser[A, B](a: Parser[A], f: A => Parser[B]): Parser[B] = + a.ifValid { + a.result match { + case Some(av) => f(av) + case None => new BindParser(a, f) + } + } - def filterParser[T](a: Parser[T], f: T => Boolean, seen: String, msg: String => String): Parser[T] = - a.ifValid { - a.result match - { - case Some(av) if f(av) => success( av ) - case _ => new Filter(a, f, seen, msg) - } - } + def filterParser[T](a: Parser[T], f: T => Boolean, seen: String, msg: String => String): Parser[T] = + a.ifValid { + a.result match { + case Some(av) if f(av) => success(av) + case _ => new Filter(a, f, seen, msg) + } + } - def seqParser[A,B](a: Parser[A], b: Parser[B]): Parser[(A,B)] = - a.ifValid { b.ifValid { - (a.result, b.result) match { - case (Some(av), Some(bv)) => success( (av, bv) ) - case (Some(av), None) => b map { bv => (av, bv) } - case (None, Some(bv)) => a map { av => (av, bv) } - case (None, None) => new SeqParser(a,b) - } - }} + def seqParser[A, B](a: Parser[A], b: Parser[B]): Parser[(A, B)] = + a.ifValid { + b.ifValid { + (a.result, b.result) match { + case (Some(av), Some(bv)) => success((av, bv)) + case (Some(av), None) => b map { bv => (av, bv) } + case (None, Some(bv)) => a map { av => (av, bv) } + case (None, None) => new SeqParser(a, b) + } + } + } - def choiceParser[A,B](a: Parser[A], b: Parser[B]): Parser[Either[A,B]] = - if(a.valid) - if(b.valid) new HetParser(a,b) else a.map( left.fn ) - else - b.map( right.fn ) + def choiceParser[A, B](a: Parser[A], b: Parser[B]): Parser[Either[A, B]] = + if (a.valid) + if (b.valid) new HetParser(a, b) else a.map(left.fn) + else + b.map(right.fn) - def opt[T](a: Parser[T]): Parser[Option[T]] = - if(a.valid) new Optional(a) else success(None) + def opt[T](a: Parser[T]): Parser[Option[T]] = + if (a.valid) new Optional(a) else success(None) - def onFailure[T](delegate: Parser[T], msg: String): Parser[T] = - if(delegate.valid) new OnFailure(delegate, msg) else failure(msg) - def trapAndFail[T](delegate: Parser[T]): Parser[T] = - delegate.ifValid( new TrapAndFail(delegate) ) + def onFailure[T](delegate: Parser[T], msg: String): Parser[T] = + if (delegate.valid) new OnFailure(delegate, msg) else failure(msg) + def trapAndFail[T](delegate: Parser[T]): Parser[T] = + delegate.ifValid(new TrapAndFail(delegate)) - def zeroOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 0, Infinite) - def oneOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 1, Infinite) + 
def zeroOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 0, Infinite) + def oneOrMore[T](p: Parser[T]): Parser[Seq[T]] = repeat(p, 1, Infinite) - def repeat[T](p: Parser[T], min: Int = 0, max: UpperBound = Infinite): Parser[Seq[T]] = - repeat(None, p, min, max, Nil) - private[complete] def repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, revAcc: List[T]): Parser[Seq[T]] = - { - assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")") - assume(max >= min, "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")") + def repeat[T](p: Parser[T], min: Int = 0, max: UpperBound = Infinite): Parser[Seq[T]] = + repeat(None, p, min, max, Nil) + private[complete] def repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, revAcc: List[T]): Parser[Seq[T]] = + { + assume(min >= 0, "Minimum must be greater than or equal to zero (was " + min + ")") + assume(max >= min, "Minimum must be less than or equal to maximum (min: " + min + ", max: " + max + ")") - def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] = - repeated match - { - case i: Invalid if min == 0 => invalidButOptional - case i: Invalid => i - case _ => - repeated.result match - { - case Some(value) => success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here - case None => if(max.isZero) success(revAcc.reverse) else new Repeat(partial, repeated, min, max, revAcc) - } - } + def checkRepeated(invalidButOptional: => Parser[Seq[T]]): Parser[Seq[T]] = + repeated match { + case i: Invalid if min == 0 => invalidButOptional + case i: Invalid => i + case _ => + repeated.result match { + case Some(value) => success(revAcc reverse_::: value :: Nil) // revAcc should be Nil here + case None => if (max.isZero) success(revAcc.reverse) else new Repeat(partial, repeated, min, max, revAcc) + } + } - partial match - { - case Some(part) => - part.ifValid { - part.result match - { - case Some(value) => repeat(None, repeated, min, max, value :: revAcc) - case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse)) - } - } - case None => checkRepeated(success(Nil)) - } - } + partial match { + case Some(part) => + part.ifValid { + part.result match { + case Some(value) => repeat(None, repeated, min, max, value :: revAcc) + case None => checkRepeated(part.map(lv => (lv :: revAcc).reverse)) + } + } + case None => checkRepeated(success(Nil)) + } + } - @deprecated("Explicitly call `and` and `not` to provide the failure message.", "0.12.2") - def sub[T](a: Parser[T], b: Parser[_]): Parser[T] = and(a, not(b)) + @deprecated("Explicitly call `and` and `not` to provide the failure message.", "0.12.2") + def sub[T](a: Parser[T], b: Parser[_]): Parser[T] = and(a, not(b)) - def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid( b.ifValid( new And(a, b) )) + def and[T](a: Parser[T], b: Parser[_]): Parser[T] = a.ifValid(b.ifValid(new And(a, b))) } -trait ParserMain -{ - /** Provides combinators for Parsers.*/ - implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] - { - def ~[B](b: Parser[B]) = seqParser(a, b) - def ||[B](b: Parser[B]) = choiceParser(a,b) - def |[B >: A](b: Parser[B]) = homParser(a,b) - def ? 
= opt(a) - def * = zeroOrMore(a) - def + = oneOrMore(a) - def map[B](f: A => B) = mapParser(a, f) - def id = a +trait ParserMain { + /** Provides combinators for Parsers.*/ + implicit def richParser[A](a: Parser[A]): RichParser[A] = new RichParser[A] { + def ~[B](b: Parser[B]) = seqParser(a, b) + def ||[B](b: Parser[B]) = choiceParser(a, b) + def |[B >: A](b: Parser[B]) = homParser(a, b) + def ? = opt(a) + def * = zeroOrMore(a) + def + = oneOrMore(a) + def map[B](f: A => B) = mapParser(a, f) + def id = a - def ^^^[B](value: B): Parser[B] = a map { _ => value } - def ??[B >: A](alt: B): Parser[B] = a.? map { _ getOrElse alt } - def <~[B](b: Parser[B]): Parser[A] = (a ~ b) map { case av ~ _ => av } - def ~>[B](b: Parser[B]): Parser[B] = (a ~ b) map { case _ ~ bv => bv } - def !!!(msg: String): Parser[A] = onFailure(a, msg) - def failOnException: Parser[A] = trapAndFail(a) + def ^^^[B](value: B): Parser[B] = a map { _ => value } + def ??[B >: A](alt: B): Parser[B] = a.? map { _ getOrElse alt } + def <~[B](b: Parser[B]): Parser[A] = (a ~ b) map { case av ~ _ => av } + def ~>[B](b: Parser[B]): Parser[B] = (a ~ b) map { case _ ~ bv => bv } + def !!!(msg: String): Parser[A] = onFailure(a, msg) + def failOnException: Parser[A] = trapAndFail(a) - def unary_- = not(a) - def & (o: Parser[_]) = and(a, o) - def - (o: Parser[_]) = sub(a, o) - def examples(s: String*): Parser[A] = examples(s.toSet) - def examples(s: Set[String], check: Boolean = false): Parser[A] = examples(new FixedSetExamples(s), s.size, check) - def examples(s: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples) - def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg) - def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString) - def flatMap[B](f: A => Parser[B]) = bindParser(a, f) - } + def unary_- = not(a) + def &(o: Parser[_]) = and(a, o) + def -(o: Parser[_]) = sub(a, o) + def examples(s: String*): Parser[A] = examples(s.toSet) + def examples(s: Set[String], check: Boolean = false): Parser[A] = examples(new FixedSetExamples(s), s.size, check) + def examples(s: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = Parser.examples(a, s, maxNumberOfExamples, removeInvalidExamples) + def filter(f: A => Boolean, msg: String => String): Parser[A] = filterParser(a, f, "", msg) + def string(implicit ev: A <:< Seq[Char]): Parser[String] = map(_.mkString) + def flatMap[B](f: A => Parser[B]) = bindParser(a, f) + } - implicit def literalRichCharParser(c: Char): RichParser[Char] = richParser(c) - implicit def literalRichStringParser(s: String): RichParser[String] = richParser(s) + implicit def literalRichCharParser(c: Char): RichParser[Char] = richParser(c) + implicit def literalRichStringParser(s: String): RichParser[String] = richParser(s) - /** Construct a parser that is valid, but has no valid result. This is used as a way - * to provide a definitive Failure when a parser doesn't match empty input. For example, - * in `softFailure(...) | p`, if `p` doesn't match the empty sequence, the failure will come - * from the Parser constructed by the `softFailure` method. */ - private[sbt] def softFailure(msg: => String, definitive: Boolean = false): Parser[Nothing] = - SoftInvalid( mkFailures(msg :: Nil, definitive) ) + /** + * Construct a parser that is valid, but has no valid result. 
This is used as a way + * to provide a definitive Failure when a parser doesn't match empty input. For example, + * in `softFailure(...) | p`, if `p` doesn't match the empty sequence, the failure will come + * from the Parser constructed by the `softFailure` method. + */ + private[sbt] def softFailure(msg: => String, definitive: Boolean = false): Parser[Nothing] = + SoftInvalid(mkFailures(msg :: Nil, definitive)) - /** Defines a parser that always fails on any input with messages `msgs`. - * If `definitive` is `true`, any failures by later alternatives are discarded.*/ - def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] = Invalid(mkFailures(msgs, definitive)) + /** + * Defines a parser that always fails on any input with messages `msgs`. + * If `definitive` is `true`, any failures by later alternatives are discarded. + */ + def invalid(msgs: => Seq[String], definitive: Boolean = false): Parser[Nothing] = Invalid(mkFailures(msgs, definitive)) - /** Defines a parser that always fails on any input with message `msg`. - * If `definitive` is `true`, any failures by later alternatives are discarded.*/ - def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] = invalid(msg :: Nil, definitive) + /** + * Defines a parser that always fails on any input with message `msg`. + * If `definitive` is `true`, any failures by later alternatives are discarded. + */ + def failure(msg: => String, definitive: Boolean = false): Parser[Nothing] = invalid(msg :: Nil, definitive) - /** Defines a parser that always succeeds on empty input with the result `value`.*/ - def success[T](value: T): Parser[T] = new ValidParser[T] { - override def result = Some(value) - def resultEmpty = Value(value) - def derive(c: Char) = Parser.failure("Expected end of input.") - def completions(level: Int) = Completions.empty - override def toString = "success(" + value + ")" - } + /** Defines a parser that always succeeds on empty input with the result `value`.*/ + def success[T](value: T): Parser[T] = new ValidParser[T] { + override def result = Some(value) + def resultEmpty = Value(value) + def derive(c: Char) = Parser.failure("Expected end of input.") + def completions(level: Int) = Completions.empty + override def toString = "success(" + value + ")" + } - /** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/ - implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] = - charClass(r contains _).examples(r.map(_.toString) : _*) + /** Presents a Char range as a Parser. A single Char is parsed only if it is in the given range.*/ + implicit def range(r: collection.immutable.NumericRange[Char]): Parser[Char] = + charClass(r contains _).examples(r.map(_.toString): _*) - /** Defines a Parser that parses a single character only if it is contained in `legal`.*/ - def chars(legal: String): Parser[Char] = - { - val set = legal.toSet - charClass(set, "character in '" + legal + "'") examples(set.map(_.toString)) - } + /** Defines a Parser that parses a single character only if it is contained in `legal`.*/ + def chars(legal: String): Parser[Char] = + { + val set = legal.toSet + charClass(set, "character in '" + legal + "'") examples (set.map(_.toString)) + } - /** Defines a Parser that parses a single character only if the predicate `f` returns true for that character. - * If this parser fails, `label` is used as the failure message. 
*/ - def charClass(f: Char => Boolean, label: String = ""): Parser[Char] = new CharacterClass(f, label) + /** + * Defines a Parser that parses a single character only if the predicate `f` returns true for that character. + * If this parser fails, `label` is used as the failure message. + */ + def charClass(f: Char => Boolean, label: String = ""): Parser[Char] = new CharacterClass(f, label) - /** Presents a single Char `ch` as a Parser that only parses that exact character. */ - implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] { - def result = None - def resultEmpty = mkFailure( "Expected '" + ch + "'" ) - def derive(c: Char) = if(c == ch) success(ch) else new Invalid(resultEmpty) - def completions(level: Int) = Completions.single(Completion.suggestStrict(ch.toString)) - override def toString = "'" + ch + "'" - } - /** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/ - implicit def literal(s: String): Parser[String] = stringLiteral(s, 0) + /** Presents a single Char `ch` as a Parser that only parses that exact character. */ + implicit def literal(ch: Char): Parser[Char] = new ValidParser[Char] { + def result = None + def resultEmpty = mkFailure("Expected '" + ch + "'") + def derive(c: Char) = if (c == ch) success(ch) else new Invalid(resultEmpty) + def completions(level: Int) = Completions.single(Completion.suggestStrict(ch.toString)) + override def toString = "'" + ch + "'" + } + /** Presents a literal String `s` as a Parser that only parses that exact text and provides it as the result.*/ + implicit def literal(s: String): Parser[String] = stringLiteral(s, 0) - /** See [[unapply]]. */ - object ~ { - /** Convenience for destructuring a tuple that mirrors the `~` combinator.*/ - def unapply[A,B](t: (A,B)): Some[(A,B)] = Some(t) - } + /** See [[unapply]]. */ + object ~ { + /** Convenience for destructuring a tuple that mirrors the `~` combinator.*/ + def unapply[A, B](t: (A, B)): Some[(A, B)] = Some(t) + } - /** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccesful, an error message is provided in `Left`.*/ - def parse[T](str: String, parser: Parser[T]): Either[String, T] = - Parser.result(parser, str).left.map { failures => - val (msgs,pos) = failures() - ProcessError(str, msgs, pos) - } + /** Parses input `str` using `parser`. If successful, the result is provided wrapped in `Right`. If unsuccesful, an error message is provided in `Left`.*/ + def parse[T](str: String, parser: Parser[T]): Either[String, T] = + Parser.result(parser, str).left.map { failures => + val (msgs, pos) = failures() + ProcessError(str, msgs, pos) + } - /** Convenience method to use when developing a parser. - * `parser` is applied to the input `str`. - * If `completions` is true, the available completions for the input are displayed. - * Otherwise, the result of parsing is printed using the result's `toString` method. - * If parsing fails, the error message is displayed. - * - * See also [[sampleParse]] and [[sampleCompletions]]. */ - def sample(str: String, parser: Parser[_], completions: Boolean = false): Unit = - if(completions) sampleCompletions(str, parser) else sampleParse(str, parser) + /** + * Convenience method to use when developing a parser. + * `parser` is applied to the input `str`. + * If `completions` is true, the available completions for the input are displayed. + * Otherwise, the result of parsing is printed using the result's `toString` method. 
+ * If parsing fails, the error message is displayed. + * + * See also [[sampleParse]] and [[sampleCompletions]]. + */ + def sample(str: String, parser: Parser[_], completions: Boolean = false): Unit = + if (completions) sampleCompletions(str, parser) else sampleParse(str, parser) - /** Convenience method to use when developing a parser. - * `parser` is applied to the input `str` and the result of parsing is printed using the result's `toString` method. - * If parsing fails, the error message is displayed. */ - def sampleParse(str: String, parser: Parser[_]): Unit = - parse(str, parser) match { - case Left(msg) => println(msg) - case Right(v) => println(v) - } + /** + * Convenience method to use when developing a parser. + * `parser` is applied to the input `str` and the result of parsing is printed using the result's `toString` method. + * If parsing fails, the error message is displayed. + */ + def sampleParse(str: String, parser: Parser[_]): Unit = + parse(str, parser) match { + case Left(msg) => println(msg) + case Right(v) => println(v) + } - /** Convenience method to use when developing a parser. - * `parser` is applied to the input `str` and the available completions are displayed on separate lines. - * If parsing fails, the error message is displayed. */ - def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit = - Parser.completions(parser, str, level).get foreach println + /** + * Convenience method to use when developing a parser. + * `parser` is applied to the input `str` and the available completions are displayed on separate lines. + * If parsing fails, the error message is displayed. + */ + def sampleCompletions(str: String, parser: Parser[_], level: Int = 1): Unit = + Parser.completions(parser, str, level).get foreach println - // intended to be temporary pending proper error feedback - def result[T](p: Parser[T], s: String): Either[() => (Seq[String],Int), T] = - { - def loop(i: Int, a: Parser[T]): Either[() => (Seq[String],Int), T] = - a match - { - case Invalid(f) => Left( () => (f.errors, i) ) - case _ => - val ci = i+1 - if(ci >= s.length) - a.resultEmpty.toEither.left.map { msgs0 => () => - val msgs = msgs0() - val nonEmpty = if(msgs.isEmpty) "Unexpected end of input" :: Nil else msgs - (nonEmpty, ci) - } - else - loop(ci, a derive s(ci) ) - } - loop(-1, p) - } + // intended to be temporary pending proper error feedback + def result[T](p: Parser[T], s: String): Either[() => (Seq[String], Int), T] = + { + def loop(i: Int, a: Parser[T]): Either[() => (Seq[String], Int), T] = + a match { + case Invalid(f) => Left(() => (f.errors, i)) + case _ => + val ci = i + 1 + if (ci >= s.length) + a.resultEmpty.toEither.left.map { msgs0 => + () => + val msgs = msgs0() + val nonEmpty = if (msgs.isEmpty) "Unexpected end of input" :: Nil else msgs + (nonEmpty, ci) + } + else + loop(ci, a derive s(ci)) + } + loop(-1, p) + } - /** Applies parser `p` to input `s`. */ - def apply[T](p: Parser[T])(s: String): Parser[T] = - (p /: s)(derive1) + /** Applies parser `p` to input `s`. */ + def apply[T](p: Parser[T])(s: String): Parser[T] = + (p /: s)(derive1) - /** Applies parser `p` to a single character of input. */ - def derive1[T](p: Parser[T], c: Char): Parser[T] = - if(p.valid) p.derive(c) else p + /** Applies parser `p` to a single character of input. */ + def derive1[T](p: Parser[T], c: Char): Parser[T] = + if (p.valid) p.derive(c) else p - /** Applies parser `p` to input `s` and returns the completions at verbosity `level`. 
- * The interpretation of `level` is up to parser definitions, but 0 is the default by convention, - * with increasing positive numbers corresponding to increasing verbosity. Typically no more than - * a few levels are defined. */ - def completions(p: Parser[_], s: String, level: Int): Completions = - // The x Completions.empty removes any trailing token completions where append.isEmpty - apply(p)(s).completions(level) x Completions.empty + /** + * Applies parser `p` to input `s` and returns the completions at verbosity `level`. + * The interpretation of `level` is up to parser definitions, but 0 is the default by convention, + * with increasing positive numbers corresponding to increasing verbosity. Typically no more than + * a few levels are defined. + */ + def completions(p: Parser[_], s: String, level: Int): Completions = + // The x Completions.empty removes any trailing token completions where append.isEmpty + apply(p)(s).completions(level) x Completions.empty - def examples[A](a: Parser[A], completions: Set[String], check: Boolean = false): Parser[A] = - examples(a, new FixedSetExamples(completions), completions.size, check) + def examples[A](a: Parser[A], completions: Set[String], check: Boolean = false): Parser[A] = + examples(a, new FixedSetExamples(completions), completions.size, check) - /** - * @param a the parser to decorate with a source of examples. All validation and parsing is delegated to this parser, - * only [[Parser.completions]] is modified. - * @param completions the source of examples when displaying completions to the user. - * @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can - * prevent lengthy pauses and avoids bad interactive user experience. - * @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the given parser). An - * exception is thrown if the example source contains no valid completion suggestions. - * @tparam A the type of values that are returned by the parser. - * @return - */ - def examples[A](a: Parser[A], completions: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = - if(a.valid) { - a.result match - { - case Some(av) => success( av ) - case None => - new ParserWithExamples(a, completions, maxNumberOfExamples, removeInvalidExamples) - } - } - else a + /** + * @param a the parser to decorate with a source of examples. All validation and parsing is delegated to this parser, + * only [[Parser.completions]] is modified. + * @param completions the source of examples when displaying completions to the user. + * @param maxNumberOfExamples limits the number of examples that the source of examples should return. This can + * prevent lengthy pauses and avoids bad interactive user experience. + * @param removeInvalidExamples indicates whether completion examples should be checked for validity (against the given parser). An + * exception is thrown if the example source contains no valid completion suggestions. + * @tparam A the type of values that are returned by the parser. 
+ * @return + */ + def examples[A](a: Parser[A], completions: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean): Parser[A] = + if (a.valid) { + a.result match { + case Some(av) => success(av) + case None => + new ParserWithExamples(a, completions, maxNumberOfExamples, removeInvalidExamples) + } + } else a - def matched(t: Parser[_], seen: Vector[Char] = Vector.empty, partial: Boolean = false): Parser[String] = - t match - { - case i: Invalid => if(partial && !seen.isEmpty) success(seen.mkString) else i - case _ => - if(t.result.isEmpty) - new MatchedString(t, seen, partial) - else - success(seen.mkString) - } + def matched(t: Parser[_], seen: Vector[Char] = Vector.empty, partial: Boolean = false): Parser[String] = + t match { + case i: Invalid => if (partial && !seen.isEmpty) success(seen.mkString) else i + case _ => + if (t.result.isEmpty) + new MatchedString(t, seen, partial) + else + success(seen.mkString) + } - /** Establishes delegate parser `t` as a single token of tab completion. - * When tab completion of part of this token is requested, the completions provided by the delegate `t` or a later derivative are appended to - * the prefix String already seen by this parser. */ - def token[T](t: Parser[T]): Parser[T] = token(t, TokenCompletions.default) + /** + * Establishes delegate parser `t` as a single token of tab completion. + * When tab completion of part of this token is requested, the completions provided by the delegate `t` or a later derivative are appended to + * the prefix String already seen by this parser. + */ + def token[T](t: Parser[T]): Parser[T] = token(t, TokenCompletions.default) - /** Establishes delegate parser `t` as a single token of tab completion. - * When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level. - * Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser.*/ - def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] = token(t, TokenCompletions.default.hideWhen(hide)) + /** + * Establishes delegate parser `t` as a single token of tab completion. + * When tab completion of part of this token is requested, no completions are returned if `hide` returns true for the current tab completion level. + * Otherwise, the completions provided by the delegate `t` or a later derivative are appended to the prefix String already seen by this parser. + */ + def token[T](t: Parser[T], hide: Int => Boolean): Parser[T] = token(t, TokenCompletions.default.hideWhen(hide)) - /** Establishes delegate parser `t` as a single token of tab completion. - * When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed. */ - def token[T](t: Parser[T], description: String): Parser[T] = token(t, TokenCompletions.displayOnly(description)) + /** + * Establishes delegate parser `t` as a single token of tab completion. + * When tab completion of part of this token is requested, `description` is displayed for suggestions and no completions are ever performed. + */ + def token[T](t: Parser[T], description: String): Parser[T] = token(t, TokenCompletions.displayOnly(description)) - /** Establishes delegate parser `t` as a single token of tab completion. 
- * When tab completion of part of this token is requested, `display` is used as the printed suggestion, but the completions from the delegate - * parser `t` are used to complete if unambiguous. */ - def tokenDisplay[T](t: Parser[T], display: String): Parser[T] = - token(t, TokenCompletions.overrideDisplay(display)) + /** + * Establishes delegate parser `t` as a single token of tab completion. + * When tab completion of part of this token is requested, `display` is used as the printed suggestion, but the completions from the delegate + * parser `t` are used to complete if unambiguous. + */ + def tokenDisplay[T](t: Parser[T], display: String): Parser[T] = + token(t, TokenCompletions.overrideDisplay(display)) - def token[T](t: Parser[T], complete: TokenCompletions): Parser[T] = - mkToken(t, "", complete) + def token[T](t: Parser[T], complete: TokenCompletions): Parser[T] = + mkToken(t, "", complete) - @deprecated("Use a different `token` overload.", "0.12.1") - def token[T](t: Parser[T], seen: String, track: Boolean, hide: Int => Boolean): Parser[T] = - { - val base = if(track) TokenCompletions.default else TokenCompletions.displayOnly(seen) - token(t, base.hideWhen(hide)) - } + @deprecated("Use a different `token` overload.", "0.12.1") + def token[T](t: Parser[T], seen: String, track: Boolean, hide: Int => Boolean): Parser[T] = + { + val base = if (track) TokenCompletions.default else TokenCompletions.displayOnly(seen) + token(t, base.hideWhen(hide)) + } - private[sbt] def mkToken[T](t: Parser[T], seen: String, complete: TokenCompletions): Parser[T] = - if(t.valid && !t.isTokenStart) - if(t.result.isEmpty) new TokenStart(t, seen, complete) else t - else - t + private[sbt] def mkToken[T](t: Parser[T], seen: String, complete: TokenCompletions): Parser[T] = + if (t.valid && !t.isTokenStart) + if (t.result.isEmpty) new TokenStart(t, seen, complete) else t + else + t - def homParser[A](a: Parser[A], b: Parser[A]): Parser[A] = (a,b) match { - case (Invalid(af), Invalid(bf)) => Invalid(af ++ bf) - case (Invalid(_), bv) => bv - case (av, Invalid(_)) => av - case (av, bv) => new HomParser(a, b) - } + def homParser[A](a: Parser[A], b: Parser[A]): Parser[A] = (a, b) match { + case (Invalid(af), Invalid(bf)) => Invalid(af ++ bf) + case (Invalid(_), bv) => bv + case (av, Invalid(_)) => av + case (av, bv) => new HomParser(a, b) + } - @deprecated("Explicitly specify the failure message.", "0.12.2") - def not(p: Parser[_]): Parser[Unit] = not(p, "Excluded.") + @deprecated("Explicitly specify the failure message.", "0.12.2") + def not(p: Parser[_]): Parser[Unit] = not(p, "Excluded.") - def not(p: Parser[_], failMessage: String): Parser[Unit] = p.result match { - case None => new Not(p, failMessage) - case Some(_) => failure(failMessage) - } + def not(p: Parser[_], failMessage: String): Parser[Unit] = p.result match { + case None => new Not(p, failMessage) + case Some(_) => failure(failMessage) + } - def oneOf[T](p: Seq[Parser[T]]): Parser[T] = p.reduceLeft(_ | _) - def seq[T](p: Seq[Parser[T]]): Parser[Seq[T]] = seq0(p, Nil) - def seq0[T](p: Seq[Parser[T]], errors: => Seq[String]): Parser[Seq[T]] = - { - val (newErrors, valid) = separate(p) { case Invalid(f) => Left(f.errors); case ok => Right(ok) } - def combinedErrors = errors ++ newErrors.flatten - if(valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors) - } + def oneOf[T](p: Seq[Parser[T]]): Parser[T] = p.reduceLeft(_ | _) + def seq[T](p: Seq[Parser[T]]): Parser[Seq[T]] = seq0(p, Nil) + def seq0[T](p: Seq[Parser[T]], errors: 
=> Seq[String]): Parser[Seq[T]] = + { + val (newErrors, valid) = separate(p) { case Invalid(f) => Left(f.errors); case ok => Right(ok) } + def combinedErrors = errors ++ newErrors.flatten + if (valid.isEmpty) invalid(combinedErrors) else new ParserSeq(valid, combinedErrors) + } - def stringLiteral(s: String, start: Int): Parser[String] = - { - val len = s.length - if(len == 0) sys.error("String literal cannot be empty") else if(start >= len) success(s) else new StringLiteral(s, start) - } + def stringLiteral(s: String, start: Int): Parser[String] = + { + val len = s.length + if (len == 0) sys.error("String literal cannot be empty") else if (start >= len) success(s) else new StringLiteral(s, start) + } } -sealed trait ValidParser[T] extends Parser[T] -{ - final def valid = true - final def failure = None - final def ifValid[S](p: => Parser[S]): Parser[S] = p +sealed trait ValidParser[T] extends Parser[T] { + final def valid = true + final def failure = None + final def ifValid[S](p: => Parser[S]): Parser[S] = p } -private final case class Invalid(fail: Failure) extends Parser[Nothing] -{ - def failure = Some(fail) - def result = None - def resultEmpty = fail - def derive(c: Char) = sys.error("Invalid.") - def completions(level: Int) = Completions.nil - override def toString = fail.errors.mkString("; ") - def valid = false - def ifValid[S](p: => Parser[S]): Parser[S] = this +private final case class Invalid(fail: Failure) extends Parser[Nothing] { + def failure = Some(fail) + def result = None + def resultEmpty = fail + def derive(c: Char) = sys.error("Invalid.") + def completions(level: Int) = Completions.nil + override def toString = fail.errors.mkString("; ") + def valid = false + def ifValid[S](p: => Parser[S]): Parser[S] = this } -private final case class SoftInvalid(fail: Failure) extends ValidParser[Nothing] -{ - def result = None - def resultEmpty = fail - def derive(c: Char) = Invalid(fail) - def completions(level: Int) = Completions.nil - override def toString = fail.errors.mkString("; ") +private final case class SoftInvalid(fail: Failure) extends ValidParser[Nothing] { + def result = None + def resultEmpty = fail + def derive(c: Char) = Invalid(fail) + def completions(level: Int) = Completions.nil + override def toString = fail.errors.mkString("; ") } -private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] -{ - def result = try { a.result } catch { case e: Exception => None } - def resultEmpty = try { a.resultEmpty } catch { case e: Exception => fail(e) } - def derive(c: Char) = try { trapAndFail(a derive c) } catch { case e: Exception => Invalid(fail(e)) } - def completions(level: Int) = try { a.completions(level) } catch { case e: Exception => Completions.nil } - override def toString = "trap(" + a + ")" - override def isTokenStart = a.isTokenStart - private[this] def fail(e: Exception): Failure = mkFailure(e.toString) +private final class TrapAndFail[A](a: Parser[A]) extends ValidParser[A] { + def result = try { a.result } catch { case e: Exception => None } + def resultEmpty = try { a.resultEmpty } catch { case e: Exception => fail(e) } + def derive(c: Char) = try { trapAndFail(a derive c) } catch { case e: Exception => Invalid(fail(e)) } + def completions(level: Int) = try { a.completions(level) } catch { case e: Exception => Completions.nil } + override def toString = "trap(" + a + ")" + override def isTokenStart = a.isTokenStart + private[this] def fail(e: Exception): Failure = mkFailure(e.toString) } -private final class OnFailure[A](a: Parser[A], 
message: String) extends ValidParser[A] -{ - def result = a.result - def resultEmpty = a.resultEmpty match { case f: Failure => mkFailure(message); case v: Value[A] => v } - def derive(c: Char) = onFailure(a derive c, message) - def completions(level: Int) = a.completions(level) - override def toString = "(" + a + " !!! \"" + message + "\" )" - override def isTokenStart = a.isTokenStart +private final class OnFailure[A](a: Parser[A], message: String) extends ValidParser[A] { + def result = a.result + def resultEmpty = a.resultEmpty match { case f: Failure => mkFailure(message); case v: Value[A] => v } + def derive(c: Char) = onFailure(a derive c, message) + def completions(level: Int) = a.completions(level) + override def toString = "(" + a + " !!! \"" + message + "\" )" + override def isTokenStart = a.isTokenStart } -private final class SeqParser[A,B](a: Parser[A], b: Parser[B]) extends ValidParser[(A,B)] -{ - lazy val result = tuple(a.result,b.result) - lazy val resultEmpty = a.resultEmpty seq b.resultEmpty - def derive(c: Char) = - { - val common = a.derive(c) ~ b - a.resultEmpty match - { - case Value(av) => common | b.derive(c).map(br => (av,br)) - case _: Failure => common - } - } - def completions(level: Int) = a.completions(level) x b.completions(level) - override def toString = "(" + a + " ~ " + b + ")" +private final class SeqParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[(A, B)] { + lazy val result = tuple(a.result, b.result) + lazy val resultEmpty = a.resultEmpty seq b.resultEmpty + def derive(c: Char) = + { + val common = a.derive(c) ~ b + a.resultEmpty match { + case Value(av) => common | b.derive(c).map(br => (av, br)) + case _: Failure => common + } + } + def completions(level: Int) = a.completions(level) x b.completions(level) + override def toString = "(" + a + " ~ " + b + ")" } -private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser[A] -{ - lazy val result = tuple(a.result, b.result) map (_._1) - def derive(c: Char) = (a derive c) | (b derive c) - lazy val resultEmpty = a.resultEmpty or b.resultEmpty - def completions(level: Int) = a.completions(level) ++ b.completions(level) - override def toString = "(" + a + " | " + b + ")" +private final class HomParser[A](a: Parser[A], b: Parser[A]) extends ValidParser[A] { + lazy val result = tuple(a.result, b.result) map (_._1) + def derive(c: Char) = (a derive c) | (b derive c) + lazy val resultEmpty = a.resultEmpty or b.resultEmpty + def completions(level: Int) = a.completions(level) ++ b.completions(level) + override def toString = "(" + a + " | " + b + ")" } -private final class HetParser[A,B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A,B]] -{ - lazy val result = tuple(a.result, b.result) map { case (a,b) => Left(a) } - def derive(c: Char) = (a derive c) || (b derive c) - lazy val resultEmpty = a.resultEmpty either b.resultEmpty - def completions(level: Int) = a.completions(level) ++ b.completions(level) - override def toString = "(" + a + " || " + b + ")" +private final class HetParser[A, B](a: Parser[A], b: Parser[B]) extends ValidParser[Either[A, B]] { + lazy val result = tuple(a.result, b.result) map { case (a, b) => Left(a) } + def derive(c: Char) = (a derive c) || (b derive c) + lazy val resultEmpty = a.resultEmpty either b.resultEmpty + def completions(level: Int) = a.completions(level) ++ b.completions(level) + override def toString = "(" + a + " || " + b + ")" } -private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String]) extends ValidParser[Seq[T]] 
-{ - assert(!a.isEmpty) - lazy val resultEmpty: Result[Seq[T]] = - { - val res = a.map(_.resultEmpty) - val (failures, values) = separate(res)(_.toEither) -// if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors) - if(values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors) - } - def result = { - val success = a.flatMap(_.result) - if(success.length == a.length) Some(success) else None - } - def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _) - def derive(c: Char) = seq0(a.map(_ derive c), errors) - override def toString = "seq(" + a + ")" +private final class ParserSeq[T](a: Seq[Parser[T]], errors: => Seq[String]) extends ValidParser[Seq[T]] { + assert(!a.isEmpty) + lazy val resultEmpty: Result[Seq[T]] = + { + val res = a.map(_.resultEmpty) + val (failures, values) = separate(res)(_.toEither) + // if(failures.isEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors) + if (values.nonEmpty) Value(values) else mkFailures(failures.flatMap(_()) ++ errors) + } + def result = { + val success = a.flatMap(_.result) + if (success.length == a.length) Some(success) else None + } + def completions(level: Int) = a.map(_.completions(level)).reduceLeft(_ ++ _) + def derive(c: Char) = seq0(a.map(_ derive c), errors) + override def toString = "seq(" + a + ")" } -private final class BindParser[A,B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] -{ - lazy val result = a.result flatMap { av => f(av).result } - lazy val resultEmpty = a.resultEmpty flatMap { av => f(av).resultEmpty } - def completions(level: Int) = - a.completions(level) flatMap { c => - apply(a)(c.append).resultEmpty match { - case _: Failure => Completions.strict(Set.empty + c) - case Value(av) => c x f(av).completions(level) - } - } +private final class BindParser[A, B](a: Parser[A], f: A => Parser[B]) extends ValidParser[B] { + lazy val result = a.result flatMap { av => f(av).result } + lazy val resultEmpty = a.resultEmpty flatMap { av => f(av).resultEmpty } + def completions(level: Int) = + a.completions(level) flatMap { c => + apply(a)(c.append).resultEmpty match { + case _: Failure => Completions.strict(Set.empty + c) + case Value(av) => c x f(av).completions(level) + } + } - def derive(c: Char) = - { - val common = a derive c flatMap f - a.resultEmpty match - { - case Value(av) => common | derive1(f(av), c) - case _: Failure => common - } - } - override def isTokenStart = a.isTokenStart - override def toString = "bind(" + a + ")" + def derive(c: Char) = + { + val common = a derive c flatMap f + a.resultEmpty match { + case Value(av) => common | derive1(f(av), c) + case _: Failure => common + } + } + override def isTokenStart = a.isTokenStart + override def toString = "bind(" + a + ")" } -private final class MapParser[A,B](a: Parser[A], f: A => B) extends ValidParser[B] -{ - lazy val result = a.result map f - lazy val resultEmpty = a.resultEmpty map f - def derive(c: Char) = (a derive c) map f - def completions(level: Int) = a.completions(level) - override def isTokenStart = a.isTokenStart - override def toString = "map(" + a + ")" +private final class MapParser[A, B](a: Parser[A], f: A => B) extends ValidParser[B] { + lazy val result = a.result map f + lazy val resultEmpty = a.resultEmpty map f + def derive(c: Char) = (a derive c) map f + def completions(level: Int) = a.completions(level) + override def isTokenStart = a.isTokenStart + override def toString = "map(" + a + ")" } -private final class Filter[T](p: Parser[T], f: T => Boolean, 
seen: String, msg: String => String) extends ValidParser[T] -{ - def filterResult(r: Result[T]) = r.filter(f, msg(seen)) - lazy val result = p.result filter f - lazy val resultEmpty = filterResult(p.resultEmpty) - def derive(c: Char) = filterParser(p derive c, f, seen + c, msg) - def completions(level: Int) = p.completions(level) filterS { s => filterResult(apply(p)(s).resultEmpty).isValid } - override def toString = "filter(" + p + ")" - override def isTokenStart = p.isTokenStart +private final class Filter[T](p: Parser[T], f: T => Boolean, seen: String, msg: String => String) extends ValidParser[T] { + def filterResult(r: Result[T]) = r.filter(f, msg(seen)) + lazy val result = p.result filter f + lazy val resultEmpty = filterResult(p.resultEmpty) + def derive(c: Char) = filterParser(p derive c, f, seen + c, msg) + def completions(level: Int) = p.completions(level) filterS { s => filterResult(apply(p)(s).resultEmpty).isValid } + override def toString = "filter(" + p + ")" + override def isTokenStart = p.isTokenStart } -private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean) extends ValidParser[String] -{ - lazy val seen = seenV.mkString - def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial) - def completions(level: Int) = delegate.completions(level) - def result = if(delegate.result.isDefined) Some(seen) else None - def resultEmpty = delegate.resultEmpty match { case f: Failure if !partial => f; case _ => Value(seen) } - override def isTokenStart = delegate.isTokenStart - override def toString = "matched(" + partial + ", " + seen + ", " + delegate + ")" +private final class MatchedString(delegate: Parser[_], seenV: Vector[Char], partial: Boolean) extends ValidParser[String] { + lazy val seen = seenV.mkString + def derive(c: Char) = matched(delegate derive c, seenV :+ c, partial) + def completions(level: Int) = delegate.completions(level) + def result = if (delegate.result.isDefined) Some(seen) else None + def resultEmpty = delegate.resultEmpty match { case f: Failure if !partial => f; case _ => Value(seen) } + override def isTokenStart = delegate.isTokenStart + override def toString = "matched(" + partial + ", " + seen + ", " + delegate + ")" } -private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions) extends ValidParser[T] -{ - def derive(c: Char) = mkToken( delegate derive c, seen + c, complete) - def completions(level: Int) = complete match { - case dc: TokenCompletions.Delegating => dc.completions(seen, level, delegate.completions(level)) - case fc: TokenCompletions.Fixed => fc.completions(seen, level) - } - def result = delegate.result - def resultEmpty = delegate.resultEmpty - override def isTokenStart = true - override def toString = "token('" + complete + ", " + delegate + ")" +private final class TokenStart[T](delegate: Parser[T], seen: String, complete: TokenCompletions) extends ValidParser[T] { + def derive(c: Char) = mkToken(delegate derive c, seen + c, complete) + def completions(level: Int) = complete match { + case dc: TokenCompletions.Delegating => dc.completions(seen, level, delegate.completions(level)) + case fc: TokenCompletions.Fixed => fc.completions(seen, level) + } + def result = delegate.result + def resultEmpty = delegate.resultEmpty + override def isTokenStart = true + override def toString = "token('" + complete + ", " + delegate + ")" } -private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] -{ - lazy val result = tuple(a.result,b.result) map { 
_._1 } - def derive(c: Char) = (a derive c) & (b derive c) - def completions(level: Int) = a.completions(level).filterS(s => apply(b)(s).resultEmpty.isValid ) - lazy val resultEmpty = a.resultEmpty && b.resultEmpty - override def toString = "(%s) && (%s)".format(a,b) +private final class And[T](a: Parser[T], b: Parser[_]) extends ValidParser[T] { + lazy val result = tuple(a.result, b.result) map { _._1 } + def derive(c: Char) = (a derive c) & (b derive c) + def completions(level: Int) = a.completions(level).filterS(s => apply(b)(s).resultEmpty.isValid) + lazy val resultEmpty = a.resultEmpty && b.resultEmpty + override def toString = "(%s) && (%s)".format(a, b) } -private final class Not(delegate: Parser[_], failMessage: String) extends ValidParser[Unit] -{ - def derive(c: Char) = if(delegate.valid) not(delegate derive c, failMessage) else this - def completions(level: Int) = Completions.empty - def result = None - lazy val resultEmpty = delegate.resultEmpty match { - case f: Failure => Value(()) - case v: Value[_] => mkFailure(failMessage) - } - override def toString = " -(%s)".format(delegate) +private final class Not(delegate: Parser[_], failMessage: String) extends ValidParser[Unit] { + def derive(c: Char) = if (delegate.valid) not(delegate derive c, failMessage) else this + def completions(level: Int) = Completions.empty + def result = None + lazy val resultEmpty = delegate.resultEmpty match { + case f: Failure => Value(()) + case v: Value[_] => mkFailure(failMessage) + } + override def toString = " -(%s)".format(delegate) } /** @@ -739,115 +744,105 @@ private final class Not(delegate: Parser[_], failMessage: String) extends ValidP * @param removeInvalidExamples indicates whether to remove examples that are deemed invalid by the delegate parser. * @tparam T the type of value produced by the parser. 
*/ -private final class ParserWithExamples[T](delegate: Parser[T], exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean) extends ValidParser[T] -{ - def derive(c: Char) = - examples(delegate derive c, exampleSource.withAddedPrefix(c.toString), maxNumberOfExamples, removeInvalidExamples) +private final class ParserWithExamples[T](delegate: Parser[T], exampleSource: ExampleSource, maxNumberOfExamples: Int, removeInvalidExamples: Boolean) extends ValidParser[T] { + def derive(c: Char) = + examples(delegate derive c, exampleSource.withAddedPrefix(c.toString), maxNumberOfExamples, removeInvalidExamples) - def result = delegate.result + def result = delegate.result - lazy val resultEmpty = delegate.resultEmpty + lazy val resultEmpty = delegate.resultEmpty - def completions(level: Int) = { - if(exampleSource().isEmpty) - if(resultEmpty.isValid) Completions.nil else Completions.empty - else { - val examplesBasedOnTheResult = filteredExamples.take(maxNumberOfExamples).toSet - Completions(examplesBasedOnTheResult.map(ex => Completion.suggestion(ex))) - } - } + def completions(level: Int) = { + if (exampleSource().isEmpty) + if (resultEmpty.isValid) Completions.nil else Completions.empty + else { + val examplesBasedOnTheResult = filteredExamples.take(maxNumberOfExamples).toSet + Completions(examplesBasedOnTheResult.map(ex => Completion.suggestion(ex))) + } + } - override def toString = "examples(" + delegate + ", " + exampleSource().take(2).toList + ")" + override def toString = "examples(" + delegate + ", " + exampleSource().take(2).toList + ")" - private def filteredExamples: Iterable[String] = { - if (removeInvalidExamples) - exampleSource().filter(isExampleValid) - else - exampleSource() - } + private def filteredExamples: Iterable[String] = { + if (removeInvalidExamples) + exampleSource().filter(isExampleValid) + else + exampleSource() + } - private def isExampleValid(example: String): Boolean = { - apply(delegate)(example).resultEmpty.isValid - } + private def isExampleValid(example: String): Boolean = { + apply(delegate)(example).resultEmpty.isValid + } } -private final class StringLiteral(str: String, start: Int) extends ValidParser[String] -{ - assert(0 <= start && start < str.length) - def failMsg = "Expected '" + str + "'" - def resultEmpty = mkFailure(failMsg) - def result = None - def derive(c: Char) = if(str.charAt(start) == c) stringLiteral(str, start+1) else new Invalid(resultEmpty) - def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start))) - override def toString = '"' + str + '"' +private final class StringLiteral(str: String, start: Int) extends ValidParser[String] { + assert(0 <= start && start < str.length) + def failMsg = "Expected '" + str + "'" + def resultEmpty = mkFailure(failMsg) + def result = None + def derive(c: Char) = if (str.charAt(start) == c) stringLiteral(str, start + 1) else new Invalid(resultEmpty) + def completions(level: Int) = Completions.single(Completion.suggestion(str.substring(start))) + override def toString = '"' + str + '"' } -private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] -{ - def result = None - def resultEmpty = mkFailure("Expected " + label) - def derive(c: Char) = if( f(c) ) success(c) else Invalid(resultEmpty) - def completions(level: Int) = Completions.empty - override def toString = "class(" + label + ")" +private final class CharacterClass(f: Char => Boolean, label: String) extends ValidParser[Char] { + def result = None + def 
resultEmpty = mkFailure("Expected " + label) + def derive(c: Char) = if (f(c)) success(c) else Invalid(resultEmpty) + def completions(level: Int) = Completions.empty + override def toString = "class(" + label + ")" } -private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] -{ - def result = delegate.result map some.fn - def resultEmpty = Value(None) - def derive(c: Char) = (delegate derive c).map(some.fn) - def completions(level: Int) = Completion.empty +: delegate.completions(level) - override def toString = delegate.toString + "?" +private final class Optional[T](delegate: Parser[T]) extends ValidParser[Option[T]] { + def result = delegate.result map some.fn + def resultEmpty = Value(None) + def derive(c: Char) = (delegate derive c).map(some.fn) + def completions(level: Int) = Completion.empty +: delegate.completions(level) + override def toString = delegate.toString + "?" } -private final class Repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, accumulatedReverse: List[T]) extends ValidParser[Seq[T]] -{ - assume(0 <= min, "Minimum occurences must be non-negative") - assume(max >= min, "Minimum occurences must be less than the maximum occurences") +private final class Repeat[T](partial: Option[Parser[T]], repeated: Parser[T], min: Int, max: UpperBound, accumulatedReverse: List[T]) extends ValidParser[Seq[T]] { + assume(0 <= min, "Minimum occurences must be non-negative") + assume(max >= min, "Minimum occurences must be less than the maximum occurences") - def derive(c: Char) = - partial match - { - case Some(part) => - val partD = repeat(Some(part derive c), repeated, min, max, accumulatedReverse) - part.resultEmpty match - { - case Value(pv) => partD | repeatDerive(c, pv :: accumulatedReverse) - case _: Failure => partD - } - case None => repeatDerive(c, accumulatedReverse) - } + def derive(c: Char) = + partial match { + case Some(part) => + val partD = repeat(Some(part derive c), repeated, min, max, accumulatedReverse) + part.resultEmpty match { + case Value(pv) => partD | repeatDerive(c, pv :: accumulatedReverse) + case _: Failure => partD + } + case None => repeatDerive(c, accumulatedReverse) + } - def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] = repeat(Some(repeated derive c), repeated, (min - 1) max 0, max.decrement, accRev) + def repeatDerive(c: Char, accRev: List[T]): Parser[Seq[T]] = repeat(Some(repeated derive c), repeated, (min - 1) max 0, max.decrement, accRev) - def completions(level: Int) = - { - def pow(comp: Completions, exp: Completions, n: Int): Completions = - if(n == 1) comp else pow(comp x exp, exp, n - 1) + def completions(level: Int) = + { + def pow(comp: Completions, exp: Completions, n: Int): Completions = + if (n == 1) comp else pow(comp x exp, exp, n - 1) - val repC = repeated.completions(level) - val fin = if(min == 0) Completion.empty +: repC else pow(repC, repC, min) - partial match - { - case Some(p) => p.completions(level) x fin - case None => fin - } - } - def result = None - lazy val resultEmpty: Result[Seq[T]] = - { - val partialAccumulatedOption = - partial match - { - case None => Value(accumulatedReverse) - case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse) - } - (partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _) - } - private def repeatedParseEmpty: Result[List[T]] = - { - if(min == 0) - Value(Nil) - else - // forced determinism - for(value <- repeated.resultEmpty) yield - makeList(min, value) - } - override def toString = 
"repeat(" + min + "," + max +"," + partial + "," + repeated + ")" + val repC = repeated.completions(level) + val fin = if (min == 0) Completion.empty +: repC else pow(repC, repC, min) + partial match { + case Some(p) => p.completions(level) x fin + case None => fin + } + } + def result = None + lazy val resultEmpty: Result[Seq[T]] = + { + val partialAccumulatedOption = + partial match { + case None => Value(accumulatedReverse) + case Some(partialPattern) => partialPattern.resultEmpty.map(_ :: accumulatedReverse) + } + (partialAccumulatedOption app repeatedParseEmpty)(_ reverse_::: _) + } + private def repeatedParseEmpty: Result[List[T]] = + { + if (min == 0) + Value(Nil) + else + // forced determinism + for (value <- repeated.resultEmpty) yield makeList(min, value) + } + override def toString = "repeat(" + min + "," + max + "," + partial + "," + repeated + ")" } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/Parsers.scala b/util/complete/src/main/scala/sbt/complete/Parsers.scala index cb1b15d1a..3183929e8 100644 --- a/util/complete/src/main/scala/sbt/complete/Parsers.scala +++ b/util/complete/src/main/scala/sbt/complete/Parsers.scala @@ -3,244 +3,266 @@ */ package sbt.complete - import Parser._ - import java.io.File - import java.net.URI - import java.lang.Character.{getType, MATH_SYMBOL, OTHER_SYMBOL, DASH_PUNCTUATION, OTHER_PUNCTUATION, MODIFIER_SYMBOL, CURRENCY_SYMBOL} +import Parser._ +import java.io.File +import java.net.URI +import java.lang.Character.{ getType, MATH_SYMBOL, OTHER_SYMBOL, DASH_PUNCTUATION, OTHER_PUNCTUATION, MODIFIER_SYMBOL, CURRENCY_SYMBOL } /** Provides standard implementations of commonly useful [[Parser]]s. */ -trait Parsers -{ - /** Matches the end of input, providing no useful result on success. */ - lazy val EOF = not(any) +trait Parsers { + /** Matches the end of input, providing no useful result on success. */ + lazy val EOF = not(any) - /** Parses any single character and provides that character as the result. */ - lazy val any: Parser[Char] = charClass(_ => true, "any character") + /** Parses any single character and provides that character as the result. */ + lazy val any: Parser[Char] = charClass(_ => true, "any character") - /** Set that contains each digit in a String representation.*/ - lazy val DigitSet = Set("0","1","2","3","4","5","6","7","8","9") + /** Set that contains each digit in a String representation.*/ + lazy val DigitSet = Set("0", "1", "2", "3", "4", "5", "6", "7", "8", "9") - /** Parses any single digit and provides that digit as a Char as the result.*/ - lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet + /** Parses any single digit and provides that digit as a Char as the result.*/ + lazy val Digit = charClass(_.isDigit, "digit") examples DigitSet - /** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */ - lazy val HexDigitSet = Set('0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F') + /** Set containing Chars for hexadecimal digits 0-9 and A-F (but not a-f). */ + lazy val HexDigitSet = Set('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F') - /** Parses a single hexadecimal digit (0-9, a-f, A-F). */ - lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(_.toString) + /** Parses a single hexadecimal digit (0-9, a-f, A-F). 
*/ + lazy val HexDigit = charClass(c => HexDigitSet(c.toUpper), "hex digit") examples HexDigitSet.map(_.toString) - /** Parses a single letter, according to Char.isLetter, into a Char. */ - lazy val Letter = charClass(_.isLetter, "letter") + /** Parses a single letter, according to Char.isLetter, into a Char. */ + lazy val Letter = charClass(_.isLetter, "letter") - /** Parses the first Char in an sbt identifier, which must be a [[Letter]].*/ - def IDStart = Letter + /** Parses the first Char in an sbt identifier, which must be a [[Letter]].*/ + def IDStart = Letter - /** Parses an identifier Char other than the first character. This includes letters, digits, dash `-`, and underscore `_`.*/ - lazy val IDChar = charClass(isIDChar, "ID character") + /** Parses an identifier Char other than the first character. This includes letters, digits, dash `-`, and underscore `_`.*/ + lazy val IDChar = charClass(isIDChar, "ID character") - /** Parses an identifier String, which must start with [[IDStart]] and contain zero or more [[IDChar]]s after that. */ - lazy val ID = identifier(IDStart, IDChar) + /** Parses an identifier String, which must start with [[IDStart]] and contain zero or more [[IDChar]]s after that. */ + lazy val ID = identifier(IDStart, IDChar) - /** Parses a single operator Char, as allowed by [[isOpChar]]. */ - lazy val OpChar = charClass(isOpChar, "symbol") + /** Parses a single operator Char, as allowed by [[isOpChar]]. */ + lazy val OpChar = charClass(isOpChar, "symbol") - /** Parses a non-empty operator String, which consists only of characters allowed by [[OpChar]]. */ - lazy val Op = OpChar.+.string + /** Parses a non-empty operator String, which consists only of characters allowed by [[OpChar]]. */ + lazy val Op = OpChar.+.string - /** Parses either an operator String defined by [[Op]] or a non-symbolic identifier defined by [[ID]]. */ - lazy val OpOrID = ID | Op + /** Parses either an operator String defined by [[Op]] or a non-symbolic identifier defined by [[ID]]. */ + lazy val OpOrID = ID | Op - /** Parses a single, non-symbolic Scala identifier Char. Valid characters are letters, digits, and the underscore character `_`. */ - lazy val ScalaIDChar = charClass(isScalaIDChar, "Scala identifier character") + /** Parses a single, non-symbolic Scala identifier Char. Valid characters are letters, digits, and the underscore character `_`. */ + lazy val ScalaIDChar = charClass(isScalaIDChar, "Scala identifier character") - /** Parses a non-symbolic Scala-like identifier. The identifier must start with [[IDStart]] and contain zero or more [[ScalaIDChar]]s after that.*/ - lazy val ScalaID = identifier(IDStart, ScalaIDChar) + /** Parses a non-symbolic Scala-like identifier. 
The identifier must start with [[IDStart]] and contain zero or more [[ScalaIDChar]]s after that.*/ + lazy val ScalaID = identifier(IDStart, ScalaIDChar) - /** Parses a String that starts with `start` and is followed by zero or more characters parsed by `rep`.*/ - def identifier(start: Parser[Char], rep: Parser[Char]): Parser[String] = - start ~ rep.* map { case x ~ xs => (x +: xs).mkString } + /** Parses a String that starts with `start` and is followed by zero or more characters parsed by `rep`.*/ + def identifier(start: Parser[Char], rep: Parser[Char]): Parser[String] = + start ~ rep.* map { case x ~ xs => (x +: xs).mkString } - def opOrIDSpaced(s: String): Parser[Char] = - if(DefaultParsers.matches(ID, s)) - OpChar | SpaceClass - else if(DefaultParsers.matches(Op, s)) - IDChar | SpaceClass - else - any + def opOrIDSpaced(s: String): Parser[Char] = + if (DefaultParsers.matches(ID, s)) + OpChar | SpaceClass + else if (DefaultParsers.matches(Op, s)) + IDChar | SpaceClass + else + any - /** Returns true if `c` an operator character. */ - def isOpChar(c: Char) = !isDelimiter(c) && isOpType(getType(c)) - def isOpType(cat: Int) = cat match { case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL | CURRENCY_SYMBOL => true; case _ => false } - /** Returns true if `c` is a dash `-`, a letter, digit, or an underscore `_`. */ - def isIDChar(c: Char) = isScalaIDChar(c) || c == '-' + /** Returns true if `c` an operator character. */ + def isOpChar(c: Char) = !isDelimiter(c) && isOpType(getType(c)) + def isOpType(cat: Int) = cat match { case MATH_SYMBOL | OTHER_SYMBOL | DASH_PUNCTUATION | OTHER_PUNCTUATION | MODIFIER_SYMBOL | CURRENCY_SYMBOL => true; case _ => false } + /** Returns true if `c` is a dash `-`, a letter, digit, or an underscore `_`. */ + def isIDChar(c: Char) = isScalaIDChar(c) || c == '-' - /** Returns true if `c` is a letter, digit, or an underscore `_`. */ - def isScalaIDChar(c: Char) = c.isLetterOrDigit || c == '_' + /** Returns true if `c` is a letter, digit, or an underscore `_`. */ + def isScalaIDChar(c: Char) = c.isLetterOrDigit || c == '_' - def isDelimiter(c: Char) = c match { case '`' | '\'' | '\"' | /*';' | */',' | '.' => true ; case _ => false } + def isDelimiter(c: Char) = c match { case '`' | '\'' | '\"' | /*';' | */ ',' | '.' => true; case _ => false } - /** Matches a single character that is not a whitespace character. */ - lazy val NotSpaceClass = charClass(!_.isWhitespace, "non-whitespace character") + /** Matches a single character that is not a whitespace character. */ + lazy val NotSpaceClass = charClass(!_.isWhitespace, "non-whitespace character") - /** Matches a single whitespace character, as determined by Char.isWhitespace.*/ - lazy val SpaceClass = charClass(_.isWhitespace, "whitespace character") + /** Matches a single whitespace character, as determined by Char.isWhitespace.*/ + lazy val SpaceClass = charClass(_.isWhitespace, "whitespace character") - /** Matches a non-empty String consisting of non-whitespace characters. */ - lazy val NotSpace = NotSpaceClass.+.string + /** Matches a non-empty String consisting of non-whitespace characters. */ + lazy val NotSpace = NotSpaceClass.+.string - /** Matches a possibly empty String consisting of non-whitespace characters. */ - lazy val OptNotSpace = NotSpaceClass.*.string + /** Matches a possibly empty String consisting of non-whitespace characters. */ + lazy val OptNotSpace = NotSpaceClass.*.string - /** Matches a non-empty String consisting of whitespace characters. 
- * The suggested tab completion is a single, constant space character.*/ - lazy val Space = SpaceClass.+.examples(" ") + /** + * Matches a non-empty String consisting of whitespace characters. + * The suggested tab completion is a single, constant space character. + */ + lazy val Space = SpaceClass.+.examples(" ") - /** Matches a possibly empty String consisting of whitespace characters. - * The suggested tab completion is a single, constant space character.*/ - lazy val OptSpace = SpaceClass.*.examples(" ") + /** + * Matches a possibly empty String consisting of whitespace characters. + * The suggested tab completion is a single, constant space character. + */ + lazy val OptSpace = SpaceClass.*.examples(" ") - /** Parses a non-empty String that contains only valid URI characters, as defined by [[URIChar]].*/ - lazy val URIClass = URIChar.+.string !!! "Invalid URI" + /** Parses a non-empty String that contains only valid URI characters, as defined by [[URIChar]].*/ + lazy val URIClass = URIChar.+.string !!! "Invalid URI" - /** Triple-quotes, as used for verbatim quoting.*/ - lazy val VerbatimDQuotes = "\"\"\"" + /** Triple-quotes, as used for verbatim quoting.*/ + lazy val VerbatimDQuotes = "\"\"\"" - /** Double quote character. */ - lazy val DQuoteChar = '\"' + /** Double quote character. */ + lazy val DQuoteChar = '\"' - /** Backslash character. */ - lazy val BackslashChar = '\\' + /** Backslash character. */ + lazy val BackslashChar = '\\' - /** Matches a single double quote. */ - lazy val DQuoteClass = charClass(_ == DQuoteChar, "double-quote character") + /** Matches a single double quote. */ + lazy val DQuoteClass = charClass(_ == DQuoteChar, "double-quote character") - /** Matches any character except a double quote or whitespace. */ - lazy val NotDQuoteSpaceClass = - charClass({ c: Char => (c != DQuoteChar) && !c.isWhitespace }, "non-double-quote-space character") + /** Matches any character except a double quote or whitespace. */ + lazy val NotDQuoteSpaceClass = + charClass({ c: Char => (c != DQuoteChar) && !c.isWhitespace }, "non-double-quote-space character") - /** Matches any character except a double quote or backslash. */ - lazy val NotDQuoteBackslashClass = - charClass({ c: Char => (c != DQuoteChar) && (c != BackslashChar) }, "non-double-quote-backslash character") + /** Matches any character except a double quote or backslash. */ + lazy val NotDQuoteBackslashClass = + charClass({ c: Char => (c != DQuoteChar) && (c != BackslashChar) }, "non-double-quote-backslash character") - /** Matches a single character that is valid somewhere in a URI. */ - lazy val URIChar = charClass(alphanum) | chars("_-!.~'()*,;:$&+=?/[]@%#") + /** Matches a single character that is valid somewhere in a URI. */ + lazy val URIChar = charClass(alphanum) | chars("_-!.~'()*,;:$&+=?/[]@%#") - /** Returns true if `c` is an ASCII letter or digit. */ - def alphanum(c: Char) = ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') + /** Returns true if `c` is an ASCII letter or digit. */ + def alphanum(c: Char) = ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') - /** - * @param base the directory used for completion proposals (when the user presses the TAB key). Only paths under this - * directory will be proposed. - * @return the file that was parsed from the input string. The returned path may or may not exist. 
- */ - def fileParser(base: File): Parser[File] = - OptSpace ~> StringBasic - .examples(new FileExamples(base)) - .map(new File(_)) + /** + * @param base the directory used for completion proposals (when the user presses the TAB key). Only paths under this + * directory will be proposed. + * @return the file that was parsed from the input string. The returned path may or may not exist. + */ + def fileParser(base: File): Parser[File] = + OptSpace ~> StringBasic + .examples(new FileExamples(base)) + .map(new File(_)) - /** Parses a port number. Currently, this accepts any integer and presents a tab completion suggestion of ``. */ - lazy val Port = token(IntBasic, "") + /** Parses a port number. Currently, this accepts any integer and presents a tab completion suggestion of ``. */ + lazy val Port = token(IntBasic, "") - /** Parses a signed integer. */ - lazy val IntBasic = mapOrFail( '-'.? ~ Digit.+ )( Function.tupled(toInt) ) + /** Parses a signed integer. */ + lazy val IntBasic = mapOrFail('-'.? ~ Digit.+)(Function.tupled(toInt)) - /** Parses an unsigned integer. */ - lazy val NatBasic = mapOrFail( Digit.+ )( _.mkString.toInt ) + /** Parses an unsigned integer. */ + lazy val NatBasic = mapOrFail(Digit.+)(_.mkString.toInt) - private[this] def toInt(neg: Option[Char], digits: Seq[Char]): Int = - (neg.toSeq ++ digits).mkString.toInt + private[this] def toInt(neg: Option[Char], digits: Seq[Char]): Int = + (neg.toSeq ++ digits).mkString.toInt - /** Parses the lower-case values `true` and `false` into their respesct Boolean values. */ - lazy val Bool = ("true" ^^^ true) | ("false" ^^^ false) + /** Parses the lower-case values `true` and `false` into their respesct Boolean values. */ + lazy val Bool = ("true" ^^^ true) | ("false" ^^^ false) - /** Parses a potentially quoted String value. The value may be verbatim quoted ([[StringVerbatim]]), - * quoted with interpreted escapes ([[StringEscapable]]), or unquoted ([[NotQuoted]]). */ - lazy val StringBasic = StringVerbatim | StringEscapable | NotQuoted + /** + * Parses a potentially quoted String value. The value may be verbatim quoted ([[StringVerbatim]]), + * quoted with interpreted escapes ([[StringEscapable]]), or unquoted ([[NotQuoted]]). + */ + lazy val StringBasic = StringVerbatim | StringEscapable | NotQuoted - /** Parses a verbatim quoted String value, discarding the quotes in the result. This kind of quoted text starts with triple quotes `"""` - * and ends at the next triple quotes and may contain any character in between. */ - lazy val StringVerbatim: Parser[String] = VerbatimDQuotes ~> - any.+.string.filter(!_.contains(VerbatimDQuotes), _ => "Invalid verbatim string") <~ - VerbatimDQuotes + /** + * Parses a verbatim quoted String value, discarding the quotes in the result. This kind of quoted text starts with triple quotes `"""` + * and ends at the next triple quotes and may contain any character in between. + */ + lazy val StringVerbatim: Parser[String] = VerbatimDQuotes ~> + any.+.string.filter(!_.contains(VerbatimDQuotes), _ => "Invalid verbatim string") <~ + VerbatimDQuotes - /** Parses a string value, interpreting escapes and discarding the surrounding quotes in the result. - * See [[EscapeSequence]] for supported escapes. */ - lazy val StringEscapable: Parser[String] = - (DQuoteChar ~> (NotDQuoteBackslashClass | EscapeSequence).+.string <~ DQuoteChar | - (DQuoteChar ~ DQuoteChar) ^^^ "") + /** + * Parses a string value, interpreting escapes and discarding the surrounding quotes in the result. 
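+ * For example, a minimal sketch assuming the `parse` helper from [[DefaultParsers]] is in scope:
+ * {{{
+ * parse("\"a\\tb\"", StringEscapable)  // Right("a\tb"): quotes dropped, \t interpreted as a tab
+ * }}}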
+ * See [[EscapeSequence]] for supported escapes. + */ + lazy val StringEscapable: Parser[String] = + (DQuoteChar ~> (NotDQuoteBackslashClass | EscapeSequence).+.string <~ DQuoteChar | + (DQuoteChar ~ DQuoteChar) ^^^ "") - /** Parses a single escape sequence into the represented Char. - * Escapes start with a backslash and are followed by `u` for a [[UnicodeEscape]] or by `b`, `t`, `n`, `f`, `r`, `"`, `'`, `\` for standard escapes. */ - lazy val EscapeSequence: Parser[Char] = - BackslashChar ~> ('b' ^^^ '\b' | 't' ^^^ '\t' | 'n' ^^^ '\n' | 'f' ^^^ '\f' | 'r' ^^^ '\r' | - '\"' ^^^ '\"' | '\'' ^^^ '\'' | '\\' ^^^ '\\' | UnicodeEscape) + /** + * Parses a single escape sequence into the represented Char. + * Escapes start with a backslash and are followed by `u` for a [[UnicodeEscape]] or by `b`, `t`, `n`, `f`, `r`, `"`, `'`, `\` for standard escapes. + */ + lazy val EscapeSequence: Parser[Char] = + BackslashChar ~> ('b' ^^^ '\b' | 't' ^^^ '\t' | 'n' ^^^ '\n' | 'f' ^^^ '\f' | 'r' ^^^ '\r' | + '\"' ^^^ '\"' | '\'' ^^^ '\'' | '\\' ^^^ '\\' | UnicodeEscape) - /** Parses a single unicode escape sequence into the represented Char. - * A unicode escape begins with a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value. */ - lazy val UnicodeEscape: Parser[Char] = - ("u" ~> repeat(HexDigit, 4, 4)) map { seq => Integer.parseInt(seq.mkString, 16).toChar } + /** + * Parses a single unicode escape sequence into the represented Char. + * A unicode escape begins with a backslash, followed by a `u` and 4 hexadecimal digits representing the unicode value. + */ + lazy val UnicodeEscape: Parser[Char] = + ("u" ~> repeat(HexDigit, 4, 4)) map { seq => Integer.parseInt(seq.mkString, 16).toChar } - /** Parses an unquoted, non-empty String value that cannot start with a double quote and cannot contain whitespace.*/ - lazy val NotQuoted = (NotDQuoteSpaceClass ~ OptNotSpace) map { case (c, s) => c.toString + s } + /** Parses an unquoted, non-empty String value that cannot start with a double quote and cannot contain whitespace.*/ + lazy val NotQuoted = (NotDQuoteSpaceClass ~ OptNotSpace) map { case (c, s) => c.toString + s } - /** Applies `rep` zero or more times, separated by `sep`. - * The result is the (possibly empty) sequence of results from the multiple `rep` applications. The `sep` results are discarded. */ - def repsep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] = - rep1sep(rep, sep) ?? Nil + /** + * Applies `rep` zero or more times, separated by `sep`. + * The result is the (possibly empty) sequence of results from the multiple `rep` applications. The `sep` results are discarded. + */ + def repsep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] = + rep1sep(rep, sep) ?? Nil - /** Applies `rep` one or more times, separated by `sep`. - * The result is the non-empty sequence of results from the multiple `rep` applications. The `sep` results are discarded. */ - def rep1sep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] = - (rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs } + /** + * Applies `rep` one or more times, separated by `sep`. + * The result is the non-empty sequence of results from the multiple `rep` applications. The `sep` results are discarded. 
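+ *
+ * A small usage sketch, assuming `ID` and the `parse` helper from [[DefaultParsers]] are in scope:
+ * {{{
+ * parse("alpha,beta,gamma", rep1sep(ID, ','))  // Right(Seq("alpha", "beta", "gamma"))
+ * parse("", rep1sep(ID, ','))                  // Left(...): at least one `rep` is required
+ * }}}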
+ */ + def rep1sep[T](rep: Parser[T], sep: Parser[_]): Parser[Seq[T]] = + (rep ~ (sep ~> rep).*).map { case (x ~ xs) => x +: xs } - /** Wraps the result of `p` in `Some`.*/ - def some[T](p: Parser[T]): Parser[Option[T]] = p map { v => Some(v) } + /** Wraps the result of `p` in `Some`.*/ + def some[T](p: Parser[T]): Parser[Option[T]] = p map { v => Some(v) } - /** Applies `f` to the result of `p`, transforming any exception when evaluating - * `f` into a parse failure with the exception `toString` as the message.*/ - def mapOrFail[S,T](p: Parser[S])(f: S => T): Parser[T] = - p flatMap { s => try { success(f(s)) } catch { case e: Exception => failure(e.toString) } } + /** + * Applies `f` to the result of `p`, transforming any exception when evaluating + * `f` into a parse failure with the exception `toString` as the message. + */ + def mapOrFail[S, T](p: Parser[S])(f: S => T): Parser[T] = + p flatMap { s => try { success(f(s)) } catch { case e: Exception => failure(e.toString) } } - /** Parses a space-delimited, possibly empty sequence of arguments. - * The arguments may use quotes and escapes according to [[StringBasic]]. */ - def spaceDelimited(display: String): Parser[Seq[String]] = (token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.* + /** + * Parses a space-delimited, possibly empty sequence of arguments. + * The arguments may use quotes and escapes according to [[StringBasic]]. + */ + def spaceDelimited(display: String): Parser[Seq[String]] = (token(Space) ~> token(StringBasic, display)).* <~ SpaceClass.* - /** Applies `p` and uses `true` as the result if it succeeds and turns failure into a result of `false`. */ - def flag[T](p: Parser[T]): Parser[Boolean] = (p ^^^ true) ?? false + /** Applies `p` and uses `true` as the result if it succeeds and turns failure into a result of `false`. */ + def flag[T](p: Parser[T]): Parser[Boolean] = (p ^^^ true) ?? false - /** Defines a sequence parser where the parser used for each part depends on the previously parsed values. - * `p` is applied to the (possibly empty) sequence of already parsed values to obtain the next parser to use. - * The parsers obtained in this way are separated by `sep`, whose result is discarded and only the sequence - * of values from the parsers returned by `p` is used for the result. */ - def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] = - { - def loop(acc: Seq[A]): Parser[Seq[A]] = { - val next = (sep ~> p(acc)) flatMap { result => loop(acc :+ result) } - next ?? acc - } - p(Vector()) flatMap { first => loop(Seq(first)) } - } + /** + * Defines a sequence parser where the parser used for each part depends on the previously parsed values. + * `p` is applied to the (possibly empty) sequence of already parsed values to obtain the next parser to use. + * The parsers obtained in this way are separated by `sep`, whose result is discarded and only the sequence + * of values from the parsers returned by `p` is used for the result. + */ + def repeatDep[A](p: Seq[A] => Parser[A], sep: Parser[Any]): Parser[Seq[A]] = + { + def loop(acc: Seq[A]): Parser[Seq[A]] = { + val next = (sep ~> p(acc)) flatMap { result => loop(acc :+ result) } + next ?? acc + } + p(Vector()) flatMap { first => loop(Seq(first)) } + } - /** Applies String.trim to the result of `p`. */ - def trimmed(p: Parser[String]) = p map { _.trim } + /** Applies String.trim to the result of `p`. 
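 * For example, a minimal sketch assuming the `parse` helper from [[DefaultParsers]] is in scope:
 * {{{
 * parse("  hello  ", trimmed(any.*.string))  // Right("hello")
 * }}}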
*/ + def trimmed(p: Parser[String]) = p map { _.trim } - /** Parses a URI that is valid according to the single argument java.net.URI constructor. */ - lazy val basicUri = mapOrFail(URIClass)( uri => new URI(uri)) + /** Parses a URI that is valid according to the single argument java.net.URI constructor. */ + lazy val basicUri = mapOrFail(URIClass)(uri => new URI(uri)) - /** Parses a URI that is valid according to the single argument java.net.URI constructor, using `ex` as tab completion examples. */ - def Uri(ex: Set[URI]) = basicUri examples(ex.map(_.toString)) + /** Parses a URI that is valid according to the single argument java.net.URI constructor, using `ex` as tab completion examples. */ + def Uri(ex: Set[URI]) = basicUri examples (ex.map(_.toString)) } /** Provides standard [[Parser]] implementations. */ object Parsers extends Parsers /** Provides common [[Parser]] implementations and helper methods.*/ -object DefaultParsers extends Parsers with ParserMain -{ - /** Applies parser `p` to input `s` and returns `true` if the parse was successful. */ - def matches(p: Parser[_], s: String): Boolean = - apply(p)(s).resultEmpty.isValid +object DefaultParsers extends Parsers with ParserMain { + /** Applies parser `p` to input `s` and returns `true` if the parse was successful. */ + def matches(p: Parser[_], s: String): Boolean = + apply(p)(s).resultEmpty.isValid - /** Returns `true` if `s` parses successfully according to [[ID]].*/ - def validID(s: String): Boolean = matches(ID, s) + /** Returns `true` if `s` parses successfully according to [[ID]].*/ + def validID(s: String): Boolean = matches(ID, s) } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/ProcessError.scala b/util/complete/src/main/scala/sbt/complete/ProcessError.scala index 76ea2f71d..7e6c9794e 100644 --- a/util/complete/src/main/scala/sbt/complete/ProcessError.scala +++ b/util/complete/src/main/scala/sbt/complete/ProcessError.scala @@ -1,30 +1,29 @@ package sbt.complete -object ProcessError -{ - def apply(command: String, msgs: Seq[String], index: Int): String = - { - val (line, modIndex) = extractLine(command, index) - val point = pointerSpace(command, modIndex) - msgs.mkString("\n") + "\n" + line + "\n" + point + "^" - } - def extractLine(s: String, i: Int): (String, Int) = - { - val notNewline = (c: Char) => c != '\n' && c != '\r' - val left = takeRightWhile( s.substring(0, i) )( notNewline ) - val right = s substring i takeWhile notNewline - (left + right, left.length) - } - def takeRightWhile(s: String)(pred: Char => Boolean): String = - { - def loop(i: Int): String = - if(i < 0) - s - else if( pred(s(i)) ) - loop(i-1) - else - s.substring(i+1) - loop(s.length - 1) - } - def pointerSpace(s: String, i: Int): String = (s take i) map { case '\t' => '\t'; case _ => ' ' } mkString; +object ProcessError { + def apply(command: String, msgs: Seq[String], index: Int): String = + { + val (line, modIndex) = extractLine(command, index) + val point = pointerSpace(command, modIndex) + msgs.mkString("\n") + "\n" + line + "\n" + point + "^" + } + def extractLine(s: String, i: Int): (String, Int) = + { + val notNewline = (c: Char) => c != '\n' && c != '\r' + val left = takeRightWhile(s.substring(0, i))(notNewline) + val right = s substring i takeWhile notNewline + (left + right, left.length) + } + def takeRightWhile(s: String)(pred: Char => Boolean): String = + { + def loop(i: Int): String = + if (i < 0) + s + else if (pred(s(i))) + loop(i - 1) + else + s.substring(i + 1) + loop(s.length - 1) + } + def 
pointerSpace(s: String, i: Int): String = (s take i) map { case '\t' => '\t'; case _ => ' ' } mkString; } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/TokenCompletions.scala b/util/complete/src/main/scala/sbt/complete/TokenCompletions.scala index aee6353db..96e70d2f1 100644 --- a/util/complete/src/main/scala/sbt/complete/TokenCompletions.scala +++ b/util/complete/src/main/scala/sbt/complete/TokenCompletions.scala @@ -1,38 +1,37 @@ package sbt.complete - import Completion.{displayStrict, token => ctoken, tokenDisplay} +import Completion.{ displayStrict, token => ctoken, tokenDisplay } sealed trait TokenCompletions { - def hideWhen(f: Int => Boolean): TokenCompletions + def hideWhen(f: Int => Boolean): TokenCompletions } -object TokenCompletions -{ - private[sbt] abstract class Delegating extends TokenCompletions { outer => - def completions(seen: String, level: Int, delegate: Completions): Completions - final def hideWhen(hide: Int => Boolean): TokenCompletions = new Delegating { - def completions(seen: String, level: Int, delegate: Completions): Completions = - if(hide(level)) Completions.nil else outer.completions(seen, level, delegate) - } - } - private[sbt] abstract class Fixed extends TokenCompletions { outer => - def completions(seen: String, level: Int): Completions - final def hideWhen(hide: Int => Boolean): TokenCompletions = new Fixed { - def completions(seen: String, level: Int) = - if(hide(level)) Completions.nil else outer.completions(seen, level) - } - } +object TokenCompletions { + private[sbt] abstract class Delegating extends TokenCompletions { outer => + def completions(seen: String, level: Int, delegate: Completions): Completions + final def hideWhen(hide: Int => Boolean): TokenCompletions = new Delegating { + def completions(seen: String, level: Int, delegate: Completions): Completions = + if (hide(level)) Completions.nil else outer.completions(seen, level, delegate) + } + } + private[sbt] abstract class Fixed extends TokenCompletions { outer => + def completions(seen: String, level: Int): Completions + final def hideWhen(hide: Int => Boolean): TokenCompletions = new Fixed { + def completions(seen: String, level: Int) = + if (hide(level)) Completions.nil else outer.completions(seen, level) + } + } - val default: TokenCompletions = mapDelegateCompletions((seen,level,c) => ctoken(seen, c.append)) + val default: TokenCompletions = mapDelegateCompletions((seen, level, c) => ctoken(seen, c.append)) - def displayOnly(msg: String): TokenCompletions = new Fixed { - def completions(seen: String, level: Int) = Completions.single(displayStrict(msg)) - } - def overrideDisplay(msg: String): TokenCompletions = mapDelegateCompletions((seen,level,c) => tokenDisplay(display = msg, append = c.append)) + def displayOnly(msg: String): TokenCompletions = new Fixed { + def completions(seen: String, level: Int) = Completions.single(displayStrict(msg)) + } + def overrideDisplay(msg: String): TokenCompletions = mapDelegateCompletions((seen, level, c) => tokenDisplay(display = msg, append = c.append)) - def fixed(f: (String, Int) => Completions): TokenCompletions = new Fixed { - def completions(seen: String, level: Int) = f(seen, level) - } - def mapDelegateCompletions(f: (String, Int, Completion) => Completion): TokenCompletions = new Delegating { - def completions(seen: String, level: Int, delegate: Completions) = Completions( delegate.get.map(c => f(seen, level, c)) ) - } + def fixed(f: (String, Int) => Completions): TokenCompletions = new Fixed { + def 
completions(seen: String, level: Int) = f(seen, level) + } + def mapDelegateCompletions(f: (String, Int, Completion) => Completion): TokenCompletions = new Delegating { + def completions(seen: String, level: Int, delegate: Completions) = Completions(delegate.get.map(c => f(seen, level, c))) + } } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/TypeString.scala b/util/complete/src/main/scala/sbt/complete/TypeString.scala index 976b672e2..6bf89ac05 100644 --- a/util/complete/src/main/scala/sbt/complete/TypeString.scala +++ b/util/complete/src/main/scala/sbt/complete/TypeString.scala @@ -1,77 +1,79 @@ package sbt.complete - import DefaultParsers._ - import TypeString._ +import DefaultParsers._ +import TypeString._ -/** Basic representation of types parsed from Manifest.toString. -* This can only represent the structure of parameterized types. -* All other types are represented by a TypeString with an empty `args`. */ -private[sbt] final class TypeString(val base: String, val args: List[TypeString]) -{ - override def toString = - if(base.startsWith(FunctionName)) - args.dropRight(1).mkString("(", ",", ")") + " => " + args.last - else if(base.startsWith(TupleName)) - args.mkString("(",",",")") - else - cleanupTypeName(base) + (if(args.isEmpty) "" else args.mkString("[", ",", "]")) +/** + * Basic representation of types parsed from Manifest.toString. + * This can only represent the structure of parameterized types. + * All other types are represented by a TypeString with an empty `args`. + */ +private[sbt] final class TypeString(val base: String, val args: List[TypeString]) { + override def toString = + if (base.startsWith(FunctionName)) + args.dropRight(1).mkString("(", ",", ")") + " => " + args.last + else if (base.startsWith(TupleName)) + args.mkString("(", ",", ")") + else + cleanupTypeName(base) + (if (args.isEmpty) "" else args.mkString("[", ",", "]")) } -private[sbt] object TypeString -{ - /** Makes the string representation of a type as returned by Manifest.toString more readable.*/ - def cleanup(typeString: String): String = - parse(typeString, typeStringParser) match { - case Right(ts) => ts.toString - case Left(err) => typeString - } +private[sbt] object TypeString { + /** Makes the string representation of a type as returned by Manifest.toString more readable.*/ + def cleanup(typeString: String): String = + parse(typeString, typeStringParser) match { + case Right(ts) => ts.toString + case Left(err) => typeString + } - /** Makes a fully qualified type name provided by Manifest.toString more readable. - * The argument should be just a name (like scala.Tuple2) and not a full type (like scala.Tuple2[Int,Boolean])*/ - def cleanupTypeName(base: String): String = - dropPrefix(base).replace('$', '.') + /** + * Makes a fully qualified type name provided by Manifest.toString more readable. + * The argument should be just a name (like scala.Tuple2) and not a full type (like scala.Tuple2[Int,Boolean]) + */ + def cleanupTypeName(base: String): String = + dropPrefix(base).replace('$', '.') - /** Removes prefixes from a fully qualified type name that are unnecessary in the presence of standard imports for an sbt setting. 
- * This does not use the compiler and is therefore a conservative approximation.*/ - def dropPrefix(base: String): String = - if(base.startsWith(SbtPrefix)) - base.substring(SbtPrefix.length) - else if(base.startsWith(CollectionPrefix)) - { - val simple = base.substring(CollectionPrefix.length) - if(ShortenCollection(simple)) simple else base - } - else if(base.startsWith(ScalaPrefix)) - base.substring(ScalaPrefix.length) - else if(base.startsWith(JavaPrefix)) - base.substring(JavaPrefix.length) - else - TypeMap.getOrElse(base, base) + /** + * Removes prefixes from a fully qualified type name that are unnecessary in the presence of standard imports for an sbt setting. + * This does not use the compiler and is therefore a conservative approximation. + */ + def dropPrefix(base: String): String = + if (base.startsWith(SbtPrefix)) + base.substring(SbtPrefix.length) + else if (base.startsWith(CollectionPrefix)) { + val simple = base.substring(CollectionPrefix.length) + if (ShortenCollection(simple)) simple else base + } else if (base.startsWith(ScalaPrefix)) + base.substring(ScalaPrefix.length) + else if (base.startsWith(JavaPrefix)) + base.substring(JavaPrefix.length) + else + TypeMap.getOrElse(base, base) - final val CollectionPrefix = "scala.collection." - final val FunctionName = "scala.Function" - final val TupleName = "scala.Tuple" - final val SbtPrefix = "sbt." - final val ScalaPrefix = "scala." - final val JavaPrefix = "java.lang." - /* scala.collection.X -> X */ - val ShortenCollection = Set("Seq", "List", "Set", "Map", "Iterable") - val TypeMap = Map( - "java.io.File" -> "File", - "java.net.URL" -> "URL", - "java.net.URI" -> "URI" - ) + final val CollectionPrefix = "scala.collection." + final val FunctionName = "scala.Function" + final val TupleName = "scala.Tuple" + final val SbtPrefix = "sbt." + final val ScalaPrefix = "scala." + final val JavaPrefix = "java.lang." + /* scala.collection.X -> X */ + val ShortenCollection = Set("Seq", "List", "Set", "Map", "Iterable") + val TypeMap = Map( + "java.io.File" -> "File", + "java.net.URL" -> "URL", + "java.net.URI" -> "URI" + ) - /** A Parser that extracts basic structure from the string representation of a type from Manifest.toString. - * This is rudimentary and essentially only decomposes the string into names and arguments for parameterized types. - * */ - lazy val typeStringParser: Parser[TypeString] = - { - def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$' - lazy val fullScalaID = identifier(IDStart, charClass(isFullScalaIDChar, "Scala identifier character") ) - lazy val tpe: Parser[TypeString] = - for( id <- fullScalaID; args <- ('[' ~> rep1sep(tpe, ',') <~ ']').?) yield - new TypeString(id, args.toList.flatten) - tpe - } + /** + * A Parser that extracts basic structure from the string representation of a type from Manifest.toString. + * This is rudimentary and essentially only decomposes the string into names and arguments for parameterized types. + */ + lazy val typeStringParser: Parser[TypeString] = + { + def isFullScalaIDChar(c: Char) = isScalaIDChar(c) || c == '.' || c == '$' + lazy val fullScalaID = identifier(IDStart, charClass(isFullScalaIDChar, "Scala identifier character")) + lazy val tpe: Parser[TypeString] = + for (id <- fullScalaID; args <- ('[' ~> rep1sep(tpe, ',') <~ ']').?) 
yield new TypeString(id, args.toList.flatten) + tpe + } } \ No newline at end of file diff --git a/util/complete/src/main/scala/sbt/complete/UpperBound.scala b/util/complete/src/main/scala/sbt/complete/UpperBound.scala index ba1a69ef9..66a32e1a2 100644 --- a/util/complete/src/main/scala/sbt/complete/UpperBound.scala +++ b/util/complete/src/main/scala/sbt/complete/UpperBound.scala @@ -3,45 +3,45 @@ */ package sbt.complete -sealed trait UpperBound -{ - /** True if and only if the given value meets this bound.*/ - def >=(min: Int): Boolean - /** True if and only if this bound is one.*/ - def isOne: Boolean - /** True if and only if this bound is zero.*/ - def isZero: Boolean - /** If this bound is zero or Infinite, `decrement` returns this bound. - * Otherwise, this bound is finite and greater than zero and `decrement` returns the bound that is one less than this bound.*/ - def decrement: UpperBound - /** True if and only if this is unbounded.*/ - def isInfinite: Boolean +sealed trait UpperBound { + /** True if and only if the given value meets this bound.*/ + def >=(min: Int): Boolean + /** True if and only if this bound is one.*/ + def isOne: Boolean + /** True if and only if this bound is zero.*/ + def isZero: Boolean + /** + * If this bound is zero or Infinite, `decrement` returns this bound. + * Otherwise, this bound is finite and greater than zero and `decrement` returns the bound that is one less than this bound. + */ + def decrement: UpperBound + /** True if and only if this is unbounded.*/ + def isInfinite: Boolean } /** Represents unbounded. */ -case object Infinite extends UpperBound -{ - /** All finite numbers meet this bound. */ - def >=(min: Int) = true - def isOne = false - def isZero = false - def decrement = this - def isInfinite = true - override def toString = "Infinity" +case object Infinite extends UpperBound { + /** All finite numbers meet this bound. */ + def >=(min: Int) = true + def isOne = false + def isZero = false + def decrement = this + def isInfinite = true + override def toString = "Infinity" } -/** Represents a finite upper bound. The maximum allowed value is 'value', inclusive. -* It must positive. */ -final case class Finite(value: Int) extends UpperBound -{ - assume(value >= 0, "Maximum occurences must be nonnegative.") +/** + * Represents a finite upper bound. The maximum allowed value is 'value', inclusive. + * It must positive. 
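+ *
+ * A behaviour sketch, derived from the definitions below:
+ * {{{
+ * Finite(3) >= 2        // true
+ * Finite(3) >= 4        // false
+ * Finite(1).decrement   // Finite(0)
+ * Infinite.decrement    // Infinite
+ * }}}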
+ */ +final case class Finite(value: Int) extends UpperBound { + assume(value >= 0, "Maximum occurences must be nonnegative.") - def >=(min: Int) = value >= min - def isOne = value == 1 - def isZero = value == 0 - def decrement = Finite( (value - 1) max 0 ) - def isInfinite = false - override def toString = value.toString + def >=(min: Int) = value >= min + def isOne = value == 1 + def isZero = value == 0 + def decrement = Finite((value - 1) max 0) + def isInfinite = false + override def toString = value.toString } -object UpperBound -{ - implicit def intToFinite(i: Int): Finite = Finite(i) +object UpperBound { + implicit def intToFinite(i: Int): Finite = Finite(i) } \ No newline at end of file diff --git a/util/control/src/main/scala/sbt/ErrorHandling.scala b/util/control/src/main/scala/sbt/ErrorHandling.scala index b6e616ae3..70eba7d2f 100644 --- a/util/control/src/main/scala/sbt/ErrorHandling.scala +++ b/util/control/src/main/scala/sbt/ErrorHandling.scala @@ -3,41 +3,36 @@ */ package sbt - import java.io.IOException +import java.io.IOException -object ErrorHandling -{ - def translate[T](msg: => String)(f: => T) = - try { f } - catch { - case e: IOException => throw new TranslatedIOException(msg + e.toString, e) - case e: Exception => throw new TranslatedException(msg + e.toString, e) - } +object ErrorHandling { + def translate[T](msg: => String)(f: => T) = + try { f } + catch { + case e: IOException => throw new TranslatedIOException(msg + e.toString, e) + case e: Exception => throw new TranslatedException(msg + e.toString, e) + } - def wideConvert[T](f: => T): Either[Throwable, T] = - try { Right(f) } - catch - { - case ex @ (_: Exception | _: StackOverflowError) => Left(ex) - case err @ (_: ThreadDeath | _: VirtualMachineError) => throw err - case x: Throwable => Left(x) - } + def wideConvert[T](f: => T): Either[Throwable, T] = + try { Right(f) } + catch { + case ex @ (_: Exception | _: StackOverflowError) => Left(ex) + case err @ (_: ThreadDeath | _: VirtualMachineError) => throw err + case x: Throwable => Left(x) + } - def convert[T](f: => T): Either[Exception, T] = - try { Right(f) } - catch { case e: Exception => Left(e) } + def convert[T](f: => T): Either[Exception, T] = + try { Right(f) } + catch { case e: Exception => Left(e) } - def reducedToString(e: Throwable): String = - if(e.getClass == classOf[RuntimeException]) - { - val msg = e.getMessage - if(msg == null || msg.isEmpty) e.toString else msg - } - else - e.toString + def reducedToString(e: Throwable): String = + if (e.getClass == classOf[RuntimeException]) { + val msg = e.getMessage + if (msg == null || msg.isEmpty) e.toString else msg + } else + e.toString } -sealed class TranslatedException private[sbt](msg: String, cause: Throwable) extends RuntimeException(msg, cause) -{ - override def toString = msg +sealed class TranslatedException private[sbt] (msg: String, cause: Throwable) extends RuntimeException(msg, cause) { + override def toString = msg } -final class TranslatedIOException private[sbt](msg: String, cause: IOException) extends TranslatedException(msg, cause) +final class TranslatedIOException private[sbt] (msg: String, cause: IOException) extends TranslatedException(msg, cause) diff --git a/util/control/src/main/scala/sbt/ExitHook.scala b/util/control/src/main/scala/sbt/ExitHook.scala index de85bff42..8ee5ddf86 100644 --- a/util/control/src/main/scala/sbt/ExitHook.scala +++ b/util/control/src/main/scala/sbt/ExitHook.scala @@ -4,21 +4,18 @@ package sbt /** Defines a function to call as sbt exits.*/ -trait 
ExitHook -{ - /** Subclasses should implement this method, which is called when this hook is executed. */ - def runBeforeExiting(): Unit +trait ExitHook { + /** Subclasses should implement this method, which is called when this hook is executed. */ + def runBeforeExiting(): Unit } -object ExitHook -{ - def apply(f: => Unit): ExitHook = new ExitHook { def runBeforeExiting() = f } +object ExitHook { + def apply(f: => Unit): ExitHook = new ExitHook { def runBeforeExiting() = f } } -object ExitHooks -{ - /** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */ - def runExitHooks(exitHooks: Seq[ExitHook]): Seq[Throwable] = - exitHooks.flatMap( hook => - ErrorHandling.wideConvert( hook.runBeforeExiting() ).left.toOption - ) +object ExitHooks { + /** Calls each registered exit hook, trapping any exceptions so that each hook is given a chance to run. */ + def runExitHooks(exitHooks: Seq[ExitHook]): Seq[Throwable] = + exitHooks.flatMap(hook => + ErrorHandling.wideConvert(hook.runBeforeExiting()).left.toOption + ) } \ No newline at end of file diff --git a/util/control/src/main/scala/sbt/MessageOnlyException.scala b/util/control/src/main/scala/sbt/MessageOnlyException.scala index 75b7737d8..ab4727b95 100644 --- a/util/control/src/main/scala/sbt/MessageOnlyException.scala +++ b/util/control/src/main/scala/sbt/MessageOnlyException.scala @@ -5,14 +5,20 @@ package sbt final class MessageOnlyException(override val toString: String) extends RuntimeException(toString) -/** A dummy exception for the top-level exception handler to know that an exception -* has been handled, but is being passed further up to indicate general failure. */ +/** + * A dummy exception for the top-level exception handler to know that an exception + * has been handled, but is being passed further up to indicate general failure. + */ final class AlreadyHandledException(val underlying: Throwable) extends RuntimeException -/** A marker trait for a top-level exception handler to know that this exception -* doesn't make sense to display. */ +/** + * A marker trait for a top-level exception handler to know that this exception + * doesn't make sense to display. + */ trait UnprintableException extends Throwable -/** A marker trait that refines UnprintableException to indicate to a top-level exception handler -* that the code throwing this exception has already provided feedback to the user about the error condition. */ +/** + * A marker trait that refines UnprintableException to indicate to a top-level exception handler + * that the code throwing this exception has already provided feedback to the user about the error condition. 
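+ *
+ * A hypothetical subclass, for illustration only (the name `TestsFailed` is not part of sbt):
+ * {{{
+ * final class TestsFailed extends RuntimeException("tests failed") with FeedbackProvidedException
+ * }}}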
+ */ trait FeedbackProvidedException extends UnprintableException diff --git a/util/log/src/main/scala/sbt/BasicLogger.scala b/util/log/src/main/scala/sbt/BasicLogger.scala index c58dc57c6..7fe59e8c0 100644 --- a/util/log/src/main/scala/sbt/BasicLogger.scala +++ b/util/log/src/main/scala/sbt/BasicLogger.scala @@ -4,15 +4,14 @@ package sbt /** Implements the level-setting methods of Logger.*/ -abstract class BasicLogger extends AbstractLogger -{ - private var traceEnabledVar = java.lang.Integer.MAX_VALUE - private var level: Level.Value = Level.Info - private var successEnabledVar = true - def successEnabled = synchronized { successEnabledVar } - def setSuccessEnabled(flag: Boolean): Unit = synchronized { successEnabledVar = flag } - def getLevel = synchronized { level } - def setLevel(newLevel: Level.Value): Unit = synchronized { level = newLevel } - def setTrace(level: Int): Unit = synchronized { traceEnabledVar = level } - def getTrace = synchronized { traceEnabledVar } +abstract class BasicLogger extends AbstractLogger { + private var traceEnabledVar = java.lang.Integer.MAX_VALUE + private var level: Level.Value = Level.Info + private var successEnabledVar = true + def successEnabled = synchronized { successEnabledVar } + def setSuccessEnabled(flag: Boolean): Unit = synchronized { successEnabledVar = flag } + def getLevel = synchronized { level } + def setLevel(newLevel: Level.Value): Unit = synchronized { level = newLevel } + def setTrace(level: Int): Unit = synchronized { traceEnabledVar = level } + def getTrace = synchronized { traceEnabledVar } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/BufferedLogger.scala b/util/log/src/main/scala/sbt/BufferedLogger.scala index 0b9d7a593..a40d3f1be 100644 --- a/util/log/src/main/scala/sbt/BufferedLogger.scala +++ b/util/log/src/main/scala/sbt/BufferedLogger.scala @@ -3,94 +3,93 @@ */ package sbt - import scala.collection.mutable.ListBuffer +import scala.collection.mutable.ListBuffer -/** A logger that can buffer the logging done on it and then can flush the buffer -* to the delegate logger provided in the constructor. Use 'startRecording' to -* start buffering and then 'play' from to flush the buffer to the backing logger. -* The logging level set at the time a message is originally logged is used, not -* the level at the time 'play' is called. -* -* This class assumes that it is the only client of the delegate logger. -* */ -class BufferedLogger(delegate: AbstractLogger) extends BasicLogger -{ - private[this] val buffer = new ListBuffer[LogEvent] - private[this] var recording = false +/** + * A logger that can buffer the logging done on it and then can flush the buffer + * to the delegate logger provided in the constructor. Use 'startRecording' to + * start buffering and then 'play' from to flush the buffer to the backing logger. + * The logging level set at the time a message is originally logged is used, not + * the level at the time 'play' is called. + * + * This class assumes that it is the only client of the delegate logger. + */ +class BufferedLogger(delegate: AbstractLogger) extends BasicLogger { + private[this] val buffer = new ListBuffer[LogEvent] + private[this] var recording = false - /** Enables buffering. 
*/ - def record() = synchronized { recording = true } - def buffer[T](f: => T): T = { - record() - try { f } - finally { stopQuietly() } - } - def bufferQuietly[T](f: => T): T = { - record() - try - { - val result = f - clear() - result - } - catch { case e: Throwable => stopQuietly(); throw e } - } - def stopQuietly() = synchronized { try { stop() } catch { case e: Exception => () } } + /** Enables buffering. */ + def record() = synchronized { recording = true } + def buffer[T](f: => T): T = { + record() + try { f } + finally { stopQuietly() } + } + def bufferQuietly[T](f: => T): T = { + record() + try { + val result = f + clear() + result + } catch { case e: Throwable => stopQuietly(); throw e } + } + def stopQuietly() = synchronized { try { stop() } catch { case e: Exception => () } } - /** Flushes the buffer to the delegate logger. This method calls logAll on the delegate - * so that the messages are written consecutively. The buffer is cleared in the process. */ - def play(): Unit = synchronized { delegate.logAll(buffer.readOnly); buffer.clear() } - /** Clears buffered events and disables buffering. */ - def clear(): Unit = synchronized { buffer.clear(); recording = false } - /** Plays buffered events and disables buffering. */ - def stop(): Unit = synchronized { play(); clear() } + /** + * Flushes the buffer to the delegate logger. This method calls logAll on the delegate + * so that the messages are written consecutively. The buffer is cleared in the process. + */ + def play(): Unit = synchronized { delegate.logAll(buffer.readOnly); buffer.clear() } + /** Clears buffered events and disables buffering. */ + def clear(): Unit = synchronized { buffer.clear(); recording = false } + /** Plays buffered events and disables buffering. */ + def stop(): Unit = synchronized { play(); clear() } - override def ansiCodesSupported = delegate.ansiCodesSupported - override def setLevel(newLevel: Level.Value): Unit = synchronized { - super.setLevel(newLevel) - if(recording) - buffer += new SetLevel(newLevel) - else - delegate.setLevel(newLevel) - } - override def setSuccessEnabled(flag: Boolean): Unit = synchronized { - super.setSuccessEnabled(flag) - if(recording) - buffer += new SetSuccess(flag) - else - delegate.setSuccessEnabled(flag) - } - override def setTrace(level: Int): Unit = synchronized { - super.setTrace(level) - if(recording) - buffer += new SetTrace(level) - else - delegate.setTrace(level) - } + override def ansiCodesSupported = delegate.ansiCodesSupported + override def setLevel(newLevel: Level.Value): Unit = synchronized { + super.setLevel(newLevel) + if (recording) + buffer += new SetLevel(newLevel) + else + delegate.setLevel(newLevel) + } + override def setSuccessEnabled(flag: Boolean): Unit = synchronized { + super.setSuccessEnabled(flag) + if (recording) + buffer += new SetSuccess(flag) + else + delegate.setSuccessEnabled(flag) + } + override def setTrace(level: Int): Unit = synchronized { + super.setTrace(level) + if (recording) + buffer += new SetTrace(level) + else + delegate.setTrace(level) + } - def trace(t: => Throwable): Unit = - doBufferableIf(traceEnabled, new Trace(t), _.trace(t)) - def success(message: => String): Unit = - doBufferable(Level.Info, new Success(message), _.success(message)) - def log(level: Level.Value, message: => String): Unit = - doBufferable(level, new Log(level, message), _.log(level, message)) - def logAll(events: Seq[LogEvent]): Unit = synchronized { - if(recording) - buffer ++= events - else - delegate.logAll(events) - } - def control(event: 
ControlEvent.Value, message: => String): Unit = - doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message)) - private def doBufferable(level: Level.Value, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit = - doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered) - private def doBufferableIf(condition: => Boolean, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit = synchronized { - if(condition) - { - if(recording) - buffer += appendIfBuffered - else - doUnbuffered(delegate) - } - } + def trace(t: => Throwable): Unit = + doBufferableIf(traceEnabled, new Trace(t), _.trace(t)) + def success(message: => String): Unit = + doBufferable(Level.Info, new Success(message), _.success(message)) + def log(level: Level.Value, message: => String): Unit = + doBufferable(level, new Log(level, message), _.log(level, message)) + def logAll(events: Seq[LogEvent]): Unit = synchronized { + if (recording) + buffer ++= events + else + delegate.logAll(events) + } + def control(event: ControlEvent.Value, message: => String): Unit = + doBufferable(Level.Info, new ControlEvent(event, message), _.control(event, message)) + private def doBufferable(level: Level.Value, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit = + doBufferableIf(atLevel(level), appendIfBuffered, doUnbuffered) + private def doBufferableIf(condition: => Boolean, appendIfBuffered: => LogEvent, doUnbuffered: AbstractLogger => Unit): Unit = synchronized { + if (condition) { + if (recording) + buffer += appendIfBuffered + else + doUnbuffered(delegate) + } + } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/ConsoleLogger.scala b/util/log/src/main/scala/sbt/ConsoleLogger.scala index e5c8f040f..a614e4315 100644 --- a/util/log/src/main/scala/sbt/ConsoleLogger.scala +++ b/util/log/src/main/scala/sbt/ConsoleLogger.scala @@ -3,182 +3,175 @@ */ package sbt -import java.io.{BufferedWriter, PrintStream, PrintWriter} +import java.io.{ BufferedWriter, PrintStream, PrintWriter } import java.util.Locale -object ConsoleLogger -{ - @deprecated("Moved to ConsoleOut", "0.13.0") - def systemOut: ConsoleOut = ConsoleOut.systemOut +object ConsoleLogger { + @deprecated("Moved to ConsoleOut", "0.13.0") + def systemOut: ConsoleOut = ConsoleOut.systemOut - @deprecated("Moved to ConsoleOut", "0.13.0") - def overwriteContaining(s: String): (String,String) => Boolean = ConsoleOut.overwriteContaining(s) + @deprecated("Moved to ConsoleOut", "0.13.0") + def overwriteContaining(s: String): (String, String) => Boolean = ConsoleOut.overwriteContaining(s) - @deprecated("Moved to ConsoleOut", "0.13.0") - def systemOutOverwrite(f: (String,String) => Boolean): ConsoleOut = ConsoleOut.systemOutOverwrite(f) + @deprecated("Moved to ConsoleOut", "0.13.0") + def systemOutOverwrite(f: (String, String) => Boolean): ConsoleOut = ConsoleOut.systemOutOverwrite(f) - @deprecated("Moved to ConsoleOut", "0.13.0") - def printStreamOut(out: PrintStream): ConsoleOut = ConsoleOut.printStreamOut(out) + @deprecated("Moved to ConsoleOut", "0.13.0") + def printStreamOut(out: PrintStream): ConsoleOut = ConsoleOut.printStreamOut(out) - @deprecated("Moved to ConsoleOut", "0.13.0") - def printWriterOut(out: PrintWriter): ConsoleOut = ConsoleOut.printWriterOut(out) + @deprecated("Moved to ConsoleOut", "0.13.0") + def printWriterOut(out: PrintWriter): ConsoleOut = ConsoleOut.printWriterOut(out) - @deprecated("Moved to ConsoleOut", "0.13.0") - def bufferedWriterOut(out: 
BufferedWriter): ConsoleOut = bufferedWriterOut(out) + @deprecated("Moved to ConsoleOut", "0.13.0") + def bufferedWriterOut(out: BufferedWriter): ConsoleOut = bufferedWriterOut(out) - /** Escape character, used to introduce an escape sequence. */ - final val ESC = '\u001B' + /** Escape character, used to introduce an escape sequence. */ + final val ESC = '\u001B' - /** An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value 126). - * It is the final character in an escape sequence. */ - def isEscapeTerminator(c: Char): Boolean = - c >= '@' && c <= '~' + /** + * An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value 126). + * It is the final character in an escape sequence. + */ + def isEscapeTerminator(c: Char): Boolean = + c >= '@' && c <= '~' - /** Returns true if the string contains the ESC character. */ - def hasEscapeSequence(s: String): Boolean = - s.indexOf(ESC) >= 0 + /** Returns true if the string contains the ESC character. */ + def hasEscapeSequence(s: String): Boolean = + s.indexOf(ESC) >= 0 - /** Returns the string `s` with escape sequences removed. - * An escape sequence starts with the ESC character (decimal value 27) and ends with an escape terminator. - * @see isEscapeTerminator - */ - def removeEscapeSequences(s: String): String = - if(s.isEmpty || !hasEscapeSequence(s)) - s - else - { - val sb = new java.lang.StringBuilder - nextESC(s, 0, sb) - sb.toString - } - private[this] def nextESC(s: String, start: Int, sb: java.lang.StringBuilder) - { - val escIndex = s.indexOf(ESC, start) - if(escIndex < 0) - sb.append(s, start, s.length) - else { - sb.append(s, start, escIndex) - val next = skipESC(s, escIndex+1) - nextESC(s, next, sb) - } - } - + /** + * Returns the string `s` with escape sequences removed. + * An escape sequence starts with the ESC character (decimal value 27) and ends with an escape terminator. + * @see isEscapeTerminator + */ + def removeEscapeSequences(s: String): String = + if (s.isEmpty || !hasEscapeSequence(s)) + s + else { + val sb = new java.lang.StringBuilder + nextESC(s, 0, sb) + sb.toString + } + private[this] def nextESC(s: String, start: Int, sb: java.lang.StringBuilder) { + val escIndex = s.indexOf(ESC, start) + if (escIndex < 0) + sb.append(s, start, s.length) + else { + sb.append(s, start, escIndex) + val next = skipESC(s, escIndex + 1) + nextESC(s, next, sb) + } + } - /** Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after the ESC that starts the sequence. */ - private[this] def skipESC(s: String, i: Int): Int = - if(i >= s.length) - i - else if( isEscapeTerminator(s.charAt(i)) ) - i+1 - else - skipESC(s, i+1) + /** Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after the ESC that starts the sequence. 
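 * A sketch of the overall effect, seen through the public [[removeEscapeSequences]]:
 * {{{
 * removeEscapeSequences(ESC + "creset")  // "reset": the lone terminator 'c' ends the sequence
 * }}}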
*/ + private[this] def skipESC(s: String, i: Int): Int = + if (i >= s.length) + i + else if (isEscapeTerminator(s.charAt(i))) + i + 1 + else + skipESC(s, i + 1) - val formatEnabled = - { - import java.lang.Boolean.{getBoolean, parseBoolean} - val value = System.getProperty("sbt.log.format") - if(value eq null) (ansiSupported && !getBoolean("sbt.log.noformat")) else parseBoolean(value) - } - private[this] def jline1to2CompatMsg = "Found class jline.Terminal, but interface was expected" + val formatEnabled = + { + import java.lang.Boolean.{ getBoolean, parseBoolean } + val value = System.getProperty("sbt.log.format") + if (value eq null) (ansiSupported && !getBoolean("sbt.log.noformat")) else parseBoolean(value) + } + private[this] def jline1to2CompatMsg = "Found class jline.Terminal, but interface was expected" - private[this] def ansiSupported = - try { - val terminal = jline.TerminalFactory.get - terminal.restore // #460 - terminal.isAnsiSupported - } catch { - case e: Exception => !isWindows + private[this] def ansiSupported = + try { + val terminal = jline.TerminalFactory.get + terminal.restore // #460 + terminal.isAnsiSupported + } catch { + case e: Exception => !isWindows - // sbt 0.13 drops JLine 1.0 from the launcher and uses 2.x as a normal dependency - // when 0.13 is used with a 0.12 launcher or earlier, the JLine classes from the launcher get loaded - // this results in a linkage error as detected below. The detection is likely jvm specific, but the priority - // is avoiding mistakenly identifying something as a launcher incompatibility when it is not - case e: IncompatibleClassChangeError if e.getMessage == jline1to2CompatMsg => - throw new IncompatibleClassChangeError("JLine incompatibility detected. Check that the sbt launcher is version 0.13.x or later.") - } + // sbt 0.13 drops JLine 1.0 from the launcher and uses 2.x as a normal dependency + // when 0.13 is used with a 0.12 launcher or earlier, the JLine classes from the launcher get loaded + // this results in a linkage error as detected below. The detection is likely jvm specific, but the priority + // is avoiding mistakenly identifying something as a launcher incompatibility when it is not + case e: IncompatibleClassChangeError if e.getMessage == jline1to2CompatMsg => + throw new IncompatibleClassChangeError("JLine incompatibility detected. 
Check that the sbt launcher is version 0.13.x or later.") + } - val noSuppressedMessage = (_: SuppressedTraceContext) => None + val noSuppressedMessage = (_: SuppressedTraceContext) => None - private[this] def os = System.getProperty("os.name") - private[this] def isWindows = os.toLowerCase(Locale.ENGLISH).indexOf("windows") >= 0 - - def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out)) - def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out)) - def apply(out: ConsoleOut = ConsoleOut.systemOut, ansiCodesSupported: Boolean = formatEnabled, - useColor: Boolean = formatEnabled, suppressedMessage: SuppressedTraceContext => Option[String] = noSuppressedMessage): ConsoleLogger = - new ConsoleLogger(out, ansiCodesSupported, useColor, suppressedMessage) + private[this] def os = System.getProperty("os.name") + private[this] def isWindows = os.toLowerCase(Locale.ENGLISH).indexOf("windows") >= 0 - private[this] val EscapeSequence = (27.toChar + "[^@-~]*[@-~]").r - def stripEscapeSequences(s: String): String = - EscapeSequence.pattern.matcher(s).replaceAll("") + def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out)) + def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out)) + def apply(out: ConsoleOut = ConsoleOut.systemOut, ansiCodesSupported: Boolean = formatEnabled, + useColor: Boolean = formatEnabled, suppressedMessage: SuppressedTraceContext => Option[String] = noSuppressedMessage): ConsoleLogger = + new ConsoleLogger(out, ansiCodesSupported, useColor, suppressedMessage) + + private[this] val EscapeSequence = (27.toChar + "[^@-~]*[@-~]").r + def stripEscapeSequences(s: String): String = + EscapeSequence.pattern.matcher(s).replaceAll("") } -/** A logger that logs to the console. On supported systems, the level labels are -* colored. 
-* -* This logger is not thread-safe.*/ -class ConsoleLogger private[ConsoleLogger](val out: ConsoleOut, override val ansiCodesSupported: Boolean, val useColor: Boolean, val suppressedMessage: SuppressedTraceContext => Option[String]) extends BasicLogger -{ - import scala.Console.{BLUE, GREEN, RED, RESET, YELLOW} - def messageColor(level: Level.Value) = RESET - def labelColor(level: Level.Value) = - level match - { - case Level.Error => RED - case Level.Warn => YELLOW - case _ => RESET - } - def successLabelColor = GREEN - def successMessageColor = RESET - override def success(message: => String) - { - if(successEnabled) - log(successLabelColor, Level.SuccessLabel, successMessageColor, message) - } - def trace(t: => Throwable): Unit = - out.lockObject.synchronized - { - val traceLevel = getTrace - if(traceLevel >= 0) - out.print(StackTrace.trimmed(t, traceLevel)) - if(traceLevel <= 2) - for(msg <- suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useColor))) - printLabeledLine(labelColor(Level.Error), "trace", messageColor(Level.Error), msg) - } - def log(level: Level.Value, message: => String) - { - if(atLevel(level)) - log(labelColor(level), level.toString, messageColor(level), message) - } - private def reset(): Unit = setColor(RESET) - - private def setColor(color: String) - { - if(ansiCodesSupported && useColor) - out.lockObject.synchronized { out.print(color) } - } - private def log(labelColor: String, label: String, messageColor: String, message: String): Unit = - out.lockObject.synchronized - { - for(line <- message.split("""\n""")) - printLabeledLine(labelColor, label, messageColor, line) - } - private def printLabeledLine(labelColor: String, label: String, messageColor: String, line: String): Unit = - { - reset() - out.print("[") - setColor(labelColor) - out.print(label) - reset() - out.print("] ") - setColor(messageColor) - out.print(line) - reset() - out.println() - } +/** + * A logger that logs to the console. On supported systems, the level labels are + * colored. + * + * This logger is not thread-safe. 
+ */ +class ConsoleLogger private[ConsoleLogger] (val out: ConsoleOut, override val ansiCodesSupported: Boolean, val useColor: Boolean, val suppressedMessage: SuppressedTraceContext => Option[String]) extends BasicLogger { + import scala.Console.{ BLUE, GREEN, RED, RESET, YELLOW } + def messageColor(level: Level.Value) = RESET + def labelColor(level: Level.Value) = + level match { + case Level.Error => RED + case Level.Warn => YELLOW + case _ => RESET + } + def successLabelColor = GREEN + def successMessageColor = RESET + override def success(message: => String) { + if (successEnabled) + log(successLabelColor, Level.SuccessLabel, successMessageColor, message) + } + def trace(t: => Throwable): Unit = + out.lockObject.synchronized { + val traceLevel = getTrace + if (traceLevel >= 0) + out.print(StackTrace.trimmed(t, traceLevel)) + if (traceLevel <= 2) + for (msg <- suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useColor))) + printLabeledLine(labelColor(Level.Error), "trace", messageColor(Level.Error), msg) + } + def log(level: Level.Value, message: => String) { + if (atLevel(level)) + log(labelColor(level), level.toString, messageColor(level), message) + } + private def reset(): Unit = setColor(RESET) - def logAll(events: Seq[LogEvent]) = out.lockObject.synchronized { events.foreach(log) } - def control(event: ControlEvent.Value, message: => String) - { log(labelColor(Level.Info), Level.Info.toString, BLUE, message) } + private def setColor(color: String) { + if (ansiCodesSupported && useColor) + out.lockObject.synchronized { out.print(color) } + } + private def log(labelColor: String, label: String, messageColor: String, message: String): Unit = + out.lockObject.synchronized { + for (line <- message.split("""\n""")) + printLabeledLine(labelColor, label, messageColor, line) + } + private def printLabeledLine(labelColor: String, label: String, messageColor: String, line: String): Unit = + { + reset() + out.print("[") + setColor(labelColor) + out.print(label) + reset() + out.print("] ") + setColor(messageColor) + out.print(line) + reset() + out.println() + } + + def logAll(events: Seq[LogEvent]) = out.lockObject.synchronized { events.foreach(log) } + def control(event: ControlEvent.Value, message: => String) { log(labelColor(Level.Info), Level.Info.toString, BLUE, message) } } final class SuppressedTraceContext(val traceLevel: Int, val useColor: Boolean) diff --git a/util/log/src/main/scala/sbt/ConsoleOut.scala b/util/log/src/main/scala/sbt/ConsoleOut.scala index 07f17ff72..41367757b 100644 --- a/util/log/src/main/scala/sbt/ConsoleOut.scala +++ b/util/log/src/main/scala/sbt/ConsoleOut.scala @@ -1,62 +1,62 @@ package sbt - import java.io.{BufferedWriter, PrintStream, PrintWriter} +import java.io.{ BufferedWriter, PrintStream, PrintWriter } -sealed trait ConsoleOut -{ - val lockObject: AnyRef - def print(s: String): Unit - def println(s: String): Unit - def println(): Unit +sealed trait ConsoleOut { + val lockObject: AnyRef + def print(s: String): Unit + def println(s: String): Unit + def println(): Unit } -object ConsoleOut -{ - def systemOut: ConsoleOut = printStreamOut(System.out) +object ConsoleOut { + def systemOut: ConsoleOut = printStreamOut(System.out) - def overwriteContaining(s: String): (String,String) => Boolean = (cur, prev) => - cur.contains(s) && prev.contains(s) + def overwriteContaining(s: String): (String, String) => Boolean = (cur, prev) => + cur.contains(s) && prev.contains(s) - /** Move to beginning of previous line and clear the line. 
*/ - private[this] final val OverwriteLine = "\r\u001BM\u001B[2K" + /** Move to beginning of previous line and clear the line. */ + private[this] final val OverwriteLine = "\r\u001BM\u001B[2K" - /** ConsoleOut instance that is backed by System.out. It overwrites the previously printed line - * if the function `f(lineToWrite, previousLine)` returns true. - * - * The ConsoleOut returned by this method assumes that the only newlines are from println calls - * and not in the String arguments. */ - def systemOutOverwrite(f: (String,String) => Boolean): ConsoleOut = new ConsoleOut { - val lockObject = System.out - private[this] var last: Option[String] = None - private[this] var current = new java.lang.StringBuffer - def print(s: String): Unit = synchronized { current.append(s) } - def println(s: String): Unit = synchronized { current.append(s); println() } - def println(): Unit = synchronized { - val s = current.toString - if(ConsoleLogger.formatEnabled && last.exists(lmsg => f(s, lmsg))) - lockObject.print(OverwriteLine) - lockObject.println(s) - last = Some(s) - current = new java.lang.StringBuffer - } - } + /** + * ConsoleOut instance that is backed by System.out. It overwrites the previously printed line + * if the function `f(lineToWrite, previousLine)` returns true. + * + * The ConsoleOut returned by this method assumes that the only newlines are from println calls + * and not in the String arguments. + */ + def systemOutOverwrite(f: (String, String) => Boolean): ConsoleOut = new ConsoleOut { + val lockObject = System.out + private[this] var last: Option[String] = None + private[this] var current = new java.lang.StringBuffer + def print(s: String): Unit = synchronized { current.append(s) } + def println(s: String): Unit = synchronized { current.append(s); println() } + def println(): Unit = synchronized { + val s = current.toString + if (ConsoleLogger.formatEnabled && last.exists(lmsg => f(s, lmsg))) + lockObject.print(OverwriteLine) + lockObject.println(s) + last = Some(s) + current = new java.lang.StringBuffer + } + } - def printStreamOut(out: PrintStream): ConsoleOut = new ConsoleOut { - val lockObject = out - def print(s: String) = out.print(s) - def println(s: String) = out.println(s) - def println() = out.println() - } - def printWriterOut(out: PrintWriter): ConsoleOut = new ConsoleOut { - val lockObject = out - def print(s: String) = out.print(s) - def println(s: String) = { out.println(s); out.flush() } - def println() = { out.println(); out.flush() } - } - def bufferedWriterOut(out: BufferedWriter): ConsoleOut = new ConsoleOut { - val lockObject = out - def print(s: String) = out.write(s) - def println(s: String) = { out.write(s); println() } - def println() = { out.newLine(); out.flush() } - } + def printStreamOut(out: PrintStream): ConsoleOut = new ConsoleOut { + val lockObject = out + def print(s: String) = out.print(s) + def println(s: String) = out.println(s) + def println() = out.println() + } + def printWriterOut(out: PrintWriter): ConsoleOut = new ConsoleOut { + val lockObject = out + def print(s: String) = out.print(s) + def println(s: String) = { out.println(s); out.flush() } + def println() = { out.println(); out.flush() } + } + def bufferedWriterOut(out: BufferedWriter): ConsoleOut = new ConsoleOut { + val lockObject = out + def print(s: String) = out.write(s) + def println(s: String) = { out.write(s); println() } + def println() = { out.newLine(); out.flush() } + } } diff --git a/util/log/src/main/scala/sbt/FilterLogger.scala 
b/util/log/src/main/scala/sbt/FilterLogger.scala index 59048c381..d3547f34f 100644 --- a/util/log/src/main/scala/sbt/FilterLogger.scala +++ b/util/log/src/main/scala/sbt/FilterLogger.scala @@ -3,35 +3,31 @@ */ package sbt -/** A filter logger is used to delegate messages but not the logging level to another logger. This means -* that messages are logged at the higher of the two levels set by this logger and its delegate. -* */ -class FilterLogger(delegate: AbstractLogger) extends BasicLogger -{ - override lazy val ansiCodesSupported = delegate.ansiCodesSupported - def trace(t: => Throwable) - { - if(traceEnabled) - delegate.trace(t) - } - override def setSuccessEnabled(flag: Boolean) { delegate.setSuccessEnabled(flag) } - override def successEnabled = delegate.successEnabled - override def setTrace(level: Int) { delegate.setTrace(level) } - override def getTrace = delegate.getTrace - def log(level: Level.Value, message: => String) - { - if(atLevel(level)) - delegate.log(level, message) - } - def success(message: => String) - { - if(successEnabled) - delegate.success(message) - } - def control(event: ControlEvent.Value, message: => String) - { - if(atLevel(Level.Info)) - delegate.control(event, message) - } - def logAll(events: Seq[LogEvent]): Unit = delegate.logAll(events) +/** + * A filter logger is used to delegate messages but not the logging level to another logger. This means + * that messages are logged at the higher of the two levels set by this logger and its delegate. + */ +class FilterLogger(delegate: AbstractLogger) extends BasicLogger { + override lazy val ansiCodesSupported = delegate.ansiCodesSupported + def trace(t: => Throwable) { + if (traceEnabled) + delegate.trace(t) + } + override def setSuccessEnabled(flag: Boolean) { delegate.setSuccessEnabled(flag) } + override def successEnabled = delegate.successEnabled + override def setTrace(level: Int) { delegate.setTrace(level) } + override def getTrace = delegate.getTrace + def log(level: Level.Value, message: => String) { + if (atLevel(level)) + delegate.log(level, message) + } + def success(message: => String) { + if (successEnabled) + delegate.success(message) + } + def control(event: ControlEvent.Value, message: => String) { + if (atLevel(Level.Info)) + delegate.control(event, message) + } + def logAll(events: Seq[LogEvent]): Unit = delegate.logAll(events) } diff --git a/util/log/src/main/scala/sbt/FullLogger.scala b/util/log/src/main/scala/sbt/FullLogger.scala index ca88f0b4d..968712317 100644 --- a/util/log/src/main/scala/sbt/FullLogger.scala +++ b/util/log/src/main/scala/sbt/FullLogger.scala @@ -4,32 +4,27 @@ package sbt /** Promotes the simple Logger interface to the full AbstractLogger interface. 
*/ -class FullLogger(delegate: Logger) extends BasicLogger -{ - override val ansiCodesSupported: Boolean = delegate.ansiCodesSupported - def trace(t: => Throwable) - { - if(traceEnabled) - delegate.trace(t) - } - def log(level: Level.Value, message: => String) - { - if(atLevel(level)) - delegate.log(level, message) - } - def success(message: => String): Unit = - if(successEnabled) - delegate.success(message) - def control(event: ControlEvent.Value, message: => String): Unit = - info(message) - def logAll(events: Seq[LogEvent]): Unit = events.foreach(log) +class FullLogger(delegate: Logger) extends BasicLogger { + override val ansiCodesSupported: Boolean = delegate.ansiCodesSupported + def trace(t: => Throwable) { + if (traceEnabled) + delegate.trace(t) + } + def log(level: Level.Value, message: => String) { + if (atLevel(level)) + delegate.log(level, message) + } + def success(message: => String): Unit = + if (successEnabled) + delegate.success(message) + def control(event: ControlEvent.Value, message: => String): Unit = + info(message) + def logAll(events: Seq[LogEvent]): Unit = events.foreach(log) } -object FullLogger -{ - def apply(delegate: Logger): AbstractLogger = - delegate match - { - case d: AbstractLogger => d - case _ => new FullLogger(delegate) - } +object FullLogger { + def apply(delegate: Logger): AbstractLogger = + delegate match { + case d: AbstractLogger => d + case _ => new FullLogger(delegate) + } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/GlobalLogging.scala b/util/log/src/main/scala/sbt/GlobalLogging.scala index 63eb9805a..1cd32653b 100644 --- a/util/log/src/main/scala/sbt/GlobalLogging.scala +++ b/util/log/src/main/scala/sbt/GlobalLogging.scala @@ -3,41 +3,44 @@ */ package sbt - import java.io.{File, PrintWriter} +import java.io.{ File, PrintWriter } -/** Provides the current global logging configuration. -* -* `full` is the current global logger. It should not be set directly because it is generated as needed from `backing.newLogger`. -* `console` is where all logging from all ConsoleLoggers should go. -* `backed` is the Logger that other loggers should feed into. -* `backing` tracks the files that persist the global logging. -* `newLogger` creates a new global logging configuration from a sink and backing configuration. -*/ +/** + * Provides the current global logging configuration. + * + * `full` is the current global logger. It should not be set directly because it is generated as needed from `backing.newLogger`. + * `console` is where all logging from all ConsoleLoggers should go. + * `backed` is the Logger that other loggers should feed into. + * `backing` tracks the files that persist the global logging. + * `newLogger` creates a new global logging configuration from a sink and backing configuration. + */ final case class GlobalLogging(full: Logger, console: ConsoleOut, backed: AbstractLogger, backing: GlobalLogBacking, newLogger: (PrintWriter, GlobalLogBacking) => GlobalLogging) -/** Tracks the files that persist the global logging. -* `file` is the current backing file. `last` is the previous backing file, if there is one. -* `newBackingFile` creates a new temporary location for the next backing file. */ -final case class GlobalLogBacking(file: File, last: Option[File], newBackingFile: () => File) -{ - /** Shifts the current backing file to `last` and sets the current backing to `newFile`. */ - def shift(newFile: File) = GlobalLogBacking(newFile, Some(file), newBackingFile) +/** + * Tracks the files that persist the global logging. 
+ * `file` is the current backing file. `last` is the previous backing file, if there is one. + * `newBackingFile` creates a new temporary location for the next backing file. + */ +final case class GlobalLogBacking(file: File, last: Option[File], newBackingFile: () => File) { + /** Shifts the current backing file to `last` and sets the current backing to `newFile`. */ + def shift(newFile: File) = GlobalLogBacking(newFile, Some(file), newBackingFile) - /** Shifts the current backing file to `last` and sets the current backing to a new temporary file generated by `newBackingFile`. */ - def shiftNew() = shift(newBackingFile()) + /** Shifts the current backing file to `last` and sets the current backing to a new temporary file generated by `newBackingFile`. */ + def shiftNew() = shift(newBackingFile()) - /** If there is a previous backing file in `last`, that becomes the current backing file and the previous backing is cleared. - * Otherwise, no changes are made. */ - def unshift = GlobalLogBacking(last getOrElse file, None, newBackingFile) + /** + * If there is a previous backing file in `last`, that becomes the current backing file and the previous backing is cleared. + * Otherwise, no changes are made. + */ + def unshift = GlobalLogBacking(last getOrElse file, None, newBackingFile) } object GlobalLogBacking { - def apply(newBackingFile: => File): GlobalLogBacking = GlobalLogBacking(newBackingFile, None, newBackingFile _) + def apply(newBackingFile: => File): GlobalLogBacking = GlobalLogBacking(newBackingFile, None, newBackingFile _) } -object GlobalLogging -{ - def initial(newLogger: (PrintWriter, GlobalLogBacking) => GlobalLogging, newBackingFile: => File, console: ConsoleOut): GlobalLogging = - { - val log = ConsoleLogger(console) - GlobalLogging(log, console, log, GlobalLogBacking(newBackingFile), newLogger) - } +object GlobalLogging { + def initial(newLogger: (PrintWriter, GlobalLogBacking) => GlobalLogging, newBackingFile: => File, console: ConsoleOut): GlobalLogging = + { + val log = ConsoleLogger(console) + GlobalLogging(log, console, log, GlobalLogBacking(newBackingFile), newLogger) + } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/Level.scala b/util/log/src/main/scala/sbt/Level.scala index f501cd40c..7744b9495 100644 --- a/util/log/src/main/scala/sbt/Level.scala +++ b/util/log/src/main/scala/sbt/Level.scala @@ -1,25 +1,28 @@ /* sbt -- Simple Build Tool * Copyright 2008, 2009 Mark Harrah */ - package sbt +package sbt -/** An enumeration defining the levels available for logging. A level includes all of the levels -* with id larger than its own id. For example, Warn (id=3) includes Error (id=4).*/ -object Level extends Enumeration -{ - val Debug = Value(1, "debug") - val Info = Value(2, "info") - val Warn = Value(3, "warn") - val Error = Value(4, "error") - /** Defines the label to use for success messages. - * Because the label for levels is defined in this module, the success label is also defined here. */ - val SuccessLabel = "success" +/** + * An enumeration defining the levels available for logging. A level includes all of the levels + * with id larger than its own id. For example, Warn (id=3) includes Error (id=4). + */ +object Level extends Enumeration { + val Debug = Value(1, "debug") + val Info = Value(2, "info") + val Warn = Value(3, "warn") + val Error = Value(4, "error") + /** + * Defines the label to use for success messages. + * Because the label for levels is defined in this module, the success label is also defined here. 
+ */ + val SuccessLabel = "success" - def union(a: Value, b: Value) = if(a.id < b.id) a else b - def unionAll(vs: Seq[Value]) = vs reduceLeft union + def union(a: Value, b: Value) = if (a.id < b.id) a else b + def unionAll(vs: Seq[Value]) = vs reduceLeft union - /** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */ - def apply(s: String) = values.find(s == _.toString) - /** Same as apply, defined for use in pattern matching. */ - private[sbt] def unapply(s: String) = apply(s) + /** Returns the level with the given name wrapped in Some, or None if no level exists for that name. */ + def apply(s: String) = values.find(s == _.toString) + /** Same as apply, defined for use in pattern matching. */ + private[sbt] def unapply(s: String) = apply(s) } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/LogEvent.scala b/util/log/src/main/scala/sbt/LogEvent.scala index 7bd91c2a4..d48957c75 100644 --- a/util/log/src/main/scala/sbt/LogEvent.scala +++ b/util/log/src/main/scala/sbt/LogEvent.scala @@ -1,7 +1,7 @@ /* sbt -- Simple Build Tool * Copyright 2008, 2009 Mark Harrah */ - package sbt +package sbt sealed trait LogEvent extends NotNull final class Success(val msg: String) extends LogEvent @@ -12,7 +12,6 @@ final class SetTrace(val level: Int) extends LogEvent final class SetSuccess(val enabled: Boolean) extends LogEvent final class ControlEvent(val event: ControlEvent.Value, val msg: String) extends LogEvent -object ControlEvent extends Enumeration -{ - val Start, Header, Finish = Value +object ControlEvent extends Enumeration { + val Start, Header, Finish = Value } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/Logger.scala b/util/log/src/main/scala/sbt/Logger.scala index c556f620c..c507484ce 100644 --- a/util/log/src/main/scala/sbt/Logger.scala +++ b/util/log/src/main/scala/sbt/Logger.scala @@ -1,138 +1,133 @@ /* sbt -- Simple Build Tool * Copyright 2008, 2009, 2010 Mark Harrah */ - package sbt +package sbt - import xsbti.{Logger => xLogger, F0} - import xsbti.{Maybe,Position,Problem,Severity} +import xsbti.{ Logger => xLogger, F0 } +import xsbti.{ Maybe, Position, Problem, Severity } - import java.io.File +import java.io.File -abstract class AbstractLogger extends Logger -{ - def getLevel: Level.Value - def setLevel(newLevel: Level.Value) - def setTrace(flag: Int) - def getTrace: Int - final def traceEnabled = getTrace >= 0 - def successEnabled: Boolean - def setSuccessEnabled(flag: Boolean): Unit +abstract class AbstractLogger extends Logger { + def getLevel: Level.Value + def setLevel(newLevel: Level.Value) + def setTrace(flag: Int) + def getTrace: Int + final def traceEnabled = getTrace >= 0 + def successEnabled: Boolean + def setSuccessEnabled(flag: Boolean): Unit - def atLevel(level: Level.Value) = level.id >= getLevel.id - def control(event: ControlEvent.Value, message: => String): Unit + def atLevel(level: Level.Value) = level.id >= getLevel.id + def control(event: ControlEvent.Value, message: => String): Unit - def logAll(events: Seq[LogEvent]): Unit - /** Defined in terms of other methods in Logger and should not be called from them. 
*/ - final def log(event: LogEvent) - { - event match - { - case s: Success => success(s.msg) - case l: Log => log(l.level, l.msg) - case t: Trace => trace(t.exception) - case setL: SetLevel => setLevel(setL.newLevel) - case setT: SetTrace => setTrace(setT.level) - case setS: SetSuccess => setSuccessEnabled(setS.enabled) - case c: ControlEvent => control(c.event, c.msg) - } - } + def logAll(events: Seq[LogEvent]): Unit + /** Defined in terms of other methods in Logger and should not be called from them. */ + final def log(event: LogEvent) { + event match { + case s: Success => success(s.msg) + case l: Log => log(l.level, l.msg) + case t: Trace => trace(t.exception) + case setL: SetLevel => setLevel(setL.newLevel) + case setT: SetTrace => setTrace(setT.level) + case setS: SetSuccess => setSuccessEnabled(setS.enabled) + case c: ControlEvent => control(c.event, c.msg) + } + } } -object Logger -{ - def transferLevels(oldLog: AbstractLogger, newLog: AbstractLogger) { - newLog.setLevel(oldLog.getLevel) - newLog.setTrace(oldLog.getTrace) - } +object Logger { + def transferLevels(oldLog: AbstractLogger, newLog: AbstractLogger) { + newLog.setLevel(oldLog.getLevel) + newLog.setTrace(oldLog.getTrace) + } - // make public in 0.13 - private[sbt] val Null: AbstractLogger = new AbstractLogger { - def getLevel: Level.Value = Level.Error - def setLevel(newLevel: Level.Value) {} - def getTrace = 0 - def setTrace(flag: Int) {} - def successEnabled = false - def setSuccessEnabled(flag: Boolean) {} - def control(event: ControlEvent.Value, message: => String) {} - def logAll(events: Seq[LogEvent]) {} - def trace(t: => Throwable) {} - def success(message: => String) {} - def log(level: Level.Value, message: => String) {} - } + // make public in 0.13 + private[sbt] val Null: AbstractLogger = new AbstractLogger { + def getLevel: Level.Value = Level.Error + def setLevel(newLevel: Level.Value) {} + def getTrace = 0 + def setTrace(flag: Int) {} + def successEnabled = false + def setSuccessEnabled(flag: Boolean) {} + def control(event: ControlEvent.Value, message: => String) {} + def logAll(events: Seq[LogEvent]) {} + def trace(t: => Throwable) {} + def success(message: => String) {} + def log(level: Level.Value, message: => String) {} + } - implicit def absLog2PLog(log: AbstractLogger): ProcessLogger = new BufferedLogger(log) with ProcessLogger - implicit def log2PLog(log: Logger): ProcessLogger = absLog2PLog(new FullLogger(log)) - implicit def xlog2Log(lg: xLogger): Logger = lg match { - case l: Logger => l - case _ => wrapXLogger(lg) - } - private[this] def wrapXLogger(lg: xLogger): Logger = new Logger { - override def debug(msg: F0[String]): Unit = lg.debug(msg) - override def warn(msg: F0[String]): Unit = lg.warn(msg) - override def info(msg: F0[String]): Unit = lg.info(msg) - override def error(msg: F0[String]): Unit = lg.error(msg) - override def trace(msg: F0[Throwable]) = lg.trace(msg) - override def log(level: Level.Value, msg: F0[String]) = lg.log(level, msg) - def trace(t: => Throwable) = trace(f0(t)) - def success(s: => String) = info(f0(s)) - def log(level: Level.Value, msg: => String) = - { - val fmsg = f0(msg) - level match - { - case Level.Debug => lg.debug(fmsg) - case Level.Info => lg.info(fmsg) - case Level.Warn => lg.warn(fmsg) - case Level.Error => lg.error(fmsg) - } - } - } - def f0[T](t: =>T): F0[T] = new F0[T] { def apply = t } + implicit def absLog2PLog(log: AbstractLogger): ProcessLogger = new BufferedLogger(log) with ProcessLogger + implicit def log2PLog(log: Logger): ProcessLogger = 
absLog2PLog(new FullLogger(log)) + implicit def xlog2Log(lg: xLogger): Logger = lg match { + case l: Logger => l + case _ => wrapXLogger(lg) + } + private[this] def wrapXLogger(lg: xLogger): Logger = new Logger { + override def debug(msg: F0[String]): Unit = lg.debug(msg) + override def warn(msg: F0[String]): Unit = lg.warn(msg) + override def info(msg: F0[String]): Unit = lg.info(msg) + override def error(msg: F0[String]): Unit = lg.error(msg) + override def trace(msg: F0[Throwable]) = lg.trace(msg) + override def log(level: Level.Value, msg: F0[String]) = lg.log(level, msg) + def trace(t: => Throwable) = trace(f0(t)) + def success(s: => String) = info(f0(s)) + def log(level: Level.Value, msg: => String) = + { + val fmsg = f0(msg) + level match { + case Level.Debug => lg.debug(fmsg) + case Level.Info => lg.info(fmsg) + case Level.Warn => lg.warn(fmsg) + case Level.Error => lg.error(fmsg) + } + } + } + def f0[T](t: => T): F0[T] = new F0[T] { def apply = t } - def m2o[S](m: Maybe[S]): Option[S] = if(m.isDefined) Some(m.get) else None - def o2m[S](o: Option[S]): Maybe[S] = o match { case Some(v) => Maybe.just(v); case None => Maybe.nothing() } + def m2o[S](m: Maybe[S]): Option[S] = if (m.isDefined) Some(m.get) else None + def o2m[S](o: Option[S]): Maybe[S] = o match { case Some(v) => Maybe.just(v); case None => Maybe.nothing() } - def position(line0: Option[Integer], content: String, offset0: Option[Integer], pointer0: Option[Integer], pointerSpace0: Option[String], sourcePath0: Option[String], sourceFile0: Option[File]): Position = - new Position { - val line = o2m(line0) - val lineContent = content - val offset = o2m(offset0) - val pointer = o2m(pointer0) - val pointerSpace = o2m(pointerSpace0) - val sourcePath = o2m(sourcePath0) - val sourceFile = o2m(sourceFile0) - } + def position(line0: Option[Integer], content: String, offset0: Option[Integer], pointer0: Option[Integer], pointerSpace0: Option[String], sourcePath0: Option[String], sourceFile0: Option[File]): Position = + new Position { + val line = o2m(line0) + val lineContent = content + val offset = o2m(offset0) + val pointer = o2m(pointer0) + val pointerSpace = o2m(pointerSpace0) + val sourcePath = o2m(sourcePath0) + val sourceFile = o2m(sourceFile0) + } - def problem(cat: String, pos: Position, msg: String, sev: Severity): Problem = - new Problem - { - val category = cat - val position = pos - val message = msg - val severity = sev - } + def problem(cat: String, pos: Position, msg: String, sev: Severity): Problem = + new Problem { + val category = cat + val position = pos + val message = msg + val severity = sev + } } -/** This is intended to be the simplest logging interface for use by code that wants to log. -* It does not include configuring the logger. */ -trait Logger extends xLogger -{ - final def verbose(message: => String): Unit = debug(message) - final def debug(message: => String): Unit = log(Level.Debug, message) - final def info(message: => String): Unit = log(Level.Info, message) - final def warn(message: => String): Unit = log(Level.Warn, message) - final def error(message: => String): Unit = log(Level.Error, message) +/** + * This is intended to be the simplest logging interface for use by code that wants to log. + * It does not include configuring the logger. 
+ */ +trait Logger extends xLogger { + final def verbose(message: => String): Unit = debug(message) + final def debug(message: => String): Unit = log(Level.Debug, message) + final def info(message: => String): Unit = log(Level.Info, message) + final def warn(message: => String): Unit = log(Level.Warn, message) + final def error(message: => String): Unit = log(Level.Error, message) - def ansiCodesSupported = false - - def trace(t: => Throwable): Unit - def success(message: => String): Unit - def log(level: Level.Value, message: => String): Unit - - def debug(msg: F0[String]): Unit = log(Level.Debug, msg) - def warn(msg: F0[String]): Unit = log(Level.Warn, msg) - def info(msg: F0[String]): Unit = log(Level.Info, msg) - def error(msg: F0[String]): Unit = log(Level.Error, msg) - def trace(msg: F0[Throwable]) = trace(msg.apply) - def log(level: Level.Value, msg: F0[String]): Unit = log(level, msg.apply) + def ansiCodesSupported = false + + def trace(t: => Throwable): Unit + def success(message: => String): Unit + def log(level: Level.Value, message: => String): Unit + + def debug(msg: F0[String]): Unit = log(Level.Debug, msg) + def warn(msg: F0[String]): Unit = log(Level.Warn, msg) + def info(msg: F0[String]): Unit = log(Level.Info, msg) + def error(msg: F0[String]): Unit = log(Level.Error, msg) + def trace(msg: F0[Throwable]) = trace(msg.apply) + def log(level: Level.Value, msg: F0[String]): Unit = log(level, msg.apply) } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/LoggerWriter.scala b/util/log/src/main/scala/sbt/LoggerWriter.scala index aeb67ce72..0165676f5 100644 --- a/util/log/src/main/scala/sbt/LoggerWriter.scala +++ b/util/log/src/main/scala/sbt/LoggerWriter.scala @@ -3,49 +3,47 @@ */ package sbt -/** Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at `level`. -* A line is delimited by `nl`, which is by default the platform line separator.*/ -class LoggerWriter(delegate: Logger, unbufferedLevel: Option[Level.Value], nl: String = System.getProperty("line.separator")) extends java.io.Writer -{ - def this(delegate: Logger, level: Level.Value) = this(delegate, Some(level)) - def this(delegate: Logger) = this(delegate, None) - - private[this] val buffer = new StringBuilder - private[this] val lines = new collection.mutable.ListBuffer[String] +/** + * Provides a `java.io.Writer` interface to a `Logger`. Content is line-buffered and logged at `level`. + * A line is delimited by `nl`, which is by default the platform line separator. 
+ */ +class LoggerWriter(delegate: Logger, unbufferedLevel: Option[Level.Value], nl: String = System.getProperty("line.separator")) extends java.io.Writer { + def this(delegate: Logger, level: Level.Value) = this(delegate, Some(level)) + def this(delegate: Logger) = this(delegate, None) - override def close() = flush() - override def flush(): Unit = - synchronized { - if(buffer.length > 0) - { - log(buffer.toString) - buffer.clear() - } - } - def flushLines(level: Level.Value): Unit = - synchronized { - for(line <- lines) - delegate.log(level, line) - lines.clear() - } - override def write(content: Array[Char], offset: Int, length: Int): Unit = - synchronized { - buffer.appendAll(content, offset, length) - process() - } + private[this] val buffer = new StringBuilder + private[this] val lines = new collection.mutable.ListBuffer[String] - private[this] def process() - { - val i = buffer.indexOf(nl) - if(i >= 0) - { - log(buffer.substring(0, i)) - buffer.delete(0, i + nl.length) - process() - } - } - private[this] def log(s: String): Unit = unbufferedLevel match { - case None => lines += s - case Some(level) => delegate.log(level, s) - } + override def close() = flush() + override def flush(): Unit = + synchronized { + if (buffer.length > 0) { + log(buffer.toString) + buffer.clear() + } + } + def flushLines(level: Level.Value): Unit = + synchronized { + for (line <- lines) + delegate.log(level, line) + lines.clear() + } + override def write(content: Array[Char], offset: Int, length: Int): Unit = + synchronized { + buffer.appendAll(content, offset, length) + process() + } + + private[this] def process() { + val i = buffer.indexOf(nl) + if (i >= 0) { + log(buffer.substring(0, i)) + buffer.delete(0, i + nl.length) + process() + } + } + private[this] def log(s: String): Unit = unbufferedLevel match { + case None => lines += s + case Some(level) => delegate.log(level, s) + } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/MainLogging.scala b/util/log/src/main/scala/sbt/MainLogging.scala index 5611dbd48..48015ad44 100644 --- a/util/log/src/main/scala/sbt/MainLogging.scala +++ b/util/log/src/main/scala/sbt/MainLogging.scala @@ -1,52 +1,51 @@ package sbt - import java.io.PrintWriter +import java.io.PrintWriter -object MainLogging -{ - def multiLogger(config: MultiLoggerConfig): Logger = - { - import config._ - val multi = new MultiLogger(console :: backed :: extra) - // sets multi to the most verbose for clients that inspect the current level - multi setLevel Level.unionAll(backingLevel :: screenLevel :: extra.map(_.getLevel)) - // set the specific levels - console setLevel screenLevel - backed setLevel backingLevel - console setTrace screenTrace - backed setTrace backingTrace - multi: Logger - } +object MainLogging { + def multiLogger(config: MultiLoggerConfig): Logger = + { + import config._ + val multi = new MultiLogger(console :: backed :: extra) + // sets multi to the most verbose for clients that inspect the current level + multi setLevel Level.unionAll(backingLevel :: screenLevel :: extra.map(_.getLevel)) + // set the specific levels + console setLevel screenLevel + backed setLevel backingLevel + console setTrace screenTrace + backed setTrace backingTrace + multi: Logger + } - def globalDefault(console: ConsoleOut): (PrintWriter, GlobalLogBacking) => GlobalLogging = - { - lazy val f: (PrintWriter, GlobalLogBacking) => GlobalLogging = (writer, backing) => { - val backed = defaultBacked()(writer) - val full = multiLogger(defaultMultiConfig(console, backed ) ) - 
GlobalLogging(full, console, backed, backing, f) - } - f - } + def globalDefault(console: ConsoleOut): (PrintWriter, GlobalLogBacking) => GlobalLogging = + { + lazy val f: (PrintWriter, GlobalLogBacking) => GlobalLogging = (writer, backing) => { + val backed = defaultBacked()(writer) + val full = multiLogger(defaultMultiConfig(console, backed)) + GlobalLogging(full, console, backed, backing, f) + } + f + } - @deprecated("Explicitly specify the console output.", "0.13.0") - def defaultMultiConfig(backing: AbstractLogger): MultiLoggerConfig = - defaultMultiConfig(ConsoleOut.systemOut, backing) - def defaultMultiConfig(console: ConsoleOut, backing: AbstractLogger): MultiLoggerConfig = - new MultiLoggerConfig(defaultScreen(console, ConsoleLogger.noSuppressedMessage), backing, Nil, Level.Info, Level.Debug, -1, Int.MaxValue) + @deprecated("Explicitly specify the console output.", "0.13.0") + def defaultMultiConfig(backing: AbstractLogger): MultiLoggerConfig = + defaultMultiConfig(ConsoleOut.systemOut, backing) + def defaultMultiConfig(console: ConsoleOut, backing: AbstractLogger): MultiLoggerConfig = + new MultiLoggerConfig(defaultScreen(console, ConsoleLogger.noSuppressedMessage), backing, Nil, Level.Info, Level.Debug, -1, Int.MaxValue) - @deprecated("Explicitly specify the console output.", "0.13.0") - def defaultScreen(): AbstractLogger = ConsoleLogger() + @deprecated("Explicitly specify the console output.", "0.13.0") + def defaultScreen(): AbstractLogger = ConsoleLogger() - @deprecated("Explicitly specify the console output.", "0.13.0") - def defaultScreen(suppressedMessage: SuppressedTraceContext => Option[String]): AbstractLogger = ConsoleLogger(suppressedMessage = suppressedMessage) + @deprecated("Explicitly specify the console output.", "0.13.0") + def defaultScreen(suppressedMessage: SuppressedTraceContext => Option[String]): AbstractLogger = ConsoleLogger(suppressedMessage = suppressedMessage) - def defaultScreen(console: ConsoleOut): AbstractLogger = ConsoleLogger(console) - def defaultScreen(console: ConsoleOut, suppressedMessage: SuppressedTraceContext => Option[String]): AbstractLogger = - ConsoleLogger(console, suppressedMessage = suppressedMessage) - - def defaultBacked(useColor: Boolean = ConsoleLogger.formatEnabled): PrintWriter => ConsoleLogger = - to => ConsoleLogger(ConsoleOut.printWriterOut(to), useColor = useColor) + def defaultScreen(console: ConsoleOut): AbstractLogger = ConsoleLogger(console) + def defaultScreen(console: ConsoleOut, suppressedMessage: SuppressedTraceContext => Option[String]): AbstractLogger = + ConsoleLogger(console, suppressedMessage = suppressedMessage) + + def defaultBacked(useColor: Boolean = ConsoleLogger.formatEnabled): PrintWriter => ConsoleLogger = + to => ConsoleLogger(ConsoleOut.printWriterOut(to), useColor = useColor) } final case class MultiLoggerConfig(console: AbstractLogger, backed: AbstractLogger, extra: List[AbstractLogger], - screenLevel: Level.Value, backingLevel: Level.Value, screenTrace: Int, backingTrace: Int) \ No newline at end of file + screenLevel: Level.Value, backingLevel: Level.Value, screenTrace: Int, backingTrace: Int) \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/MultiLogger.scala b/util/log/src/main/scala/sbt/MultiLogger.scala index cd73bf2c3..77c4c11d4 100644 --- a/util/log/src/main/scala/sbt/MultiLogger.scala +++ b/util/log/src/main/scala/sbt/MultiLogger.scala @@ -6,50 +6,45 @@ package sbt // note that setting the logging level on this logger has no effect on its behavior, only // on the 
behavior of the delegates. -class MultiLogger(delegates: List[AbstractLogger]) extends BasicLogger -{ - override lazy val ansiCodesSupported = delegates exists supported - private[this] lazy val allSupportCodes = delegates forall supported - private[this] def supported = (_: AbstractLogger).ansiCodesSupported +class MultiLogger(delegates: List[AbstractLogger]) extends BasicLogger { + override lazy val ansiCodesSupported = delegates exists supported + private[this] lazy val allSupportCodes = delegates forall supported + private[this] def supported = (_: AbstractLogger).ansiCodesSupported - override def setLevel(newLevel: Level.Value) - { - super.setLevel(newLevel) - dispatch(new SetLevel(newLevel)) - } - override def setTrace(level: Int) - { - super.setTrace(level) - dispatch(new SetTrace(level)) - } - override def setSuccessEnabled(flag: Boolean) - { - super.setSuccessEnabled(flag) - dispatch(new SetSuccess(flag)) - } - def trace(t: => Throwable) { dispatch(new Trace(t)) } - def log(level: Level.Value, message: => String) { dispatch(new Log(level, message)) } - def success(message: => String) { dispatch(new Success(message)) } - def logAll(events: Seq[LogEvent]) { delegates.foreach(_.logAll(events)) } - def control(event: ControlEvent.Value, message: => String) { delegates.foreach(_.control(event, message)) } - private[this] def dispatch(event: LogEvent) - { - val plainEvent = if(allSupportCodes) event else removeEscapes(event) - for( d <- delegates) - if(d.ansiCodesSupported) - d.log(event) - else - d.log(plainEvent) - } + override def setLevel(newLevel: Level.Value) { + super.setLevel(newLevel) + dispatch(new SetLevel(newLevel)) + } + override def setTrace(level: Int) { + super.setTrace(level) + dispatch(new SetTrace(level)) + } + override def setSuccessEnabled(flag: Boolean) { + super.setSuccessEnabled(flag) + dispatch(new SetSuccess(flag)) + } + def trace(t: => Throwable) { dispatch(new Trace(t)) } + def log(level: Level.Value, message: => String) { dispatch(new Log(level, message)) } + def success(message: => String) { dispatch(new Success(message)) } + def logAll(events: Seq[LogEvent]) { delegates.foreach(_.logAll(events)) } + def control(event: ControlEvent.Value, message: => String) { delegates.foreach(_.control(event, message)) } + private[this] def dispatch(event: LogEvent) { + val plainEvent = if (allSupportCodes) event else removeEscapes(event) + for (d <- delegates) + if (d.ansiCodesSupported) + d.log(event) + else + d.log(plainEvent) + } - private[this] def removeEscapes(event: LogEvent): LogEvent = - { - import ConsoleLogger.{removeEscapeSequences => rm} - event match { - case s: Success => new Success(rm(s.msg)) - case l: Log => new Log(l.level, rm(l.msg)) - case ce: ControlEvent => new ControlEvent(ce.event, rm(ce.msg)) - case _: Trace | _: SetLevel | _: SetTrace | _: SetSuccess => event - } - } + private[this] def removeEscapes(event: LogEvent): LogEvent = + { + import ConsoleLogger.{ removeEscapeSequences => rm } + event match { + case s: Success => new Success(rm(s.msg)) + case l: Log => new Log(l.level, rm(l.msg)) + case ce: ControlEvent => new ControlEvent(ce.event, rm(ce.msg)) + case _: Trace | _: SetLevel | _: SetTrace | _: SetSuccess => event + } + } } \ No newline at end of file diff --git a/util/log/src/main/scala/sbt/StackTrace.scala b/util/log/src/main/scala/sbt/StackTrace.scala index 1ecd6e8bf..70554c5ec 100644 --- a/util/log/src/main/scala/sbt/StackTrace.scala +++ b/util/log/src/main/scala/sbt/StackTrace.scala @@ -3,61 +3,60 @@ */ package sbt -object 
StackTrace -{ - def isSbtClass(name: String) = name.startsWith("sbt") || name.startsWith("xsbt") - /** - * Return a printable representation of the stack trace associated - * with t. Information about t and its Throwable causes is included. - * The number of lines to be included for each Throwable is configured - * via d which should be greater than or equal to zero. If d is zero, - * then all elements are included up to (but not including) the first - * element that comes from sbt. If d is greater than zero, then up to - * that many lines are included, where the line for the Throwable is - * counted plus one line for each stack element. Less lines will be - * included if there are not enough stack elements. - */ - def trimmed(t : Throwable, d : Int) : String = { - require(d >= 0) - val b = new StringBuilder () +object StackTrace { + def isSbtClass(name: String) = name.startsWith("sbt") || name.startsWith("xsbt") + /** + * Return a printable representation of the stack trace associated + * with t. Information about t and its Throwable causes is included. + * The number of lines to be included for each Throwable is configured + * via d which should be greater than or equal to zero. If d is zero, + * then all elements are included up to (but not including) the first + * element that comes from sbt. If d is greater than zero, then up to + * that many lines are included, where the line for the Throwable is + * counted plus one line for each stack element. Less lines will be + * included if there are not enough stack elements. + */ + def trimmed(t: Throwable, d: Int): String = { + require(d >= 0) + val b = new StringBuilder() - def appendStackTrace (t : Throwable, first : Boolean) { + def appendStackTrace(t: Throwable, first: Boolean) { - val include : StackTraceElement => Boolean = - if (d == 0) - element => !isSbtClass(element.getClassName) - else { - var count = d - 1 - (_ => { count -= 1; count >= 0 }) - } + val include: StackTraceElement => Boolean = + if (d == 0) + element => !isSbtClass(element.getClassName) + else { + var count = d - 1 + (_ => { count -= 1; count >= 0 }) + } - def appendElement (e : StackTraceElement) { - b.append ("\tat ") - b.append (e) - b.append ('\n') - } + def appendElement(e: StackTraceElement) { + b.append("\tat ") + b.append(e) + b.append('\n') + } - if (!first) - b.append ("Caused by: ") - b.append (t) - b.append ('\n') + if (!first) + b.append("Caused by: ") + b.append(t) + b.append('\n') - val els = t.getStackTrace () - var i = 0 - while ((i < els.size) && include (els (i))) { - appendElement (els (i)) - i += 1 - } + val els = t.getStackTrace() + var i = 0 + while ((i < els.size) && include(els(i))) { + appendElement(els(i)) + i += 1 + } - } + } - appendStackTrace (t, true) - var c = t - while (c.getCause () != null) { - c = c.getCause () - appendStackTrace (c, false) - } - b.toString () + appendStackTrace(t, true) + var c = t + while (c.getCause() != null) { + c = c.getCause() + appendStackTrace(c, false) + } + b.toString() - } + } } \ No newline at end of file diff --git a/util/logic/src/main/scala/sbt/logic/Logic.scala b/util/logic/src/main/scala/sbt/logic/Logic.scala index 4eb8e64b1..7ec73c15e 100644 --- a/util/logic/src/main/scala/sbt/logic/Logic.scala +++ b/util/logic/src/main/scala/sbt/logic/Logic.scala @@ -1,8 +1,8 @@ package sbt package logic - import scala.annotation.tailrec - import Formula.{And, True} +import scala.annotation.tailrec +import Formula.{ And, True } /* Defines a propositional logic with negation as failure and only allows stratified 
rule sets (negation must be acyclic) in order to have a unique minimal model. @@ -26,10 +26,9 @@ as is this: + http://www.w3.org/2005/rules/wg/wiki/negation */ - /** Disjunction (or) of the list of clauses. */ final case class Clauses(clauses: List[Clause]) { - assert(clauses.nonEmpty, "At least one clause is required.") + assert(clauses.nonEmpty, "At least one clause is required.") } /** When the `body` Formula succeeds, atoms in `head` are true. */ @@ -37,289 +36,301 @@ final case class Clause(body: Formula, head: Set[Atom]) /** A literal is an [[Atom]] or its [[negation|Negated]]. */ sealed abstract class Literal extends Formula { - /** The underlying (positive) atom. */ - def atom: Atom - /** Negates this literal.*/ - def unary_! : Literal + /** The underlying (positive) atom. */ + def atom: Atom + /** Negates this literal.*/ + def unary_! : Literal } /** A variable with name `label`. */ final case class Atom(label: String) extends Literal { - def atom = this - def unary_! : Negated = Negated(this) + def atom = this + def unary_! : Negated = Negated(this) } -/** A negated atom, in the sense of negation as failure, not logical negation. -* That is, it is true if `atom` is not known/defined. */ +/** + * A negated atom, in the sense of negation as failure, not logical negation. + * That is, it is true if `atom` is not known/defined. + */ final case class Negated(atom: Atom) extends Literal { - def unary_! : Atom = atom + def unary_! : Atom = atom } -/** A formula consists of variables, negation, and conjunction (and). -* (Disjunction is not currently included- it is modeled at the level of a sequence of clauses. -* This is less convenient when defining clauses, but is not less powerful.) */ +/** + * A formula consists of variables, negation, and conjunction (and). + * (Disjunction is not currently included- it is modeled at the level of a sequence of clauses. + * This is less convenient when defining clauses, but is not less powerful.) + */ sealed abstract class Formula { - /** Constructs a clause that proves `atoms` when this formula is true. */ - def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet) + /** Constructs a clause that proves `atoms` when this formula is true. */ + def proves(atom: Atom, atoms: Atom*): Clause = Clause(this, (atom +: atoms).toSet) - /** Constructs a formula that is true iff this formula and `f` are both true.*/ - def && (f: Formula): Formula = (this, f) match { - case (True, x) => x - case (x, True) => x - case (And(as), And(bs)) => And(as ++ bs) - case (And(as), b: Literal) => And(as + b) - case (a: Literal, And(bs)) => And(bs + a) - case (a: Literal, b: Literal) => And( Set(a,b) ) - } + /** Constructs a formula that is true iff this formula and `f` are both true.*/ + def &&(f: Formula): Formula = (this, f) match { + case (True, x) => x + case (x, True) => x + case (And(as), And(bs)) => And(as ++ bs) + case (And(as), b: Literal) => And(as + b) + case (a: Literal, And(bs)) => And(bs + a) + case (a: Literal, b: Literal) => And(Set(a, b)) + } } - object Formula { - /** A conjunction of literals. */ - final case class And(literals: Set[Literal]) extends Formula { - assert(literals.nonEmpty, "'And' requires at least one literal.") - } - final case object True extends Formula + /** A conjunction of literals. 
*/ + final case class And(literals: Set[Literal]) extends Formula { + assert(literals.nonEmpty, "'And' requires at least one literal.") + } + final case object True extends Formula } -object Logic -{ - def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Either[LogicException, Matched] = - reduce(Clauses(clauses), initialFacts) +object Logic { + def reduceAll(clauses: List[Clause], initialFacts: Set[Literal]): Either[LogicException, Matched] = + reduce(Clauses(clauses), initialFacts) - /** Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`. - * `clause` may not have any negative feedback (that is, negation is acyclic) - * and `initialFacts` cannot be in the head of any clauses in `clause`. - * These restrictions ensure that the logic program has a unique minimal model. */ - def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = - { - val (posSeq, negSeq) = separate(initialFacts.toSeq) - val (pos, neg) = (posSeq.toSet, negSeq.toSet) + /** + * Computes the variables in the unique stable model for the program represented by `clauses` and `initialFacts`. + * `clause` may not have any negative feedback (that is, negation is acyclic) + * and `initialFacts` cannot be in the head of any clauses in `clause`. + * These restrictions ensure that the logic program has a unique minimal model. + */ + def reduce(clauses: Clauses, initialFacts: Set[Literal]): Either[LogicException, Matched] = + { + val (posSeq, negSeq) = separate(initialFacts.toSeq) + val (pos, neg) = (posSeq.toSet, negSeq.toSet) - val problem = - checkContradictions(pos, neg) orElse - checkOverlap(clauses, pos) orElse - checkAcyclic(clauses) + val problem = + checkContradictions(pos, neg) orElse + checkOverlap(clauses, pos) orElse + checkAcyclic(clauses) - problem.toLeft( - reduce0(clauses, initialFacts, Matched.empty) - ) - } + problem.toLeft( + reduce0(clauses, initialFacts, Matched.empty) + ) + } + /** + * Verifies `initialFacts` are not in the head of any `clauses`. + * This avoids the situation where an atom is proved but no clauses prove it. + * This isn't necessarily a problem, but the main sbt use cases expects + * a proven atom to have at least one clause satisfied. + */ + private[this] def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]): Option[InitialOverlap] = { + val as = atoms(clauses) + val initialOverlap = initialFacts.filter(as.inHead) + if (initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None + } - /** Verifies `initialFacts` are not in the head of any `clauses`. - * This avoids the situation where an atom is proved but no clauses prove it. - * This isn't necessarily a problem, but the main sbt use cases expects - * a proven atom to have at least one clause satisfied. 
*/ - private[this] def checkOverlap(clauses: Clauses, initialFacts: Set[Atom]): Option[InitialOverlap] = { - val as = atoms(clauses) - val initialOverlap = initialFacts.filter(as.inHead) - if(initialOverlap.nonEmpty) Some(new InitialOverlap(initialOverlap)) else None - } + private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]): Option[InitialContradictions] = { + val contradictions = pos intersect neg + if (contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None + } - private[this] def checkContradictions(pos: Set[Atom], neg: Set[Atom]): Option[InitialContradictions] = { - val contradictions = pos intersect neg - if(contradictions.nonEmpty) Some(new InitialContradictions(contradictions)) else None - } + private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = { + val deps = dependencyMap(clauses) + val cycle = Dag.findNegativeCycle(graph(deps)) + if (cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None + } + private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] { + type Arrow = Literal + def nodes = deps.keys.toList + def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList + def isNegative(b: Literal) = b match { + case Negated(_) => true + case Atom(_) => false + } + def head(b: Literal) = b.atom + } - private[this] def checkAcyclic(clauses: Clauses): Option[CyclicNegation] = { - val deps = dependencyMap(clauses) - val cycle = Dag.findNegativeCycle(graph(deps)) - if(cycle.nonEmpty) Some(new CyclicNegation(cycle)) else None - } - private[this] def graph(deps: Map[Atom, Set[Literal]]) = new Dag.DirectedSignedGraph[Atom] { - type Arrow = Literal - def nodes = deps.keys.toList - def dependencies(a: Atom) = deps.getOrElse(a, Set.empty).toList - def isNegative(b: Literal) = b match { - case Negated(_) => true - case Atom(_) => false - } - def head(b: Literal) = b.atom - } + private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] = + (Map.empty[Atom, Set[Literal]] /: clauses.clauses) { + case (m, Clause(formula, heads)) => + val deps = literals(formula) + (m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) } + } - private[this] def dependencyMap(clauses: Clauses): Map[Atom, Set[Literal]] = - (Map.empty[Atom, Set[Literal]] /: clauses.clauses) { - case (m, Clause(formula, heads)) => - val deps = literals(formula) - (m /: heads) { (n, head) => n.updated(head, n.getOrElse(head, Set.empty) ++ deps) } - } + sealed abstract class LogicException(override val toString: String) + final class InitialContradictions(val literals: Set[Atom]) extends LogicException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) + final class InitialOverlap(val literals: Set[Atom]) extends LogicException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) + final class CyclicNegation(val cycle: List[Literal]) extends LogicException("Negation may not be involved in a cycle:\n\t" + cycle.mkString("\n\t")) - sealed abstract class LogicException(override val toString: String) - final class InitialContradictions(val literals: Set[Atom]) extends LogicException("Initial facts cannot be both true and false:\n\t" + literals.mkString("\n\t")) - final class InitialOverlap(val literals: Set[Atom]) extends LogicException("Initial positive facts cannot be implied by any clauses:\n\t" + literals.mkString("\n\t")) - final class CyclicNegation(val cycle: List[Literal]) extends LogicException("Negation may not be 
involved in a cycle:\n\t" + cycle.mkString("\n\t")) + /** Tracks proven atoms in the reverse order they were proved. */ + final class Matched private (val provenSet: Set[Atom], reverseOrdered: List[Atom]) { + def add(atoms: Set[Atom]): Matched = add(atoms.toList) + def add(atoms: List[Atom]): Matched = { + val newOnly = atoms.filterNot(provenSet) + new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered) + } + def ordered: List[Atom] = reverseOrdered.reverse + override def toString = ordered.map(_.label).mkString("Matched(", ",", ")") + } + object Matched { + val empty = new Matched(Set.empty, Nil) + } - /** Tracks proven atoms in the reverse order they were proved. */ - final class Matched private(val provenSet: Set[Atom], reverseOrdered: List[Atom]) { - def add(atoms: Set[Atom]): Matched = add(atoms.toList) - def add(atoms: List[Atom]): Matched = { - val newOnly = atoms.filterNot(provenSet) - new Matched(provenSet ++ newOnly, newOnly ::: reverseOrdered) - } - def ordered: List[Atom] = reverseOrdered.reverse - override def toString = ordered.map(_.label).mkString("Matched(", ",", ")") - } - object Matched { - val empty = new Matched(Set.empty, Nil) - } + /** Separates a sequence of literals into `(pos, neg)` atom sequences. */ + private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) = Util.separate(lits) { + case a: Atom => Left(a) + case Negated(n) => Right(n) + } - /** Separates a sequence of literals into `(pos, neg)` atom sequences. */ - private[this] def separate(lits: Seq[Literal]): (Seq[Atom], Seq[Atom]) = Util.separate(lits) { - case a: Atom => Left(a) - case Negated(n) => Right(n) - } + /** + * Finds clauses that have no body and thus prove their head. + * Returns `(, )`. + */ + private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = + { + val (proven, unproven) = c.clauses.partition(_.body == True) + (proven.flatMap(_.head).toSet, unproven) + } + private[this] def keepPositive(lits: Set[Literal]): Set[Atom] = + lits.collect { case a: Atom => a }.toSet - /** Finds clauses that have no body and thus prove their head. - * Returns `(, )`. */ - private[this] def findProven(c: Clauses): (Set[Atom], List[Clause]) = - { - val (proven, unproven) = c.clauses.partition(_.body == True) - (proven.flatMap(_.head).toSet, unproven) - } - private[this] def keepPositive(lits: Set[Literal]): Set[Atom] = - lits.collect{ case a: Atom => a}.toSet + // precondition: factsToProcess contains no contradictions + @tailrec + private[this] def reduce0(clauses: Clauses, factsToProcess: Set[Literal], state: Matched): Matched = + applyAll(clauses, factsToProcess) match { + case None => // all of the remaining clauses failed on the new facts + state + case Some(applied) => + val (proven, unprovenClauses) = findProven(applied) + val processedFacts = state add keepPositive(factsToProcess) + val newlyProven = proven -- processedFacts.provenSet + val newState = processedFacts add newlyProven + if (unprovenClauses.isEmpty) + newState // no remaining clauses, done. 
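          // (editor's note, not part of this patch) A hypothetical end-to-end use of this reduction,
          // assuming the `Atom(label)`, `Negated(atom)` and `Clause(body, heads)` constructors defined
          // earlier in this file:
          //   val (a, b, c) = (Atom("a"), Atom("b"), Atom("c"))
          //   // one clause: "a holds if b holds and c does not"; start from the single fact b
          //   Logic.reduceAll(List(Clause(And(Set[Literal](b, Negated(c))), Set(a))), Set[Literal](b))
          //   // => Right(matched) with matched.ordered == List(b, a):
          //   //    c fails by negation as failure, so the clause body reduces to True and a is proven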
+ else { + val unproven = Clauses(unprovenClauses) + val nextFacts: Set[Literal] = if (newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven) + reduce0(unproven, nextFacts, newState) + } + } - // precondition: factsToProcess contains no contradictions - @tailrec - private[this] def reduce0(clauses: Clauses, factsToProcess: Set[Literal], state: Matched): Matched = - applyAll(clauses, factsToProcess) match { - case None => // all of the remaining clauses failed on the new facts - state - case Some(applied) => - val (proven, unprovenClauses) = findProven(applied) - val processedFacts = state add keepPositive(factsToProcess) - val newlyProven = proven -- processedFacts.provenSet - val newState = processedFacts add newlyProven - if(unprovenClauses.isEmpty) - newState // no remaining clauses, done. - else { - val unproven = Clauses(unprovenClauses) - val nextFacts: Set[Literal] = if(newlyProven.nonEmpty) newlyProven.toSet else inferFailure(unproven) - reduce0(unproven, nextFacts, newState) - } - } - - /** Finds negated atoms under the negation as failure rule and returns them. - * This should be called only after there are no more known atoms to be substituted. */ - private[this] def inferFailure(clauses: Clauses): Set[Literal] = - { - /* At this point, there is at least one clause and one of the following is the case as the result of the acyclic negation rule: + /** + * Finds negated atoms under the negation as failure rule and returns them. + * This should be called only after there are no more known atoms to be substituted. + */ + private[this] def inferFailure(clauses: Clauses): Set[Literal] = + { + /* At this point, there is at least one clause and one of the following is the case as the result of the acyclic negation rule: i. there is at least one variable that occurs in a clause body but not in the head of a clause ii. there is at least one variable that occurs in the head of a clause and does not transitively depend on a negated variable In either case, each such variable x cannot be proven true and therefore proves 'not x' (negation as failure, !x in the code). */ - val allAtoms = atoms(clauses) - val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse) - if(newFacts.nonEmpty) - newFacts - else { - val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty) - val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue) - if(newlyFalse.nonEmpty) - newlyFalse - else // should never happen due to the acyclic negation rule - error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue") - } - } + val allAtoms = atoms(clauses) + val newFacts: Set[Literal] = negated(allAtoms.triviallyFalse) + if (newFacts.nonEmpty) + newFacts + else { + val possiblyTrue = hasNegatedDependency(clauses.clauses, Relation.empty, Relation.empty) + val newlyFalse: Set[Literal] = negated(allAtoms.inHead -- possiblyTrue) + if (newlyFalse.nonEmpty) + newlyFalse + else // should never happen due to the acyclic negation rule + error(s"No progress:\n\tclauses: $clauses\n\tpossibly true: $possiblyTrue") + } + } - private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a)) + private[this] def negated(atoms: Set[Atom]): Set[Literal] = atoms.map(a => Negated(a)) - /** Computes the set of atoms in `clauses` that directly or transitively take a negated atom as input. 
- * For example, for the following clauses, this method would return `List(a, d)` : - * a :- b, not c - * d :- a - */ - @tailrec - def hasNegatedDependency(clauses: Seq[Clause], posDeps: Relation[Atom, Atom], negDeps: Relation[Atom, Atom]): List[Atom] = - clauses match { - case Seq() => - // because cycles between positive literals are allowed, this isn't strictly a topological sort - Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse) - case Clause(formula, head) +: tail => - // collect direct positive and negative literals and track them in separate graphs - val (pos, neg) = directDeps(formula) - val (newPos, newNeg) = ( (posDeps, negDeps) /: head) { case ( (pdeps, ndeps), d) => - (pdeps + (d, pos), ndeps + (d, neg) ) - } - hasNegatedDependency(tail, newPos, newNeg) - } + /** + * Computes the set of atoms in `clauses` that directly or transitively take a negated atom as input. + * For example, for the following clauses, this method would return `List(a, d)` : + * a :- b, not c + * d :- a + */ + @tailrec + def hasNegatedDependency(clauses: Seq[Clause], posDeps: Relation[Atom, Atom], negDeps: Relation[Atom, Atom]): List[Atom] = + clauses match { + case Seq() => + // because cycles between positive literals are allowed, this isn't strictly a topological sort + Dag.topologicalSortUnchecked(negDeps._1s)(posDeps.reverse) + case Clause(formula, head) +: tail => + // collect direct positive and negative literals and track them in separate graphs + val (pos, neg) = directDeps(formula) + val (newPos, newNeg) = ((posDeps, negDeps) /: head) { + case ((pdeps, ndeps), d) => + (pdeps + (d, pos), ndeps + (d, neg)) + } + hasNegatedDependency(tail, newPos, newNeg) + } - /** Computes the `(positive, negative)` literals in `formula`. */ - private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = - Util.separate(literals(formula).toSeq) { - case Negated(a) => Right(a) - case a: Atom => Left(a) - } - private[this] def literals(formula: Formula): Set[Literal] = formula match { - case And(lits) => lits - case l: Literal => Set(l) - case True => Set.empty - } + /** Computes the `(positive, negative)` literals in `formula`. */ + private[this] def directDeps(formula: Formula): (Seq[Atom], Seq[Atom]) = + Util.separate(literals(formula).toSeq) { + case Negated(a) => Right(a) + case a: Atom => Left(a) + } + private[this] def literals(formula: Formula): Set[Literal] = formula match { + case And(lits) => lits + case l: Literal => Set(l) + case True => Set.empty + } - /** Computes the atoms in the heads and bodies of the clauses in `clause`. */ - def atoms(cs: Clauses): Atoms = cs.clauses.map(c => Atoms(c.head, atoms(c.body))).reduce(_ ++ _) + /** Computes the atoms in the heads and bodies of the clauses in `clause`. */ + def atoms(cs: Clauses): Atoms = cs.clauses.map(c => Atoms(c.head, atoms(c.body))).reduce(_ ++ _) - /** Computes the set of all atoms in `formula`. */ - def atoms(formula: Formula): Set[Atom] = formula match { - case And(lits) => lits.map(_.atom) - case Negated(lit) => Set(lit) - case a: Atom => Set(a) - case True => Set() - } + /** Computes the set of all atoms in `formula`. */ + def atoms(formula: Formula): Set[Atom] = formula match { + case And(lits) => lits.map(_.atom) + case Negated(lit) => Set(lit) + case a: Atom => Set(a) + case True => Set() + } - /** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */ - final case class Atoms(val inHead: Set[Atom], val inFormula: Set[Atom]) { - /** Concatenates this with `as`. 
*/ - def ++ (as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula) - /** Atoms that cannot be true because they do not occur in a head. */ - def triviallyFalse: Set[Atom] = inFormula -- inHead - } + /** Represents the set of atoms in the heads of clauses and in the bodies (formulas) of clauses. */ + final case class Atoms(val inHead: Set[Atom], val inFormula: Set[Atom]) { + /** Concatenates this with `as`. */ + def ++(as: Atoms): Atoms = Atoms(inHead ++ as.inHead, inFormula ++ as.inFormula) + /** Atoms that cannot be true because they do not occur in a head. */ + def triviallyFalse: Set[Atom] = inFormula -- inHead + } - /** Applies known facts to `clause`s, deriving a new, possibly empty list of clauses. - * 1. If a fact is in the body of a clause, the derived clause has that fact removed from the body. - * 2. If the negation of a fact is in a body of a clause, that clause fails and is removed. - * 3. If a fact or its negation is in the head of a clause, the derived clause has that fact (or its negation) removed from the head. - * 4. If a head is empty, the clause proves nothing and is removed. - * - * NOTE: empty bodies do not cause a clause to succeed yet. - * All known facts must be applied before this can be done in order to avoid inconsistencies. - * Precondition: no contradictions in `facts` - * Postcondition: no atom in `facts` is present in the result - * Postcondition: No clauses have an empty head - * */ - def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = - { - val newClauses = - if(facts.isEmpty) - cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head - else - cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList) - if(newClauses.isEmpty) None else Some(Clauses(newClauses)) - } + /** + * Applies known facts to `clause`s, deriving a new, possibly empty list of clauses. + * 1. If a fact is in the body of a clause, the derived clause has that fact removed from the body. + * 2. If the negation of a fact is in a body of a clause, that clause fails and is removed. + * 3. If a fact or its negation is in the head of a clause, the derived clause has that fact (or its negation) removed from the head. + * 4. If a head is empty, the clause proves nothing and is removed. + * + * NOTE: empty bodies do not cause a clause to succeed yet. + * All known facts must be applied before this can be done in order to avoid inconsistencies. + * Precondition: no contradictions in `facts` + * Postcondition: no atom in `facts` is present in the result + * Postcondition: No clauses have an empty head + */ + def applyAll(cs: Clauses, facts: Set[Literal]): Option[Clauses] = + { + val newClauses = + if (facts.isEmpty) + cs.clauses.filter(_.head.nonEmpty) // still need to drop clauses with an empty head + else + cs.clauses.map(c => applyAll(c, facts)).flatMap(_.toList) + if (newClauses.isEmpty) None else Some(Clauses(newClauses)) + } - def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = - { - val atoms = facts.map(_.atom) - val newHead = c.head -- atoms // 3. - if(newHead.isEmpty) // 4. empty head - None - else - substitute(c.body, facts).map( f => Clause(f, newHead) ) // 1, 2 - } + def applyAll(c: Clause, facts: Set[Literal]): Option[Clause] = + { + val atoms = facts.map(_.atom) + val newHead = c.head -- atoms // 3. + if (newHead.isEmpty) // 4. empty head + None + else + substitute(c.body, facts).map(f => Clause(f, newHead)) // 1, 2 + } - /** Derives the formula that results from substituting `facts` into `formula`. 
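 * (editor's note, not part of this patch) For example, substituting the fact `b` into the body
 * `And(Set(b, Negated(c)))` yields `Some(And(Set(Negated(c))))` by rule 1 above, whereas substituting
 * the fact `c` into the same body yields `None` by rule 2, because the body contains `Negated(c)`.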
*/ - @tailrec - def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match { - case And(lits) => - def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a) - if( lits.exists( negated(facts) ) ) // 2. - None - else { - val newLits = lits -- facts - val newF = if(newLits.isEmpty) True else And(newLits) - Some(newF) // 1. - } - case True => Some(True) - case lit: Literal => // define in terms of And - substitute(And(Set(lit)), facts) - } + /** Derives the formula that results from substituting `facts` into `formula`. */ + @tailrec + def substitute(formula: Formula, facts: Set[Literal]): Option[Formula] = formula match { + case And(lits) => + def negated(lits: Set[Literal]): Set[Literal] = lits.map(a => !a) + if (lits.exists(negated(facts))) // 2. + None + else { + val newLits = lits -- facts + val newF = if (newLits.isEmpty) True else And(newLits) + Some(newF) // 1. + } + case True => Some(True) + case lit: Literal => // define in terms of And + substitute(And(Set(lit)), facts) + } } diff --git a/util/process/src/main/scala/sbt/InheritInput.scala b/util/process/src/main/scala/sbt/InheritInput.scala index 1c9ef0ee8..9502cee49 100755 --- a/util/process/src/main/scala/sbt/InheritInput.scala +++ b/util/process/src/main/scala/sbt/InheritInput.scala @@ -3,18 +3,19 @@ */ package sbt -import java.lang.{ProcessBuilder => JProcessBuilder} +import java.lang.{ ProcessBuilder => JProcessBuilder } /** On java 7, inherit System.in for a ProcessBuilder. */ private[sbt] object InheritInput { - def apply(p: JProcessBuilder): Boolean = (redirectInput, inherit) match { - case (Some(m), Some(f)) => m.invoke(p, f); true - case _ => false - } + def apply(p: JProcessBuilder): Boolean = (redirectInput, inherit) match { + case (Some(m), Some(f)) => + m.invoke(p, f); true + case _ => false + } - private[this] val pbClass = Class.forName("java.lang.ProcessBuilder") - private[this] val redirectClass = pbClass.getClasses find (_.getSimpleName == "Redirect") + private[this] val pbClass = Class.forName("java.lang.ProcessBuilder") + private[this] val redirectClass = pbClass.getClasses find (_.getSimpleName == "Redirect") - private[this] val redirectInput = redirectClass map (pbClass.getMethod("redirectInput", _)) - private[this] val inherit = redirectClass map (_ getField "INHERIT" get null) + private[this] val redirectInput = redirectClass map (pbClass.getMethod("redirectInput", _)) + private[this] val inherit = redirectClass map (_ getField "INHERIT" get null) } diff --git a/util/process/src/main/scala/sbt/Process.scala b/util/process/src/main/scala/sbt/Process.scala index a370048e4..66b7e03c6 100644 --- a/util/process/src/main/scala/sbt/Process.scala +++ b/util/process/src/main/scala/sbt/Process.scala @@ -3,196 +3,219 @@ */ package sbt -import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder} -import java.io.{Closeable, File, IOException} -import java.io.{BufferedReader, InputStream, InputStreamReader, OutputStream, PipedInputStream, PipedOutputStream} +import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder } +import java.io.{ Closeable, File, IOException } +import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PipedInputStream, PipedOutputStream } import java.net.URL -trait ProcessExtra -{ - import Process._ - implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder) - implicit def fileToProcess(file: File): FilePartialBuilder = apply(file) - implicit def urlToProcess(url: URL): 
URLPartialBuilder = apply(url) - @deprecated("Use string interpolation", "0.13.0") - implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command) - implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = applySeq(builders) +trait ProcessExtra { + import Process._ + implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder) + implicit def fileToProcess(file: File): FilePartialBuilder = apply(file) + implicit def urlToProcess(url: URL): URLPartialBuilder = apply(url) + @deprecated("Use string interpolation", "0.13.0") + implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command) + implicit def buildersToProcess[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = applySeq(builders) - implicit def stringToProcess(command: String): ProcessBuilder = apply(command) - implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command) + implicit def stringToProcess(command: String): ProcessBuilder = apply(command) + implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command) } /** Methods for constructing simple commands that can then be combined. */ -object Process extends ProcessExtra -{ - def apply(command: String): ProcessBuilder = apply(command, None) +object Process extends ProcessExtra { + def apply(command: String): ProcessBuilder = apply(command, None) - def apply(command: Seq[String]): ProcessBuilder = apply (command.toArray, None) + def apply(command: Seq[String]): ProcessBuilder = apply(command.toArray, None) - def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command :: arguments.toList, None) - /** create ProcessBuilder with working dir set to File and extra environment variables */ - def apply(command: String, cwd: File, extraEnv: (String,String)*): ProcessBuilder = - apply(command, Some(cwd), extraEnv : _*) - /** create ProcessBuilder with working dir set to File and extra environment variables */ - def apply(command: Seq[String], cwd: File, extraEnv: (String,String)*): ProcessBuilder = - apply(command, Some(cwd), extraEnv : _*) - /** create ProcessBuilder with working dir optionaly set to File and extra environment variables */ - def apply(command: String, cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder = { - apply(command.split("""\s+"""), cwd, extraEnv : _*) - // not smart to use this on windows, because CommandParser uses \ to escape ". - /*CommandParser.parse(command) match { + def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command :: arguments.toList, None) + /** create ProcessBuilder with working dir set to File and extra environment variables */ + def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder = + apply(command, Some(cwd), extraEnv: _*) + /** create ProcessBuilder with working dir set to File and extra environment variables */ + def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder = + apply(command, Some(cwd), extraEnv: _*) + /** create ProcessBuilder with working dir optionaly set to File and extra environment variables */ + def apply(command: String, cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { + apply(command.split("""\s+"""), cwd, extraEnv: _*) + // not smart to use this on windows, because CommandParser uses \ to escape ". 
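    // (editor's note, not part of this patch) Because of the whitespace split above, quoting is not
    // interpreted; arguments containing spaces must be passed pre-tokenized via the Seq overload.
    // A hypothetical comparison, assuming a Unix-like `touch`:
    //   Process("touch \"a b\"")      // three tokens: touch, "a and b"  (quotes are not stripped)
    //   Process(Seq("touch", "a b"))  // two tokens: touch and the single argument `a b`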
+ /*CommandParser.parse(command) match { case Left(errorMsg) => error(errorMsg) case Right((cmd, args)) => apply(cmd :: args, cwd, extraEnv : _*) }*/ - } - /** create ProcessBuilder with working dir optionaly set to File and extra environment variables */ - def apply(command: Seq[String], cwd: Option[File], extraEnv: (String,String)*): ProcessBuilder = { - val jpb = new JProcessBuilder(command.toArray : _*) - cwd.foreach(jpb directory _) - extraEnv.foreach { case (k, v) => jpb.environment.put(k, v) } - apply(jpb) - } - def apply(builder: JProcessBuilder): ProcessBuilder = new SimpleProcessBuilder(builder) - def apply(file: File): FilePartialBuilder = new FileBuilder(file) - def apply(url: URL): URLPartialBuilder = new URLBuilder(url) - @deprecated("Use string interpolation", "0.13.0") - def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim) - def applySeq[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = builders.map(convert) + } + /** create ProcessBuilder with working dir optionaly set to File and extra environment variables */ + def apply(command: Seq[String], cwd: Option[File], extraEnv: (String, String)*): ProcessBuilder = { + val jpb = new JProcessBuilder(command.toArray: _*) + cwd.foreach(jpb directory _) + extraEnv.foreach { case (k, v) => jpb.environment.put(k, v) } + apply(jpb) + } + def apply(builder: JProcessBuilder): ProcessBuilder = new SimpleProcessBuilder(builder) + def apply(file: File): FilePartialBuilder = new FileBuilder(file) + def apply(url: URL): URLPartialBuilder = new URLBuilder(url) + @deprecated("Use string interpolation", "0.13.0") + def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim) + def applySeq[T](builders: Seq[T])(implicit convert: T => SourcePartialBuilder): Seq[SourcePartialBuilder] = builders.map(convert) - def apply(value: Boolean): ProcessBuilder = apply(value.toString, if(value) 0 else 1) - def apply(name: String, exitValue: => Int): ProcessBuilder = new DummyProcessBuilder(name, exitValue) + def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1) + def apply(name: String, exitValue: => Int): ProcessBuilder = new DummyProcessBuilder(name, exitValue) - def cat(file: SourcePartialBuilder, files: SourcePartialBuilder*): ProcessBuilder = cat(file :: files.toList) - def cat(files: Seq[SourcePartialBuilder]): ProcessBuilder = - { - require(!files.isEmpty) - files.map(_.cat).reduceLeft(_ #&& _) - } + def cat(file: SourcePartialBuilder, files: SourcePartialBuilder*): ProcessBuilder = cat(file :: files.toList) + def cat(files: Seq[SourcePartialBuilder]): ProcessBuilder = + { + require(!files.isEmpty) + files.map(_.cat).reduceLeft(_ #&& _) + } } -trait SourcePartialBuilder extends NotNull -{ - /** Writes the output stream of this process to the given file. */ - def #> (f: File): ProcessBuilder = toFile(f, false) - /** Appends the output stream of this process to the given file. */ - def #>> (f: File): ProcessBuilder = toFile(f, true) - /** Writes the output stream of this process to the given OutputStream. The - * argument is call-by-name, so the stream is recreated, written, and closed each - * time this process is executed. 
*/ - def #>(out: => OutputStream): ProcessBuilder = #> (new OutputStreamBuilder(out)) - def #>(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(toSource, b, false, ExitCodes.firstIfNonzero) - private def toFile(f: File, append: Boolean) = #> (new FileOutput(f, append)) - def cat = toSource - protected def toSource: ProcessBuilder +trait SourcePartialBuilder extends NotNull { + /** Writes the output stream of this process to the given file. */ + def #>(f: File): ProcessBuilder = toFile(f, false) + /** Appends the output stream of this process to the given file. */ + def #>>(f: File): ProcessBuilder = toFile(f, true) + /** + * Writes the output stream of this process to the given OutputStream. The + * argument is call-by-name, so the stream is recreated, written, and closed each + * time this process is executed. + */ + def #>(out: => OutputStream): ProcessBuilder = #>(new OutputStreamBuilder(out)) + def #>(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(toSource, b, false, ExitCodes.firstIfNonzero) + private def toFile(f: File, append: Boolean) = #>(new FileOutput(f, append)) + def cat = toSource + protected def toSource: ProcessBuilder } -trait SinkPartialBuilder extends NotNull -{ - /** Reads the given file into the input stream of this process. */ - def #< (f: File): ProcessBuilder = #< (new FileInput(f)) - /** Reads the given URL into the input stream of this process. */ - def #< (f: URL): ProcessBuilder = #< (new URLInput(f)) - /** Reads the given InputStream into the input stream of this process. The - * argument is call-by-name, so the stream is recreated, read, and closed each - * time this process is executed. */ - def #<(in: => InputStream): ProcessBuilder = #< (new InputStreamBuilder(in)) - def #<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, toSink, false, ExitCodes.firstIfNonzero) - protected def toSink: ProcessBuilder +trait SinkPartialBuilder extends NotNull { + /** Reads the given file into the input stream of this process. */ + def #<(f: File): ProcessBuilder = #<(new FileInput(f)) + /** Reads the given URL into the input stream of this process. */ + def #<(f: URL): ProcessBuilder = #<(new URLInput(f)) + /** + * Reads the given InputStream into the input stream of this process. The + * argument is call-by-name, so the stream is recreated, read, and closed each + * time this process is executed. + */ + def #<(in: => InputStream): ProcessBuilder = #<(new InputStreamBuilder(in)) + def #<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, toSink, false, ExitCodes.firstIfNonzero) + protected def toSink: ProcessBuilder } trait URLPartialBuilder extends SourcePartialBuilder -trait FilePartialBuilder extends SinkPartialBuilder with SourcePartialBuilder -{ - def #<<(f: File): ProcessBuilder - def #<<(u: URL): ProcessBuilder - def #<<(i: => InputStream): ProcessBuilder - def #<<(p: ProcessBuilder): ProcessBuilder +trait FilePartialBuilder extends SinkPartialBuilder with SourcePartialBuilder { + def #<<(f: File): ProcessBuilder + def #<<(u: URL): ProcessBuilder + def #<<(i: => InputStream): ProcessBuilder + def #<<(p: ProcessBuilder): ProcessBuilder } -/** Represents a process that is running or has finished running. -* It may be a compound process with several underlying native processes (such as 'a #&& b`).*/ -trait Process extends NotNull -{ - /** Blocks until this process exits and returns the exit code.*/ - def exitValue(): Int - /** Destroys this process. 
*/ - def destroy(): Unit +/** + * Represents a process that is running or has finished running. + * It may be a compound process with several underlying native processes (such as 'a #&& b`). + */ +trait Process extends NotNull { + /** Blocks until this process exits and returns the exit code.*/ + def exitValue(): Int + /** Destroys this process. */ + def destroy(): Unit } /** Represents a runnable process. */ -trait ProcessBuilder extends SourcePartialBuilder with SinkPartialBuilder -{ - /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is - * sent to the console. If the exit code is non-zero, an exception is thrown.*/ - def !! : String - /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is - * sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown.*/ - def !!(log: ProcessLogger) : String - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination and then throw an exception. */ - def lines: Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ - def lines(log: ProcessLogger): Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ - def lines_! : Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ - def lines_!(log: ProcessLogger): Stream[String] - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the console.*/ - def ! : Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger.*/ - def !(log: ProcessLogger): Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the console. The newly started process reads from standard input of the current process.*/ - def !< : Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger. The newly started process reads from standard input of the current process.*/ - def !<(log: ProcessLogger) : Int - /** Starts the process represented by this builder. 
Standard output and error are sent to the console.*/ - def run(): Process - /** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/ - def run(log: ProcessLogger): Process - /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/ - def run(io: ProcessIO): Process - /** Starts the process represented by this builder. Standard output and error are sent to the console. - * The newly started process reads from standard input of the current process if `connectInput` is true.*/ - def run(connectInput: Boolean): Process - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger. - * The newly started process reads from standard input of the current process if `connectInput` is true.*/ - def run(log: ProcessLogger, connectInput: Boolean): Process +trait ProcessBuilder extends SourcePartialBuilder with SinkPartialBuilder { + /** + * Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is + * sent to the console. If the exit code is non-zero, an exception is thrown. + */ + def !! : String + /** + * Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is + * sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown. + */ + def !!(log: ProcessLogger): String + /** + * Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available + * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, + * the Stream will provide all lines up to termination and then throw an exception. + */ + def lines: Stream[String] + /** + * Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available + * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, + * the Stream will provide all lines up to termination but will not throw an exception. + */ + def lines(log: ProcessLogger): Stream[String] + /** + * Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available + * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, + * the Stream will provide all lines up to termination but will not throw an exception. + */ + def lines_! : Stream[String] + /** + * Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available + * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, + * the Stream will provide all lines up to termination but will not throw an exception. + */ + def lines_!(log: ProcessLogger): Stream[String] + /** + * Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the console. + */ + def ! : Int + /** + * Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the given ProcessLogger. 
+ */ + def !(log: ProcessLogger): Int + /** + * Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the console. The newly started process reads from standard input of the current process. + */ + def !< : Int + /** + * Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the given ProcessLogger. The newly started process reads from standard input of the current process. + */ + def !<(log: ProcessLogger): Int + /** Starts the process represented by this builder. Standard output and error are sent to the console.*/ + def run(): Process + /** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/ + def run(log: ProcessLogger): Process + /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/ + def run(io: ProcessIO): Process + /** + * Starts the process represented by this builder. Standard output and error are sent to the console. + * The newly started process reads from standard input of the current process if `connectInput` is true. + */ + def run(connectInput: Boolean): Process + /** + * Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are + * sent to the given ProcessLogger. + * The newly started process reads from standard input of the current process if `connectInput` is true. + */ + def run(log: ProcessLogger, connectInput: Boolean): Process - def runBuffered(log: ProcessLogger, connectInput: Boolean): Process + def runBuffered(log: ProcessLogger, connectInput: Boolean): Process - /** Constructs a command that runs this command first and then `other` if this command succeeds.*/ - def #&& (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/ - def #|| (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that will run this command and pipes the output to `other`. - * `other` must be a simple command. - * The exit code will be that of `other` regardless of whether this command succeeds. */ - def #| (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/ - def ### (other: ProcessBuilder): ProcessBuilder + /** Constructs a command that runs this command first and then `other` if this command succeeds.*/ + def #&&(other: ProcessBuilder): ProcessBuilder + /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/ + def #||(other: ProcessBuilder): ProcessBuilder + /** + * Constructs a command that will run this command and pipes the output to `other`. + * `other` must be a simple command. + * The exit code will be that of `other` regardless of whether this command succeeds. + */ + def #|(other: ProcessBuilder): ProcessBuilder + /** Constructs a command that will run this command and then `other`. 
The exit code will be the exit code of `other`.*/ + def ###(other: ProcessBuilder): ProcessBuilder - def canPipeTo: Boolean + def canPipeTo: Boolean } /** Each method will be called in a separate thread.*/ -final class ProcessIO(val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, val processError: InputStream => Unit, val inheritInput: JProcessBuilder => Boolean) extends NotNull -{ - def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, inheritInput) - def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, inheritInput) - def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, inheritInput) +final class ProcessIO(val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, val processError: InputStream => Unit, val inheritInput: JProcessBuilder => Boolean) extends NotNull { + def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, inheritInput) + def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, inheritInput) + def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, inheritInput) } -trait ProcessLogger -{ - def info(s: => String): Unit - def error(s: => String): Unit - def buffer[T](f: => T): T +trait ProcessLogger { + def info(s: => String): Unit + def error(s: => String): Unit + def buffer[T](f: => T): T } diff --git a/util/process/src/main/scala/sbt/ProcessImpl.scala b/util/process/src/main/scala/sbt/ProcessImpl.scala index 9a3aae606..10c2460ad 100644 --- a/util/process/src/main/scala/sbt/ProcessImpl.scala +++ b/util/process/src/main/scala/sbt/ProcessImpl.scala @@ -3,423 +3,385 @@ */ package sbt -import java.lang.{Process => JProcess, ProcessBuilder => JProcessBuilder} -import java.io.{BufferedReader, Closeable, InputStream, InputStreamReader, IOException, OutputStream, PrintStream} -import java.io.{FilterInputStream, FilterOutputStream, PipedInputStream, PipedOutputStream} -import java.io.{File, FileInputStream, FileOutputStream} +import java.lang.{ Process => JProcess, ProcessBuilder => JProcessBuilder } +import java.io.{ BufferedReader, Closeable, InputStream, InputStreamReader, IOException, OutputStream, PrintStream } +import java.io.{ FilterInputStream, FilterOutputStream, PipedInputStream, PipedOutputStream } +import java.io.{ File, FileInputStream, FileOutputStream } import java.net.URL /** Runs provided code in a new Thread and returns the Thread instance. 
*/ -private object Spawn -{ - def apply(f: => Unit): Thread = apply(f, false) - def apply(f: => Unit, daemon: Boolean): Thread = - { - val thread = new Thread() { override def run() = { f } } - thread.setDaemon(daemon) - thread.start() - thread - } +private object Spawn { + def apply(f: => Unit): Thread = apply(f, false) + def apply(f: => Unit, daemon: Boolean): Thread = + { + val thread = new Thread() { override def run() = { f } } + thread.setDaemon(daemon) + thread.start() + thread + } } -private object Future -{ - def apply[T](f: => T): () => T = - { - val result = new SyncVar[Either[Throwable, T]] - def run(): Unit = - try { result.set(Right(f)) } - catch { case e: Exception => result.set(Left(e)) } - Spawn(run) - () => - result.get match - { - case Right(value) => value - case Left(exception) => throw exception - } - } +private object Future { + def apply[T](f: => T): () => T = + { + val result = new SyncVar[Either[Throwable, T]] + def run(): Unit = + try { result.set(Right(f)) } + catch { case e: Exception => result.set(Left(e)) } + Spawn(run) + () => + result.get match { + case Right(value) => value + case Left(exception) => throw exception + } + } } -object BasicIO -{ - def apply(buffer: StringBuffer, log: Option[ProcessLogger], withIn: Boolean) = new ProcessIO(input(withIn), processFully(buffer), getErr(log), inheritInput(withIn)) - def apply(log: ProcessLogger, withIn: Boolean) = new ProcessIO(input(withIn), processInfoFully(log), processErrFully(log), inheritInput(withIn)) +object BasicIO { + def apply(buffer: StringBuffer, log: Option[ProcessLogger], withIn: Boolean) = new ProcessIO(input(withIn), processFully(buffer), getErr(log), inheritInput(withIn)) + def apply(log: ProcessLogger, withIn: Boolean) = new ProcessIO(input(withIn), processInfoFully(log), processErrFully(log), inheritInput(withIn)) - def getErr(log: Option[ProcessLogger]) = log match { case Some(lg) => processErrFully(lg); case None => toStdErr } + def getErr(log: Option[ProcessLogger]) = log match { case Some(lg) => processErrFully(lg); case None => toStdErr } - private def processErrFully(log: ProcessLogger) = processFully(s => log.error(s)) - private def processInfoFully(log: ProcessLogger) = processFully(s => log.info(s)) + private def processErrFully(log: ProcessLogger) = processFully(s => log.error(s)) + private def processInfoFully(log: ProcessLogger) = processFully(s => log.info(s)) - def closeOut = (_: OutputStream).close() - final val BufferSize = 8192 - final val Newline = System.getProperty("line.separator") + def closeOut = (_: OutputStream).close() + final val BufferSize = 8192 + final val Newline = System.getProperty("line.separator") - def close(c: java.io.Closeable) = try { c.close() } catch { case _: java.io.IOException => () } - def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer)) - def processFully(processLine: String => Unit): InputStream => Unit = - in => - { - val reader = new BufferedReader(new InputStreamReader(in)) - processLinesFully(processLine)(reader.readLine) - reader.close() - } - def processLinesFully(processLine: String => Unit)(readLine: () => String) - { - def readFully() - { - val line = readLine() - if(line != null) - { - processLine(line) - readFully() - } - } - readFully() - } - def connectToIn(o: OutputStream) { transferFully(Uncloseable protect System.in, o) } - def input(connect: Boolean): OutputStream => Unit = if(connect) connectToIn else closeOut - def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput), 
inheritInput(connectInput)) - def standard(in: OutputStream => Unit, inheritIn: JProcessBuilder => Boolean): ProcessIO = new ProcessIO(in, toStdOut, toStdErr, inheritIn) + def close(c: java.io.Closeable) = try { c.close() } catch { case _: java.io.IOException => () } + def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer)) + def processFully(processLine: String => Unit): InputStream => Unit = + in => + { + val reader = new BufferedReader(new InputStreamReader(in)) + processLinesFully(processLine)(reader.readLine) + reader.close() + } + def processLinesFully(processLine: String => Unit)(readLine: () => String) { + def readFully() { + val line = readLine() + if (line != null) { + processLine(line) + readFully() + } + } + readFully() + } + def connectToIn(o: OutputStream) { transferFully(Uncloseable protect System.in, o) } + def input(connect: Boolean): OutputStream => Unit = if (connect) connectToIn else closeOut + def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput), inheritInput(connectInput)) + def standard(in: OutputStream => Unit, inheritIn: JProcessBuilder => Boolean): ProcessIO = new ProcessIO(in, toStdOut, toStdErr, inheritIn) - def toStdErr = (in: InputStream) => transferFully(in, System.err) - def toStdOut = (in: InputStream) => transferFully(in, System.out) + def toStdErr = (in: InputStream) => transferFully(in, System.err) + def toStdOut = (in: InputStream) => transferFully(in, System.out) - def transferFully(in: InputStream, out: OutputStream): Unit = - try { transferFullyImpl(in, out) } - catch { case _: InterruptedException => () } + def transferFully(in: InputStream, out: OutputStream): Unit = + try { transferFullyImpl(in, out) } + catch { case _: InterruptedException => () } - private[this] def appendLine(buffer: Appendable): String => Unit = - line => - { - buffer.append(line) - buffer.append(Newline) - } + private[this] def appendLine(buffer: Appendable): String => Unit = + line => + { + buffer.append(line) + buffer.append(Newline) + } - private[this] def transferFullyImpl(in: InputStream, out: OutputStream) - { - val continueCount = 1//if(in.isInstanceOf[PipedInputStream]) 1 else 0 - val buffer = new Array[Byte](BufferSize) - def read() - { - val byteCount = in.read(buffer) - if(byteCount >= continueCount) - { - out.write(buffer, 0, byteCount) - out.flush() - read - } - } - read - in.close() - } + private[this] def transferFullyImpl(in: InputStream, out: OutputStream) { + val continueCount = 1 //if(in.isInstanceOf[PipedInputStream]) 1 else 0 + val buffer = new Array[Byte](BufferSize) + def read() { + val byteCount = in.read(buffer) + if (byteCount >= continueCount) { + out.write(buffer, 0, byteCount) + out.flush() + read + } + } + read + in.close() + } - def inheritInput(connect: Boolean) = { p: JProcessBuilder => if (connect) InheritInput(p) else false } + def inheritInput(connect: Boolean) = { p: JProcessBuilder => if (connect) InheritInput(p) else false } } private[sbt] object ExitCodes { - def ignoreFirst: (Int, Int) => Int = (a,b) => b - def firstIfNonzero: (Int, Int) => Int = (a,b) => if(a != 0) a else b + def ignoreFirst: (Int, Int) => Int = (a, b) => b + def firstIfNonzero: (Int, Int) => Int = (a, b) => if (a != 0) a else b } +private abstract class AbstractProcessBuilder extends ProcessBuilder with SinkPartialBuilder with SourcePartialBuilder { + def #&&(other: ProcessBuilder): ProcessBuilder = new AndProcessBuilder(this, other) + def #||(other: ProcessBuilder): ProcessBuilder = new 
OrProcessBuilder(this, other) + def #|(other: ProcessBuilder): ProcessBuilder = + { + require(other.canPipeTo, "Piping to multiple processes is not supported.") + new PipedProcessBuilder(this, other, false, exitCode = ExitCodes.ignoreFirst) + } + def ###(other: ProcessBuilder): ProcessBuilder = new SequenceProcessBuilder(this, other) -private abstract class AbstractProcessBuilder extends ProcessBuilder with SinkPartialBuilder with SourcePartialBuilder -{ - def #&&(other: ProcessBuilder): ProcessBuilder = new AndProcessBuilder(this, other) - def #||(other: ProcessBuilder): ProcessBuilder = new OrProcessBuilder(this, other) - def #|(other: ProcessBuilder): ProcessBuilder = - { - require(other.canPipeTo, "Piping to multiple processes is not supported.") - new PipedProcessBuilder(this, other, false, exitCode = ExitCodes.ignoreFirst) - } - def ###(other: ProcessBuilder): ProcessBuilder = new SequenceProcessBuilder(this, other) - - protected def toSource = this - protected def toSink = this - - def run(): Process = run(false) - def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput)) - def run(log: ProcessLogger): Process = run(log, false) - def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(log, connectInput)) + protected def toSource = this + protected def toSink = this - private[this] def getString(log: Option[ProcessLogger], withIn: Boolean): String = - { - val buffer = new StringBuffer - val code = this ! BasicIO(buffer, log, withIn) - if(code == 0) buffer.toString else error("Nonzero exit value: " + code) - } - def !! = getString(None, false) - def !!(log: ProcessLogger) = getString(Some(log), false) - def !!< = getString(None, true) - def !!<(log: ProcessLogger) = getString(Some(log), true) + def run(): Process = run(false) + def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput)) + def run(log: ProcessLogger): Process = run(log, false) + def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(log, connectInput)) - def lines: Stream[String] = lines(false, true, None) - def lines(log: ProcessLogger): Stream[String] = lines(false, true, Some(log)) - def lines_! : Stream[String] = lines(false, false, None) - def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log)) + private[this] def getString(log: Option[ProcessLogger], withIn: Boolean): String = + { + val buffer = new StringBuffer + val code = this ! BasicIO(buffer, log, withIn) + if (code == 0) buffer.toString else error("Nonzero exit value: " + code) + } + def !! = getString(None, false) + def !!(log: ProcessLogger) = getString(Some(log), false) + def !!< = getString(None, true) + def !!<(log: ProcessLogger) = getString(Some(log), true) - private[this] def lines(withInput: Boolean, nonZeroException: Boolean, log: Option[ProcessLogger]): Stream[String] = - { - val streamed = Streamed[String](nonZeroException) - val process = run(new ProcessIO(BasicIO.input(withInput), BasicIO.processFully(streamed.process), BasicIO.getErr(log), BasicIO.inheritInput(withInput))) - Spawn { streamed.done(process.exitValue()) } - streamed.stream() - } + def lines: Stream[String] = lines(false, true, None) + def lines(log: ProcessLogger): Stream[String] = lines(false, true, Some(log)) + def lines_! : Stream[String] = lines(false, false, None) + def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log)) - def ! 
= run(false).exitValue() - def !< = run(true).exitValue() - def !(log: ProcessLogger) = runBuffered(log, false).exitValue() - def !<(log: ProcessLogger) = runBuffered(log, true).exitValue() - def runBuffered(log: ProcessLogger, connectInput: Boolean) = - log.buffer { run(log, connectInput) } - def !(io: ProcessIO) = run(io).exitValue() + private[this] def lines(withInput: Boolean, nonZeroException: Boolean, log: Option[ProcessLogger]): Stream[String] = + { + val streamed = Streamed[String](nonZeroException) + val process = run(new ProcessIO(BasicIO.input(withInput), BasicIO.processFully(streamed.process), BasicIO.getErr(log), BasicIO.inheritInput(withInput))) + Spawn { streamed.done(process.exitValue()) } + streamed.stream() + } - def canPipeTo = false + def ! = run(false).exitValue() + def !< = run(true).exitValue() + def !(log: ProcessLogger) = runBuffered(log, false).exitValue() + def !<(log: ProcessLogger) = runBuffered(log, true).exitValue() + def runBuffered(log: ProcessLogger, connectInput: Boolean) = + log.buffer { run(log, connectInput) } + def !(io: ProcessIO) = run(io).exitValue() + + def canPipeTo = false } -private[sbt] class URLBuilder(url: URL) extends URLPartialBuilder with SourcePartialBuilder -{ - protected def toSource = new URLInput(url) +private[sbt] class URLBuilder(url: URL) extends URLPartialBuilder with SourcePartialBuilder { + protected def toSource = new URLInput(url) } -private[sbt] class FileBuilder(base: File) extends FilePartialBuilder with SinkPartialBuilder with SourcePartialBuilder -{ - protected def toSource = new FileInput(base) - protected def toSink = new FileOutput(base, false) - def #<<(f: File): ProcessBuilder = #<<(new FileInput(f)) - def #<<(u: URL): ProcessBuilder = #<<(new URLInput(u)) - def #<<(s: => InputStream): ProcessBuilder = #<<(new InputStreamBuilder(s)) - def #<<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, new FileOutput(base, true), false, ExitCodes.firstIfNonzero) +private[sbt] class FileBuilder(base: File) extends FilePartialBuilder with SinkPartialBuilder with SourcePartialBuilder { + protected def toSource = new FileInput(base) + protected def toSink = new FileOutput(base, false) + def #<<(f: File): ProcessBuilder = #<<(new FileInput(f)) + def #<<(u: URL): ProcessBuilder = #<<(new URLInput(u)) + def #<<(s: => InputStream): ProcessBuilder = #<<(new InputStreamBuilder(s)) + def #<<(b: ProcessBuilder): ProcessBuilder = new PipedProcessBuilder(b, new FileOutput(base, true), false, ExitCodes.firstIfNonzero) } -private abstract class BasicBuilder extends AbstractProcessBuilder -{ - protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.") - final def run(io: ProcessIO): Process = - { - val p = createProcess(io) - p.start() - p - } - protected[this] def createProcess(io: ProcessIO): BasicProcess +private abstract class BasicBuilder extends AbstractProcessBuilder { + protected[this] def checkNotThis(a: ProcessBuilder) = require(a != this, "Compound process '" + a + "' cannot contain itself.") + final def run(io: ProcessIO): Process = + { + val p = createProcess(io) + p.start() + p + } + protected[this] def createProcess(io: ProcessIO): BasicProcess } -private abstract class BasicProcess extends Process -{ - def start(): Unit +private abstract class BasicProcess extends Process { + def start(): Unit } -private abstract class CompoundProcess extends BasicProcess -{ - def destroy() { destroyer() } - def exitValue() = getExitValue().getOrElse(error("No 
exit code: process destroyed.")) +private abstract class CompoundProcess extends BasicProcess { + def destroy() { destroyer() } + def exitValue() = getExitValue().getOrElse(error("No exit code: process destroyed.")) - def start() = getExitValue - - protected lazy val (getExitValue, destroyer) = - { - val code = new SyncVar[Option[Int]]() - code.set(None) - val thread = Spawn(code.set(runAndExitValue())) - - ( - Future { thread.join(); code.get }, - () => thread.interrupt() - ) - } - - /** Start and block until the exit value is available and then return it in Some. Return None if destroyed (use 'run')*/ - protected[this] def runAndExitValue(): Option[Int] + def start() = getExitValue - protected[this] def runInterruptible[T](action: => T)(destroyImpl: => Unit): Option[T] = - { - try { Some(action) } - catch { case _: InterruptedException => destroyImpl; None } - } + protected lazy val (getExitValue, destroyer) = + { + val code = new SyncVar[Option[Int]]() + code.set(None) + val thread = Spawn(code.set(runAndExitValue())) + + ( + Future { thread.join(); code.get }, + () => thread.interrupt() + ) + } + + /** Start and block until the exit value is available and then return it in Some. Return None if destroyed (use 'run')*/ + protected[this] def runAndExitValue(): Option[Int] + + protected[this] def runInterruptible[T](action: => T)(destroyImpl: => Unit): Option[T] = + { + try { Some(action) } + catch { case _: InterruptedException => destroyImpl; None } + } } -private abstract class SequentialProcessBuilder(a: ProcessBuilder, b: ProcessBuilder, operatorString: String) extends BasicBuilder -{ - checkNotThis(a) - checkNotThis(b) - override def toString = " ( " + a + " " + operatorString + " " + b + " ) " +private abstract class SequentialProcessBuilder(a: ProcessBuilder, b: ProcessBuilder, operatorString: String) extends BasicBuilder { + checkNotThis(a) + checkNotThis(b) + override def toString = " ( " + a + " " + operatorString + " " + b + " ) " } -private class PipedProcessBuilder(first: ProcessBuilder, second: ProcessBuilder, toError: Boolean, exitCode: (Int,Int) => Int) extends SequentialProcessBuilder(first, second, if(toError) "#|!" else "#|") -{ - override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError, exitCode) +private class PipedProcessBuilder(first: ProcessBuilder, second: ProcessBuilder, toError: Boolean, exitCode: (Int, Int) => Int) extends SequentialProcessBuilder(first, second, if (toError) "#|!" 
else "#|") { + override def createProcess(io: ProcessIO) = new PipedProcesses(first, second, io, toError, exitCode) } -private class AndProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#&&") -{ - override def createProcess(io: ProcessIO) = new AndProcess(first, second, io) +private class AndProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#&&") { + override def createProcess(io: ProcessIO) = new AndProcess(first, second, io) } -private class OrProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#||") -{ - override def createProcess(io: ProcessIO) = new OrProcess(first, second, io) +private class OrProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "#||") { + override def createProcess(io: ProcessIO) = new OrProcess(first, second, io) } -private class SequenceProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "###") -{ - override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) +private class SequenceProcessBuilder(first: ProcessBuilder, second: ProcessBuilder) extends SequentialProcessBuilder(first, second, "###") { + override def createProcess(io: ProcessIO) = new ProcessSequence(first, second, io) } -private class SequentialProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO, evaluateSecondProcess: Int => Boolean) extends CompoundProcess -{ - protected[this] override def runAndExitValue() = - { - val first = a.run(io) - runInterruptible(first.exitValue)(first.destroy()) flatMap - { codeA => - if(evaluateSecondProcess(codeA)) - { - val second = b.run(io) - runInterruptible(second.exitValue)(second.destroy()) - } - else - Some(codeA) - } - } +private class SequentialProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO, evaluateSecondProcess: Int => Boolean) extends CompoundProcess { + protected[this] override def runAndExitValue() = + { + val first = a.run(io) + runInterruptible(first.exitValue)(first.destroy()) flatMap + { codeA => + if (evaluateSecondProcess(codeA)) { + val second = b.run(io) + runInterruptible(second.exitValue)(second.destroy()) + } else + Some(codeA) + } + } } private class AndProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ == 0) private class OrProcess(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, _ != 0) private class ProcessSequence(a: ProcessBuilder, b: ProcessBuilder, io: ProcessIO) extends SequentialProcess(a, b, io, ignore => true) +private class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean, exitCode: (Int, Int) => Int) extends CompoundProcess { + protected[this] override def runAndExitValue() = + { + val currentSource = new SyncVar[Option[InputStream]] + val pipeOut = new PipedOutputStream + val source = new PipeSource(currentSource, pipeOut, a.toString) + source.start() -private class PipedProcesses(a: ProcessBuilder, b: ProcessBuilder, defaultIO: ProcessIO, toError: Boolean, exitCode: (Int, Int) => Int) extends CompoundProcess -{ - protected[this] override def runAndExitValue() = - { - val currentSource = new SyncVar[Option[InputStream]] - val pipeOut = new PipedOutputStream - val source = new PipeSource(currentSource, pipeOut, a.toString) - source.start() - - val pipeIn = new 
PipedInputStream(pipeOut) - val currentSink = new SyncVar[Option[OutputStream]] - val sink = new PipeSink(pipeIn, currentSink, b.toString) - sink.start() + val pipeIn = new PipedInputStream(pipeOut) + val currentSink = new SyncVar[Option[OutputStream]] + val sink = new PipeSink(pipeIn, currentSink, b.toString) + sink.start() - def handleOutOrError(fromOutput: InputStream) = currentSource.put(Some(fromOutput)) + def handleOutOrError(fromOutput: InputStream) = currentSource.put(Some(fromOutput)) - val firstIO = - if(toError) - defaultIO.withError(handleOutOrError) - else - defaultIO.withOutput(handleOutOrError) - val secondIO = defaultIO.withInput(toInput => currentSink.put(Some(toInput)) ) - - val second = b.run(secondIO) - val first = a.run(firstIO) - try - { - runInterruptible { - val firstResult = first.exitValue - currentSource.put(None) - currentSink.put(None) - val secondResult = second.exitValue - exitCode(firstResult, secondResult) - } { - first.destroy() - second.destroy() - } - } - finally - { - BasicIO.close(pipeIn) - BasicIO.close(pipeOut) - } - } + val firstIO = + if (toError) + defaultIO.withError(handleOutOrError) + else + defaultIO.withOutput(handleOutOrError) + val secondIO = defaultIO.withInput(toInput => currentSink.put(Some(toInput))) + + val second = b.run(secondIO) + val first = a.run(firstIO) + try { + runInterruptible { + val firstResult = first.exitValue + currentSource.put(None) + currentSink.put(None) + val secondResult = second.exitValue + exitCode(firstResult, secondResult) + } { + first.destroy() + second.destroy() + } + } finally { + BasicIO.close(pipeIn) + BasicIO.close(pipeOut) + } + } } -private class PipeSource(currentSource: SyncVar[Option[InputStream]], pipe: PipedOutputStream, label: => String) extends Thread -{ - final override def run() - { - currentSource.get match - { - case Some(source) => - try { BasicIO.transferFully(source, pipe) } - catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } - finally - { - BasicIO.close(source) - currentSource.unset() - } - run() - case None => - currentSource.unset() - BasicIO.close(pipe) - } - } +private class PipeSource(currentSource: SyncVar[Option[InputStream]], pipe: PipedOutputStream, label: => String) extends Thread { + final override def run() { + currentSource.get match { + case Some(source) => + try { BasicIO.transferFully(source, pipe) } + catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } + finally { + BasicIO.close(source) + currentSource.unset() + } + run() + case None => + currentSource.unset() + BasicIO.close(pipe) + } + } } -private class PipeSink(pipe: PipedInputStream, currentSink: SyncVar[Option[OutputStream]], label: => String) extends Thread -{ - final override def run() - { - currentSink.get match - { - case Some(sink) => - try { BasicIO.transferFully(pipe, sink) } - catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } - finally - { - BasicIO.close(sink) - currentSink.unset() - } - run() - case None => - currentSink.unset() - } - } +private class PipeSink(pipe: PipedInputStream, currentSink: SyncVar[Option[OutputStream]], label: => String) extends Thread { + final override def run() { + currentSink.get match { + case Some(sink) => + try { BasicIO.transferFully(pipe, sink) } + catch { case e: IOException => println("I/O error " + e.getMessage + " for process: " + label); e.printStackTrace() } + 
finally { + BasicIO.close(sink) + currentSink.unset() + } + run() + case None => + currentSink.unset() + } + } } -private[sbt] class DummyProcessBuilder(override val toString: String, exitValue : => Int) extends AbstractProcessBuilder -{ - override def run(io: ProcessIO): Process = new DummyProcess(exitValue) - override def canPipeTo = true +private[sbt] class DummyProcessBuilder(override val toString: String, exitValue: => Int) extends AbstractProcessBuilder { + override def run(io: ProcessIO): Process = new DummyProcess(exitValue) + override def canPipeTo = true } -/** A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O. -* The implementation of `exitValue` waits until these threads die before returning. */ -private class DummyProcess(action: => Int) extends Process -{ - private[this] val exitCode = Future(action) - override def exitValue() = exitCode() - override def destroy() {} +/** + * A thin wrapper around a java.lang.Process. `ioThreads` are the Threads created to do I/O. + * The implementation of `exitValue` waits until these threads die before returning. + */ +private class DummyProcess(action: => Int) extends Process { + private[this] val exitCode = Future(action) + override def exitValue() = exitCode() + override def destroy() {} } /** Represents a simple command without any redirection or combination. */ -private[sbt] class SimpleProcessBuilder(p: JProcessBuilder) extends AbstractProcessBuilder -{ - override def run(io: ProcessIO): Process = - { - import io._ - val inherited = inheritInput(p) - val process = p.start() +private[sbt] class SimpleProcessBuilder(p: JProcessBuilder) extends AbstractProcessBuilder { + override def run(io: ProcessIO): Process = + { + import io._ + val inherited = inheritInput(p) + val process = p.start() - // spawn threads that process the output and error streams, and also write input if not inherited. - if (!inherited) - Spawn(writeInput(process.getOutputStream)) - val outThread = Spawn(processOutput(process.getInputStream)) - val errorThread = - if(!p.redirectErrorStream) - Spawn(processError(process.getErrorStream)) :: Nil - else - Nil - new SimpleProcess(process, outThread :: errorThread) - } - override def toString = p.command.toString - override def canPipeTo = true + // spawn threads that process the output and error streams, and also write input if not inherited. + if (!inherited) + Spawn(writeInput(process.getOutputStream)) + val outThread = Spawn(processOutput(process.getInputStream)) + val errorThread = + if (!p.redirectErrorStream) + Spawn(processError(process.getErrorStream)) :: Nil + else + Nil + new SimpleProcess(process, outThread :: errorThread) + } + override def toString = p.command.toString + override def canPipeTo = true } -/** A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the -* output and error streams of the process. -* The implementation of `exitValue` wait for the process to finish and then waits until the threads reading output and error streams die before -* returning. 
Note that the thread that reads the input stream cannot be interrupted, see https://github.com/sbt/sbt/issues/327 and -* http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4514257 */ -private class SimpleProcess(p: JProcess, outputThreads: List[Thread]) extends Process -{ - override def exitValue() = - { - try { - p.waitFor() - } catch { - case _: InterruptedException => p.destroy() - } - outputThreads.foreach(_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this) - p.exitValue() - } - override def destroy() = p.destroy() +/** + * A thin wrapper around a java.lang.Process. `outputThreads` are the Threads created to read from the + * output and error streams of the process. + * The implementation of `exitValue` waits for the process to finish and then waits until the threads reading output and error streams die before + * returning. Note that the thread that reads the input stream cannot be interrupted, see https://github.com/sbt/sbt/issues/327 and + * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4514257 */ +private class SimpleProcess(p: JProcess, outputThreads: List[Thread]) extends Process { + override def exitValue() = + { + try { + p.waitFor() + } catch { + case _: InterruptedException => p.destroy() + } + outputThreads.foreach(_.join()) // this ensures that all output is complete before returning (waitFor does not ensure this) + p.exitValue() + } + override def destroy() = p.destroy() } private class FileOutput(file: File, append: Boolean) extends OutputStreamBuilder(new FileOutputStream(file, append), file.getAbsolutePath) @@ -427,55 +389,48 @@ private class URLInput(url: URL) extends InputStreamBuilder(url.openStream, url. private class FileInput(file: File) extends InputStreamBuilder(new FileInputStream(file), file.getAbsolutePath) import Uncloseable.protect -private class OutputStreamBuilder(stream: => OutputStream, label: String) extends ThreadProcessBuilder(label, _.writeInput(protect(stream))) -{ - def this(stream: => OutputStream) = this(stream, "") +private class OutputStreamBuilder(stream: => OutputStream, label: String) extends ThreadProcessBuilder(label, _.writeInput(protect(stream))) { + def this(stream: => OutputStream) = this(stream, "") } -private class InputStreamBuilder(stream: => InputStream, label: String) extends ThreadProcessBuilder(label, _.processOutput(protect(stream))) -{ - def this(stream: => InputStream) = this(stream, "") +private class InputStreamBuilder(stream: => InputStream, label: String) extends ThreadProcessBuilder(label, _.processOutput(protect(stream))) { + def this(stream: => InputStream) = this(stream, "") } -private abstract class ThreadProcessBuilder(override val toString: String, runImpl: ProcessIO => Unit) extends AbstractProcessBuilder -{ - override def run(io: ProcessIO): Process = - { - val success = new SyncVar[Boolean] - success.put(false) - new ThreadProcess(Spawn {runImpl(io); success.set(true) }, success) - } +private abstract class ThreadProcessBuilder(override val toString: String, runImpl: ProcessIO => Unit) extends AbstractProcessBuilder { + override def run(io: ProcessIO): Process = + { + val success = new SyncVar[Boolean] + success.put(false) + new ThreadProcess(Spawn { runImpl(io); success.set(true) }, success) + } } -private final class 
ThreadProcess(thread: Thread, success: SyncVar[Boolean]) extends Process { + override def exitValue() = + { + thread.join() + if (success.get) 0 else 1 + } + override def destroy() { thread.interrupt() } } -object Uncloseable -{ - def apply(in: InputStream): InputStream = new FilterInputStream(in) { override def close() {} } - def apply(out: OutputStream): OutputStream = new FilterOutputStream(out) { override def close() {} } - def protect(in: InputStream): InputStream = if(in eq System.in) Uncloseable(in) else in - def protect(out: OutputStream): OutputStream = if( (out eq System.out) || (out eq System.err)) Uncloseable(out) else out +object Uncloseable { + def apply(in: InputStream): InputStream = new FilterInputStream(in) { override def close() {} } + def apply(out: OutputStream): OutputStream = new FilterOutputStream(out) { override def close() {} } + def protect(in: InputStream): InputStream = if (in eq System.in) Uncloseable(in) else in + def protect(out: OutputStream): OutputStream = if ((out eq System.out) || (out eq System.err)) Uncloseable(out) else out } -private object Streamed -{ - def apply[T](nonzeroException: Boolean): Streamed[T] = - { - val q = new java.util.concurrent.LinkedBlockingQueue[Either[Int, T]] - def next(): Stream[T] = - q.take match - { - case Left(0) => Stream.empty - case Left(code) => if(nonzeroException) error("Nonzero exit code: " + code) else Stream.empty - case Right(s) => Stream.cons(s, next) - } - new Streamed((s: T) => q.put(Right(s)), code => q.put(Left(code)), () => next()) - } +private object Streamed { + def apply[T](nonzeroException: Boolean): Streamed[T] = + { + val q = new java.util.concurrent.LinkedBlockingQueue[Either[Int, T]] + def next(): Stream[T] = + q.take match { + case Left(0) => Stream.empty + case Left(code) => if (nonzeroException) error("Nonzero exit code: " + code) else Stream.empty + case Right(s) => Stream.cons(s, next) + } + new Streamed((s: T) => q.put(Right(s)), code => q.put(Left(code)), () => next()) + } } private final class Streamed[T](val process: T => Unit, val done: Int => Unit, val stream: () => Stream[T]) extends NotNull diff --git a/util/process/src/main/scala/sbt/SyncVar.scala b/util/process/src/main/scala/sbt/SyncVar.scala index a04675851..c268aac3d 100644 --- a/util/process/src/main/scala/sbt/SyncVar.scala +++ b/util/process/src/main/scala/sbt/SyncVar.scala @@ -1,40 +1,39 @@ package sbt // minimal copy of scala.concurrent.SyncVar since that version deprecated put and unset -private[sbt] final class SyncVar[A] -{ - private[this] var isDefined: Boolean = false - private[this] var value: Option[A] = None +private[sbt] final class SyncVar[A] { + private[this] var isDefined: Boolean = false + private[this] var value: Option[A] = None - /** Waits until a value is set and then gets it. Does not clear the value */ - def get: A = synchronized { - while (!isDefined) wait() - value.get - } + /** Waits until a value is set and then gets it. Does not clear the value */ + def get: A = synchronized { + while (!isDefined) wait() + value.get + } - /** Waits until a value is set, gets it, and finally clears the value. */ - def take(): A = synchronized { - try get finally unset() - } + /** Waits until a value is set, gets it, and finally clears the value. */ + def take(): A = synchronized { + try get finally unset() + } - /** Sets the value, whether or not it is currently defined. 
*/ - def set(x: A): Unit = synchronized { - isDefined = true - value = Some(x) - notifyAll() - } + /** Sets the value, whether or not it is currently defined. */ + def set(x: A): Unit = synchronized { + isDefined = true + value = Some(x) + notifyAll() + } - /** Sets the value, first waiting until it is undefined if it is currently defined. */ - def put(x: A): Unit = synchronized { - while (isDefined) wait() - set(x) - } + /** Sets the value, first waiting until it is undefined if it is currently defined. */ + def put(x: A): Unit = synchronized { + while (isDefined) wait() + set(x) + } - /** Clears the value, whether or not it is current defined. */ - def unset(): Unit = synchronized { - isDefined = false - value = None - notifyAll() - } + /** Clears the value, whether or not it is currently defined. */ + def unset(): Unit = synchronized { + isDefined = false + value = None + notifyAll() + } } diff --git a/util/relation/src/main/scala/sbt/Relation.scala b/util/relation/src/main/scala/sbt/Relation.scala index 77c0b70c2..987aafb14 100644 --- a/util/relation/src/main/scala/sbt/Relation.scala +++ b/util/relation/src/main/scala/sbt/Relation.scala @@ -3,165 +3,170 @@ */ package sbt - import Relation._ +import Relation._ -object Relation -{ - /** Constructs a new immutable, finite relation that is initially empty. */ - def empty[A,B]: Relation[A,B] = make(Map.empty, Map.empty) +object Relation { + /** Constructs a new immutable, finite relation that is initially empty. */ + def empty[A, B]: Relation[A, B] = make(Map.empty, Map.empty) - /** Constructs a [[Relation]] from underlying `forward` and `reverse` representations, without checking that they are consistent. - * This is a low-level constructor and the alternatives [[empty]] and [[reconstruct]] should be preferred. */ - def make[A,B](forward: Map[A,Set[B]], reverse: Map[B, Set[A]]): Relation[A,B] = new MRelation(forward, reverse) + /** + * Constructs a [[Relation]] from underlying `forward` and `reverse` representations, without checking that they are consistent. + * This is a low-level constructor and the alternatives [[empty]] and [[reconstruct]] should be preferred. + */ + def make[A, B](forward: Map[A, Set[B]], reverse: Map[B, Set[A]]): Relation[A, B] = new MRelation(forward, reverse) - /** Constructs a relation such that for every entry `_1 -> _2s` in `forward` and every `_2` in `_2s`, `(_1, _2)` is in the relation. */ - def reconstruct[A,B](forward: Map[A, Set[B]]): Relation[A,B] = - { - val reversePairs = for( (a,bs) <- forward.view; b <- bs.view) yield (b, a) - val reverse = (Map.empty[B,Set[A]] /: reversePairs) { case (m, (b, a)) => add(m, b, a :: Nil) } - make(forward filter { case (a, bs) => bs.nonEmpty }, reverse) - } + /** Constructs a relation such that for every entry `_1 -> _2s` in `forward` and every `_2` in `_2s`, `(_1, _2)` is in the relation. 
*/ + def reconstruct[A, B](forward: Map[A, Set[B]]): Relation[A, B] = + { + val reversePairs = for ((a, bs) <- forward.view; b <- bs.view) yield (b, a) + val reverse = (Map.empty[B, Set[A]] /: reversePairs) { case (m, (b, a)) => add(m, b, a :: Nil) } + make(forward filter { case (a, bs) => bs.nonEmpty }, reverse) + } - def merge[A,B](rels: Traversable[Relation[A,B]]): Relation[A,B] = (Relation.empty[A, B] /: rels)(_ ++ _) + def merge[A, B](rels: Traversable[Relation[A, B]]): Relation[A, B] = (Relation.empty[A, B] /: rels)(_ ++ _) - private[sbt] def remove[X,Y](map: M[X,Y], from: X, to: Y): M[X,Y] = - map.get(from) match { - case Some(tos) => - val newSet = tos - to - if(newSet.isEmpty) map - from else map.updated(from, newSet) - case None => map - } + private[sbt] def remove[X, Y](map: M[X, Y], from: X, to: Y): M[X, Y] = + map.get(from) match { + case Some(tos) => + val newSet = tos - to + if (newSet.isEmpty) map - from else map.updated(from, newSet) + case None => map + } - private[sbt] def combine[X,Y](a: M[X,Y], b: M[X,Y]): M[X,Y] = - (a /: b) { (map, mapping) => add(map, mapping._1, mapping._2) } + private[sbt] def combine[X, Y](a: M[X, Y], b: M[X, Y]): M[X, Y] = + (a /: b) { (map, mapping) => add(map, mapping._1, mapping._2) } - private[sbt] def add[X,Y](map: M[X,Y], from: X, to: Traversable[Y]): M[X,Y] = - map.updated(from, get(map, from) ++ to) + private[sbt] def add[X, Y](map: M[X, Y], from: X, to: Traversable[Y]): M[X, Y] = + map.updated(from, get(map, from) ++ to) - private[sbt] def get[X,Y](map: M[X,Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y]) + private[sbt] def get[X, Y](map: M[X, Y], t: X): Set[Y] = map.getOrElse(t, Set.empty[Y]) - private[sbt] type M[X,Y] = Map[X, Set[Y]] + private[sbt]type M[X, Y] = Map[X, Set[Y]] } /** Binary relation between A and B. It is a set of pairs (_1, _2) for _1 in A, _2 in B. */ -trait Relation[A,B] -{ - /** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */ - def forward(_1: A): Set[B] - /** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */ - def reverse(_2: B): Set[A] - /** Includes `pair` in the relation. */ - def +(pair: (A, B)): Relation[A,B] - /** Includes `(a, b)` in the relation. */ - def +(a: A, b: B): Relation[A,B] - /** Includes in the relation `(a, b)` for all `b` in `bs`. */ - def +(a: A, bs: Traversable[B]): Relation[A,B] - /** Returns the union of the relation `r` with this relation. */ - def ++(r: Relation[A,B]): Relation[A,B] - /** Includes the given pairs in this relation. */ - def ++(rs: Traversable[(A,B)]): Relation[A,B] - /** Removes all elements `(_1, _2)` for all `_1` in `_1s` from this relation. */ - def --(_1s: Traversable[A]): Relation[A,B] - /** Removes all `pairs` from this relation. */ - def --(pairs: TraversableOnce[(A,B)]): Relation[A,B] - /** Removes all `relations` from this relation. */ - def --(relations: Relation[A,B]): Relation[A,B] - /** Removes all pairs `(_1, _2)` from this relation. */ - def -(_1: A): Relation[A,B] - /** Removes `pair` from this relation. */ - def -(pair: (A,B)): Relation[A,B] - /** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */ - def _1s: collection.Set[A] - /** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */ - def _2s: collection.Set[B] - /** Returns the number of pairs in this relation */ - def size: Int +trait Relation[A, B] { + /** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. 
*/ + def forward(_1: A): Set[B] + /** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */ + def reverse(_2: B): Set[A] + /** Includes `pair` in the relation. */ + def +(pair: (A, B)): Relation[A, B] + /** Includes `(a, b)` in the relation. */ + def +(a: A, b: B): Relation[A, B] + /** Includes in the relation `(a, b)` for all `b` in `bs`. */ + def +(a: A, bs: Traversable[B]): Relation[A, B] + /** Returns the union of the relation `r` with this relation. */ + def ++(r: Relation[A, B]): Relation[A, B] + /** Includes the given pairs in this relation. */ + def ++(rs: Traversable[(A, B)]): Relation[A, B] + /** Removes all elements `(_1, _2)` for all `_1` in `_1s` from this relation. */ + def --(_1s: Traversable[A]): Relation[A, B] + /** Removes all `pairs` from this relation. */ + def --(pairs: TraversableOnce[(A, B)]): Relation[A, B] + /** Removes all `relations` from this relation. */ + def --(relations: Relation[A, B]): Relation[A, B] + /** Removes all pairs `(_1, _2)` from this relation. */ + def -(_1: A): Relation[A, B] + /** Removes `pair` from this relation. */ + def -(pair: (A, B)): Relation[A, B] + /** Returns the set of all `_1`s such that `(_1, _2)` is in this relation. */ + def _1s: collection.Set[A] + /** Returns the set of all `_2`s such that `(_1, _2)` is in this relation. */ + def _2s: collection.Set[B] + /** Returns the number of pairs in this relation */ + def size: Int - /** Returns true iff `(a,b)` is in this relation*/ - def contains(a: A, b: B): Boolean + /** Returns true iff `(a,b)` is in this relation*/ + def contains(a: A, b: B): Boolean - /** Returns a relation with only pairs `(a,b)` for which `f(a,b)` is true.*/ - def filter(f: (A,B) => Boolean): Relation[A,B] + /** Returns a relation with only pairs `(a,b)` for which `f(a,b)` is true.*/ + def filter(f: (A, B) => Boolean): Relation[A, B] - /** Returns a pair of relations: the first contains only pairs `(a,b)` for which `f(a,b)` is true and - * the other only pairs `(a,b)` for which `f(a,b)` is false. */ - def partition(f: (A,B) => Boolean): (Relation[A,B], Relation[A,B]) + /** + * Returns a pair of relations: the first contains only pairs `(a,b)` for which `f(a,b)` is true and + * the other only pairs `(a,b)` for which `f(a,b)` is false. + */ + def partition(f: (A, B) => Boolean): (Relation[A, B], Relation[A, B]) - /** Partitions this relation into a map of relations according to some discriminator function. */ - def groupBy[K](discriminator: ((A,B)) => K): Map[K, Relation[A,B]] + /** Partitions this relation into a map of relations according to some discriminator function. */ + def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]] - /** Returns all pairs in this relation.*/ - def all: Traversable[(A,B)] + /** Returns all pairs in this relation.*/ + def all: Traversable[(A, B)] - /** Represents this relation as a `Map` from a `_1` to the set of `_2`s such that `(_1, _2)` is in this relation. - * - * Specifically, there is one entry for each `_1` such that `(_1, _2)` is in this relation for some `_2`. - * The value associated with a given `_1` is the set of all `_2`s such that `(_1, _2)` is in this relation.*/ - def forwardMap: Map[A, Set[B]] + /** + * Represents this relation as a `Map` from a `_1` to the set of `_2`s such that `(_1, _2)` is in this relation. + * + * Specifically, there is one entry for each `_1` such that `(_1, _2)` is in this relation for some `_2`. + * The value associated with a given `_1` is the set of all `_2`s such that `(_1, _2)` is in this relation. 
+ */ + def forwardMap: Map[A, Set[B]] - /** Represents this relation as a `Map` from a `_2` to the set of `_1`s such that `(_1, _2)` is in this relation. - * - * Specifically, there is one entry for each `_2` such that `(_1, _2)` is in this relation for some `_1`. - * The value associated with a given `_2` is the set of all `_1`s such that `(_1, _2)` is in this relation.*/ - def reverseMap: Map[B, Set[A]] + /** + * Represents this relation as a `Map` from a `_2` to the set of `_1`s such that `(_1, _2)` is in this relation. + * + * Specifically, there is one entry for each `_2` such that `(_1, _2)` is in this relation for some `_1`. + * The value associated with a given `_2` is the set of all `_1`s such that `(_1, _2)` is in this relation. + */ + def reverseMap: Map[B, Set[A]] } // Note that we assume without checking that fwd and rev are consistent. -private final class MRelation[A,B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) extends Relation[A,B] -{ - def forwardMap = fwd - def reverseMap = rev +private final class MRelation[A, B](fwd: Map[A, Set[B]], rev: Map[B, Set[A]]) extends Relation[A, B] { + def forwardMap = fwd + def reverseMap = rev - def forward(t: A) = get(fwd, t) - def reverse(t: B) = get(rev, t) + def forward(t: A) = get(fwd, t) + def reverse(t: B) = get(rev, t) - def _1s = fwd.keySet - def _2s = rev.keySet + def _1s = fwd.keySet + def _2s = rev.keySet - def size = (fwd.valuesIterator map { _.size }).foldLeft(0)(_ + _) + def size = (fwd.valuesIterator map { _.size }).foldLeft(0)(_ + _) - def all: Traversable[(A,B)] = fwd.iterator.flatMap { case (a, bs) => bs.iterator.map( b => (a,b) ) }.toTraversable + def all: Traversable[(A, B)] = fwd.iterator.flatMap { case (a, bs) => bs.iterator.map(b => (a, b)) }.toTraversable - def +(pair: (A,B)) = this + (pair._1, Set(pair._2)) - def +(from: A, to: B) = this + (from, to :: Nil) - def +(from: A, to: Traversable[B]) = if(to.isEmpty) this else - new MRelation( add(fwd, from, to), (rev /: to) { (map, t) => add(map, t, from :: Nil) }) + def +(pair: (A, B)) = this + (pair._1, Set(pair._2)) + def +(from: A, to: B) = this + (from, to :: Nil) + def +(from: A, to: Traversable[B]) = if (to.isEmpty) this else + new MRelation(add(fwd, from, to), (rev /: to) { (map, t) => add(map, t, from :: Nil) }) - def ++(rs: Traversable[(A,B)]) = ((this: Relation[A,B]) /: rs) { _ + _ } - def ++(other: Relation[A,B]) = new MRelation[A,B]( combine(fwd, other.forwardMap), combine(rev, other.reverseMap) ) + def ++(rs: Traversable[(A, B)]) = ((this: Relation[A, B]) /: rs) { _ + _ } + def ++(other: Relation[A, B]) = new MRelation[A, B](combine(fwd, other.forwardMap), combine(rev, other.reverseMap)) - def --(ts: Traversable[A]): Relation[A,B] = ((this: Relation[A,B]) /: ts) { _ - _ } - def --(pairs: TraversableOnce[(A,B)]): Relation[A,B] = ((this: Relation[A,B]) /: pairs) { _ - _ } - def --(relations: Relation[A,B]): Relation[A,B] = --(relations.all) - def -(pair: (A,B)): Relation[A,B] = - new MRelation( remove(fwd, pair._1, pair._2), remove(rev, pair._2, pair._1) ) - def -(t: A): Relation[A,B] = - fwd.get(t) match { - case Some(rs) => - val upRev = (rev /: rs) { (map, r) => remove(map, r, t) } - new MRelation(fwd - t, upRev) - case None => this - } + def --(ts: Traversable[A]): Relation[A, B] = ((this: Relation[A, B]) /: ts) { _ - _ } + def --(pairs: TraversableOnce[(A, B)]): Relation[A, B] = ((this: Relation[A, B]) /: pairs) { _ - _ } + def --(relations: Relation[A, B]): Relation[A, B] = --(relations.all) + def -(pair: (A, B)): Relation[A, B] = + new 
MRelation(remove(fwd, pair._1, pair._2), remove(rev, pair._2, pair._1)) + def -(t: A): Relation[A, B] = + fwd.get(t) match { + case Some(rs) => + val upRev = (rev /: rs) { (map, r) => remove(map, r, t) } + new MRelation(fwd - t, upRev) + case None => this + } - def filter(f: (A,B) => Boolean): Relation[A,B] = Relation.empty[A,B] ++ all.filter(f.tupled) + def filter(f: (A, B) => Boolean): Relation[A, B] = Relation.empty[A, B] ++ all.filter(f.tupled) - def partition(f: (A,B) => Boolean): (Relation[A,B], Relation[A,B]) = { - val (y, n) = all.partition(f.tupled) - (Relation.empty[A,B] ++ y, Relation.empty[A,B] ++ n) - } + def partition(f: (A, B) => Boolean): (Relation[A, B], Relation[A, B]) = { + val (y, n) = all.partition(f.tupled) + (Relation.empty[A, B] ++ y, Relation.empty[A, B] ++ n) + } - def groupBy[K](discriminator: ((A,B)) => K): Map[K, Relation[A,B]] = all.groupBy(discriminator) mapValues { Relation.empty[A,B] ++ _ } + def groupBy[K](discriminator: ((A, B)) => K): Map[K, Relation[A, B]] = all.groupBy(discriminator) mapValues { Relation.empty[A, B] ++ _ } - def contains(a: A, b: B): Boolean = forward(a)(b) + def contains(a: A, b: B): Boolean = forward(a)(b) - override def equals(other: Any) = other match { - // We assume that the forward and reverse maps are consistent, so we only use the forward map - // for equality. Note that key -> Empty is semantically the same as key not existing. - case o: MRelation[A,B] => forwardMap.filterNot(_._2.isEmpty) == o.forwardMap.filterNot(_._2.isEmpty) - case _ => false - } + override def equals(other: Any) = other match { + // We assume that the forward and reverse maps are consistent, so we only use the forward map + // for equality. Note that key -> Empty is semantically the same as key not existing. + case o: MRelation[A, B] => forwardMap.filterNot(_._2.isEmpty) == o.forwardMap.filterNot(_._2.isEmpty) + case _ => false + } - override def hashCode = fwd.filterNot(_._2.isEmpty).hashCode() + override def hashCode = fwd.filterNot(_._2.isEmpty).hashCode() - override def toString = all.map { case (a,b) => a + " -> " + b }.mkString("Relation [", ", ", "]") + override def toString = all.map { case (a, b) => a + " -> " + b }.mkString("Relation [", ", ", "]") }
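
For reference, a minimal usage sketch of the `Relation` API reformatted above; the file and class names are hypothetical and not taken from the sbt sources:

import sbt.Relation

object RelationExample {
  def main(args: Array[String]): Unit = {
    // Relate made-up source files to the classes they define.
    val rel = Relation.empty[String, String] + ("A.scala", "A") + ("A.scala", "A$") + ("B.scala", "B")

    println(rel.forward("A.scala"))       // Set(A, A$)
    println(rel.reverse("B"))             // Set(B.scala)
    println(rel.contains("A.scala", "A")) // true

    // Removing a _1 drops every pair it participates in, from both the forward and reverse maps.
    val pruned = rel - "A.scala"
    println(pruned.all.toList)            // List((B.scala,B))
  }
}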